diff --git a/.github/ISSUE_TEMPLATE/bug_7.md b/.github/ISSUE_TEMPLATE/bug_7.md deleted file mode 100644 index 48faa9c79ed6b..0000000000000 --- a/.github/ISSUE_TEMPLATE/bug_7.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -name: 🐞 Bug v7 -about: File a bug/issue against v7.x -title: '[BUG] ' -labels: Bug, Needs Triage, Release 7.x -assignees: '' - ---- - -<!-- -Note: Please search to see if an issue already exists for your problem: https://github.com/npm/cli/issues ---> - -### Current Behavior: -<!-- ex. a clear & concise description of what you're experiencing. --> - -### Expected Behavior: -<!-- ex. a clear & concise description of what you expected to happen. --> - -### Steps To Reproduce: -<!-- -ex. steps to reproduce the behavior: -1. In this environment... -2. With this config... -3. Run '...' -4. See error... ---> - -### Environment: -<!-- -ex. -- OS: Ubuntu 20.04 -- Node: 13.14.0 -- npm: 6.4.12 ---> diff --git a/.github/ISSUE_TEMPLATE/bug_7.yml b/.github/ISSUE_TEMPLATE/bug_7.yml new file mode 100644 index 0000000000000..455177d277a5c --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_7.yml @@ -0,0 +1,49 @@ +name: 🐞 Bug v7 +description: File a bug/issue against v7.x +title: "[BUG] <title>" +labels: [Bug, Needs Triage, Release 7.x] +body: +- type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please [search here](https://github.com/npm/cli/issues) to see if an issue already exists for your problem. + options: + - label: I have searched the existing issues + required: true +- type: textarea + attributes: + label: Current Behavior + description: A clear & concise description of what you're experiencing. + validations: + required: false +- type: textarea + attributes: + label: Expected Behavior + description: A clear & concise description of what you expected to happen. + validations: + required: false +- type: textarea + attributes: + label: Steps To Reproduce + description: Steps to reproduce the behavior. + value: | + 1. In this environment... + 2. With this config... + 3. Run '...' + 4. See error... + validations: + required: false +- type: textarea + attributes: + label: Environment + description: | + examples: + - **OS**: Ubuntu 20.04 + - **Node**: 13.14.0 + - **npm**: 7.6.3 + value: | + - OS: + - Node: + - npm: + validations: + required: false diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 851b3fdcb9fe2..3b622ed82fa78 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,11 +37,94 @@ jobs: - name: Run linting run: node . run licenses + smoke-tests: + strategy: + fail-fast: false + matrix: + node-version: [10.x, 12.x, 14.x, 16.x] + platform: + - os: ubuntu-latest + shell: bash + - os: macos-latest + shell: bash + - os: windows-latest + shell: bash + - os: windows-latest + shell: powershell + + runs-on: ${{ matrix.platform.os }} + defaults: + run: + shell: ${{ matrix.platform.shell }} + + steps: + # Checkout the npm/cli repo + - uses: actions/checkout@v2 + + # Installs the specific version of Node.js + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + + # Run the installer script + - name: Install dependencies + run: | + node . install --ignore-scripts --no-audit + node . rebuild + + # Run the smoke tests + - name: Run Smoke tests + run: node . 
run --ignore-scripts smoke-tests -- --no-check-coverage -t600 -Rbase -c + env: + DEPLOY_VERSION: testing + + workspaces-tests: + strategy: + fail-fast: false + matrix: + node-version: [10.x, 12.x, 14.x, 16.x] + platform: + - os: ubuntu-latest + shell: bash + - os: macos-latest + shell: bash + - os: windows-latest + shell: bash + - os: windows-latest + shell: powershell + + runs-on: ${{ matrix.platform.os }} + defaults: + run: + shell: ${{ matrix.platform.shell }} + + steps: + # Checkout the npm/cli repo + - uses: actions/checkout@v2 + + # Installs the specific version of Node.js + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + + # Run the installer script + - name: Install dependencies + run: | + node . install --ignore-scripts --no-audit + node . rebuild + + - name: Run workspaces tests + run: node . test -w ./packages -- --no-check-coverage -t600 -Rbase -c + env: + DEPLOY_VERSION: testing + build: strategy: fail-fast: false matrix: - node-version: ['10.1', 10.x, '12.1', 12.x, '14.1', 14.x] + node-version: ['10.1', 10.x, '12.1', 12.x, '14.1', 14.x, '16.1', 16.x] platform: - os: ubuntu-latest shell: bash diff --git a/.github/workflows/create-cli-deps-pr.yml b/.github/workflows/create-cli-deps-pr.yml new file mode 100644 index 0000000000000..a59302ebeb0ac --- /dev/null +++ b/.github/workflows/create-cli-deps-pr.yml @@ -0,0 +1,89 @@ +name: "Create CLI Deps PR" + +on: + workflow_dispatch: + inputs: + npmVersion: + description: "6.x.x or latest" + required: true + default: 'latest' + + +jobs: + create-pull-request: + runs-on: ubuntu-latest + env: + GITHUB_TOKEN: ${{ secrets.NPM_ROBOT_USER_PAT }} + NPM_VERSION: ${{ github.event.inputs.npmVersion }} + SUPPORT_BRANCH: "v14.x-staging" + steps: + - name: Update gh cli & install jq parser + run: | + sudo apt-get update -y + sudo apt update + sudo apt-get install -y jq + sudo apt install gh + - name: Checkout npm/node + uses: actions/checkout@v2 + with: + fetch-depth: 0 + ref: master + repository: "npm/node" + token: ${{ secrets.NPM_ROBOT_USER_PAT }} + - name: Pull (Fast-Forward) upstream + id: sync + uses: aormsby/Fork-Sync-With-Upstream-action@v2.1 + with: + upstream_repository: nodejs/node + upstream_branch: master + target_branch: master + git_pull_args: --ff-only # optional arg use, defaults to simple 'pull' + github_token: ${{ secrets.NPM_ROBOT_USER_PAT }} # optional, for accessing repos that require authentication + - name: Run dependency updates and create PR + run: | + npm_tag="" + base_branch="" + if [ "$NPM_VERSION" == "latest" ] + then + npm_tag=`npm view npm@latest version` + base_branch="master" + else + npm_tag="$NPM_VERSION" + base_branch="v14.x-staging" + fi + + git config user.name "npm team" + git config user.email "ops+robot@npmjs.com" + git checkout -b "npm-$npm_tag" + + BASE_DIR="$( pwd )"/ + DEPS_DIR="$BASE_DIR"deps/ + + echo "Cloning CLI repo" + gh repo clone npm/cli + + echo "Prepping CLI repo for release" + cd cli + git checkout v"$npm_tag" + make + make release + + + echo "Removing old npm" + cd "$DEPS_DIR" + rm -rf npm/ + + echo "Copying new npm" + tar zxf "$BASE_DIR"cli/release/npm-"$npm_tag".tgz + + echo "Removing CLI workspace" + cd "$BASE_DIR" + rm -rf cli + + git add -A deps/npm + git commit -m "deps: upgrade npm to $npm_tag" + git rebase --whitespace=fix master + git push origin "npm-$npm_tag" + gh_release_body=`gh release view v"$npm_tag" -R npm/cli --json body | jq -r '.body'` + + gh pr create -R "nodejs/node" -B "$base_branch" -H 
"npm:npm-$npm_tag" --title "deps: upgrade npm to $npm_tag" --body "$gh_release_body" diff --git a/.gitignore b/.gitignore index e704f5ad3484b..cbcf027f30343 100644 --- a/.gitignore +++ b/.gitignore @@ -27,3 +27,4 @@ npm-debug.log /coverage /*.tgz /.editorconfig +.vscode/ \ No newline at end of file diff --git a/.npmignore b/.npmignore deleted file mode 100644 index d1b1fde3d4e0d..0000000000000 --- a/.npmignore +++ /dev/null @@ -1,31 +0,0 @@ -*.swp -.*.swp -netlify.toml -npm-debug.log -/.github -/test -node_modules/marked -node_modules/marked-man -node_modules/tap -tap-snapshots -node_modules/.bin -node_modules/npm-registry-mock -/npmrc -/release/ -/coverage/ - -# don't need these in the npm package. -html/*.png -docs/nav.yml - -# don't ignore .npmignore files -# these are used in some tests. -!.npmignore - -/npm-*.tgz - -*.pyc - -Session.vim -.nyc_output -/.editorconfig diff --git a/AUTHORS b/AUTHORS index e58d1dffaeaf1..c3685720598a9 100644 --- a/AUTHORS +++ b/AUTHORS @@ -755,3 +755,37 @@ Ikko Ashimine <eltociear@gmail.com> MrBrain295 <66077254+MrBrain295@users.noreply.github.com> kumavis <aaron@kumavis.me> Christof Lemke <christoflemke@github.com> +Nathan Shively-Sanders <293473+sandersn@users.noreply.github.com> +Bjørn Johansen <bjjohans@microsoft.com> +Fraqe <f@fraqe.ca> +Edward Grech <dwardu@gmail.com> +Kenrick <kenrick95@gmail.com> +Karthik Sundari <karthik_sundari@comcast.com> +Jan Sepke <625043+jansepke@users.noreply.github.com> +Augusto Moura <augusto.borgesm@gmail.com> +Eric Chow <eric.zjp.chow@gmail.com> +kbayrhammer <klaus.bayrhammer@redbull.com> +James Chen-Smith <jameschensmith@gmail.com> +Yash Singh <saiansh2525@gmail.com> +Danielle Church <dani.church@gmail.com> +Seth Thomas <seth@emailseth.com> +Andreas <andreas@bielk.se> +Felipe Santos <felipecassiors@gmail.com> +Luigi Pinca <luigipinca@gmail.com> +Marco Sirabella <marco@sirabella.org> +wangsai <wangsai@bootcss.com> +Luke Hefson <luke@github.com> +mrmlnc <mrmlnc@yandex-team.ru> +Juan Picado <juanpicado19@gmail.com> +Kevin Cormier <kcormier@redhat.com> +Nariyasu Heseri <heserisiyookang@gmail.com> +rethab <rethab@protonmail.ch> +Spencer Wilson <5624115+spencerwilson@users.noreply.github.com> +Daniel Park <gimli01@github.com> +Daniel Park <daniel.park@endevors.io> +Luke Karrys <luke@lukekarrys.com> +Ivan <ivanaguilar01@live.com.mx> +Aluneed <31174087+aluneed@users.noreply.github.com> +relrelb <relrelbachar@gmail.com> +Cameron Tacklind <cameron@tacklind.com> +Demira Dimitrova <demiradimitrova@github.com> diff --git a/CHANGELOG.md b/CHANGELOG.md index 22e6018f0a86c..7f4f3c9678bfb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,1217 @@ +## v7.20.3 (2021-07-29) + +### BUG FIXES + +* [`66dc5f94d`](https://github.com/npm/cli/commit/66dc5f94dfb5bc99c715e075cde1ab9c1ec84a83) + [#3588](https://github.com/npm/cli/issues/3588) + update eresolve explanations for new arborist data provided +* [`99575acab`](https://github.com/npm/cli/commit/99575acab5c93c03c59cb918c7916647b2c0be51) + [#3591](https://github.com/npm/cli/issues/3591) + fix(node_modules): remove duplicated file + ([@wraithgar](https://github.com/wraithgar)) + +### DEPENDENCIES + +* [`97cb5ec31`](https://github.com/npm/cli/commit/97cb5ec312e151527ba2aab77ed0307917e1d845) + `@npmcli/arborist@2.8.0`: + * Refactor ideal tree building to handle more complicated + peerDependencies use cases. + * Do not modify ideal tree while checking if a peerSet can be placed. 
+* [`7db1a0a26`](https://github.com/npm/cli/commit/7db1a0a264cf67d2a2a3cdc71bbf09b36dc45075) + chore(deps): `mime-types@1.49.0` `mime-db@1.49.0` + +## v7.20.2 (2021-07-27) + +### DEPENDENCIES + +* [`f5aab1f88`](https://github.com/npm/cli/commit/f5aab1f8878b4e9a6f4d47dddc449e18a190e201) + `tar@6.1.1` + * fix: strip absolute paths more comprehensively +* [`ce8fb0f69`](https://github.com/npm/cli/commit/ce8fb0f69ae1b3fdd8834cf073d3d30c2bfc7bc6) + `tar@6.1.2` + * fix: Remove paths from dirCache when no longer dirs +* [`ced85087a`](https://github.com/npm/cli/commit/ced85087ac5fce5984ae28af910357a9a94434d7) + `gauge@3.0.1` + * add missing dependency to package.json + +## v7.20.1 (2021-07-22) + +### BUG FIXES + +* [`009ad1e68`](https://github.com/npm/cli/commit/009ad1e683aa061d7e5c78b9362b0bd1b14ee643) + [#3561](https://github.com/npm/cli/issues/3561) + fix(exit-handler): always warn if not called + ([@wraithgar](https://github.com/wraithgar)) +* [`eb67054c8`](https://github.com/npm/cli/commit/eb67054c8303348b25f9717c8f82c8d8d494a242) + [#3563](https://github.com/npm/cli/issues/3563) + fix(config): consolidate use of npm.color + ([@wraithgar](https://github.com/wraithgar)) + +### DOCUMENTATION + +* [`a014f3d28`](https://github.com/npm/cli/commit/a014f3d284e49cd085cfd060a71a161b93bca9d1) + [#3562](https://github.com/npm/cli/issues/3562) + fix(docs): typo in `npm cmd` docs + ([@wraithgar](https://github.com/wraithgar)) +* [`1fe1c9b74`](https://github.com/npm/cli/commit/1fe1c9b74ea3c3d5bb5b3696b954422b9b55dd91) + [#3523](https://github.com/npm/cli/issues/3523) + fix(docs): updated policy urls + ([@DemiraDimitrova](https://github.com/DemiraDimitrova)) + +### DEPENDENCIES + +* [`d7f29e8c9`](https://github.com/npm/cli/commit/d7f29e8c94ae77661390f82ae72efc1bd6fcfbc3) + `read-package-json-fast@2.0.3`: + - feat: load directories.bin as a bin object +* [`b1fefa73d`](https://github.com/npm/cli/commit/b1fefa73db2f8d9c55b4447ffc1cdbaf8e9bb298) + `npmlog@5.0.0` + * Drop support for node 6 and 8 +* [`b6e09971a`](https://github.com/npm/cli/commit/b6e09971a8f9a3c92188838b69be0a0dda27f0bb) + remove ignored files from node_modules + ([@Ruy Adorno](https://github.com/Ruy Adorno)) +* [`cf737c505`](https://github.com/npm/cli/commit/cf737c505e76a473850c5244b17f3469efbc3c02) + `debug@4.3.2` + +## v7.20.0 (2021-07-15) + +### FEATURES + +* [`f17aca5cd`](https://github.com/npm/cli/commit/f17aca5cdf355aaa7e1f517d1b3bb4213f4df092) + [#3487](https://github.com/npm/cli/issues/3487) + feat: add `npm pkg` command + ([@ruyadorno](https://github.com/ruyadorno)) +* [`98905ae37`](https://github.com/npm/cli/commit/98905ae3759165cd6d6f6306f31acc6a2baa4cde) + [#3471](https://github.com/npm/cli/issues/3471) + feat(config): introduce `location` parameter + ([@nlf](https://github.com/nlf)) + +### BUG FIXES + +* [`4755b0728`](https://github.com/npm/cli/commit/4755b072877f547585cb0e2562261b2c87e2ff0b) + [#3498](https://github.com/npm/cli/issues/3498) + friendlier errors for `ERR_SOCKET_TIMEOUT` + ([@nlf](https://github.com/nlf)) +* [`3ecf19cdc`](https://github.com/npm/cli/commit/3ecf19cdc35684ccb15280b2c34d27496aa1c634) + [#3508](https://github.com/npm/cli/issues/3508) + fix(config): fix noproxy + ([@wraithgar](https://github.com/wraithgar)) +* [`c3bd10e46`](https://github.com/npm/cli/commit/c3bd10e461976a073e6a898c46f8bde28b17668f) + [#3499](https://github.com/npm/cli/issues/3499) + fix(update-notifier): don't force black background + ([@wraithgar](https://github.com/wraithgar)) +* 
[`89483e888`](https://github.com/npm/cli/commit/89483e888acc56386b9ebc4d70a4676e4a5a5cb1) + [#3497](https://github.com/npm/cli/issues/3497) + fix(usage): better audit/boolean flag usage output + ([@wraithgar](https://github.com/wraithgar)) +* [`feeb8e42a`](https://github.com/npm/cli/commit/feeb8e42a7b0510023175dc86269edb544d97601) + [#3495](https://github.com/npm/cli/issues/3495) + fix(publish): obey --ignore-scripts flag + ([@wraithgar](https://github.com/wraithgar)) +* [`103c8c3ef`](https://github.com/npm/cli/commit/103c8c3ef3ba7ff0483557f32eebc4c6298285e3) + [#3479](https://github.com/npm/cli/issues/3479) + chore(exit): log any un-ended timings + ([@wraithgar](https://github.com/wraithgar)) +* [`efc4313c2`](https://github.com/npm/cli/commit/efc4313c2062ffad22aa24e5198d575a7eb5f20e) + [#3482](https://github.com/npm/cli/issues/3482) + chore(refactor): refactor exit handler and tests + ([@wraithgar](https://github.com/wraithgar)) +* [`d8eb49b70`](https://github.com/npm/cli/commit/d8eb49b705acb50b6bed971bfcce4db6e18e73dd) + [#3540](https://github.com/npm/cli/issues/3540) + fix(bundle-and-ignore): case sensitivity cleanup + ([@wraithgar](https://github.com/wraithgar)) + +### DOCUMENTATION + +* [`339145f64`](https://github.com/npm/cli/commit/339145f64f82d540dbc72ef97b54ae20c34315dd) + [#3491](https://github.com/npm/cli/issues/3491) + fix(docs): clarify what install type gets `.bin` + ([@wraithgar](https://github.com/wraithgar)) +* [`74c99755e`](https://github.com/npm/cli/commit/74c99755e522f9cfc0d602841568d5e1f835fcaf) + [#3494](https://github.com/npm/cli/issues/3494) + fix(docs): add npm update example + ([@wraithgar](https://github.com/wraithgar)) +* [`801a52330`](https://github.com/npm/cli/commit/801a52330636008fecadc812916c76fb945ce1f6) + [#3542](https://github.com/npm/cli/issues/3542) + fix(docs): correct Node.js JavaScript stylings + ([@relrelb](https://github.com/relrelb)) +* [`791416713`](https://github.com/npm/cli/commit/791416713d64c072d73bffbab2daf7b8eb3c4868) + [#3546](https://github.com/npm/cli/issues/3546) + fix(docs): how to see background script output + ([@cinderblock](https://github.com/cinderblock)) + +### DEPENDENCIES + +* [`691816f3d`](https://github.com/npm/cli/commit/691816f3de2a679152644a60f3e2c5962df6a81d) + `@npmcli/arborist@2.7.1` + * fixes running prepare scripts for workspaces on reify + * ensure pacote always compares correct integrity values +* [`b9597e944`](https://github.com/npm/cli/commit/b9597e944377e74907607ee280ec1e8c31dd3156) + `make-fetch-happen@9.0.4` + * fix: retry socket timeout failures + * fix: clean up invalid indexes and content after cacache read errors +* [`f573e7c56`](https://github.com/npm/cli/commit/f573e7c56e8505fd6dcc3e5f5b5be401d0a45b58) + `minipass-fetch@1.3.4` + * fix: correctly handle error events that happen after response events +* [`2d5797ea0`](https://github.com/npm/cli/commit/2d5797ea01e17b1559d792613446e1435e588a35) + `pacote@11.3.5` + * fix: show more actionable messages for git pathspec errors + * fix: include all dep types when building for prepare + * fix: do not set mtime when unpacking + +## v7.19.1 (2021-07-01) + +### BUG FIXES + +* [`013f0262d`](https://github.com/npm/cli/commit/013f0262db3e16605820f6117749fd3ebc70f6d1) + [#3469](https://github.com/npm/cli/issues/3469) + fix(exitHandler): write code to logfile + ([@wraithgar](https://github.com/wraithgar)) +* [`0dd0341ac`](https://github.com/npm/cli/commit/0dd0341ac9a65a2df8fc262ad9a56b7351f99d66) + [#3474](https://github.com/npm/cli/issues/3474) + fix(ping): make "npm 
ping" echo a right time + ([@aluneed](https://github.com/aluneed)) +* [`d2e298f3c`](https://github.com/npm/cli/commit/d2e298f3cbab278071480f94ff7d916d42cbf43b) + [#3484](https://github.com/npm/cli/issues/3484) + fix(deprecate): add undeprecate support + ([@wraithgar](https://github.com/wraithgar)) + + ### DOCUMENTATION + +* [`9dd32d08e`](https://github.com/npm/cli/commit/9dd32d08e09c21c9a4517161abfc7eed6518faf2) + [#3485](https://github.com/npm/cli/issues/3485) + fix(docs): remove npm package config override + ([@wraithgar](https://github.com/wraithgar)) +* [`a4e095618`](https://github.com/npm/cli/commit/a4e095618cda72244a18aaff9d6660b9082a2b84) + [#3486](https://github.com/npm/cli/issues/3486) + fix(docs): remove .hooks scripts + ([@wraithgar](https://github.com/wraithgar)) + +### TESTING + +* [`5f8ccccef`](https://github.com/npm/cli/commit/5f8ccccef9fc19229320df8cbcae9fcea8d31388) + [#3483](https://github.com/npm/cli/issues/3483) + chore(tests): clean snapshot for lib/view.js tests + ([@wraithgar](https://github.com/wraithgar)) + +## v7.19.0 (2021-06-24) + +### FEATURES + +* [`23ce3af19`](https://github.com/npm/cli/commit/23ce3af199c8a14ef16c63fc638a1ac21fd9a9b0) + [#3460](https://github.com/npm/cli/issues/3460) + feat(ls): report *why* something is invalid + ([@isaacs](https://github.com/isaacs)) + +### BUG FIXES + +* [`53f81af31`](https://github.com/npm/cli/commit/53f81af319f298a0fdd8f143184c3e89770f24ea) + [#3450](https://github.com/npm/cli/issues/3450) + fix(docs): Improve phrasing of workspace example + ([@lumaxis](https://github.com/lumaxis)) +* [`78da60ffe`](https://github.com/npm/cli/commit/78da60ffefcfd457a4432ce1492ee7b53d854450) + [#3454](https://github.com/npm/cli/issues/3454) + chore(linting): add bin and clean up lib/ls.js +* [`54eae3063`](https://github.com/npm/cli/commit/54eae3063eeb197225ee930525a1316e34ecf34c) + [#3416](https://github.com/npm/cli/issues/3416) + chore(errorHandler): rename to exit handler + ([@wraithgar](https://github.com/wraithgar)) +* [`d0f50b156`](https://github.com/npm/cli/commit/d0f50b156725e5b414050d9e9a59d5fad8a39a3d) + [#3451](https://github.com/npm/cli/issues/3451) + chore(refactor): async npm.load + ([@wraithgar](https://github.com/wraithgar)) +* [`87f67d9ef`](https://github.com/npm/cli/commit/87f67d9efaf6f897cf0d74e738c2625a21044109) + [#3458](https://github.com/npm/cli/issues/3458) + chore(tests): expose real mock npm object + ([@wraithgar](https://github.com/wraithgar)) +* [`f3dce0917`](https://github.com/npm/cli/commit/f3dce0917088dc37795af39e7f6b5089beff984c) + [#3459](https://github.com/npm/cli/issues/3459) + chore(config): snapshot config descriptions + ([@wraithgar](https://github.com/wraithgar)) +* [`6254b6f72`](https://github.com/npm/cli/commit/6254b6f726a301908f73b36ccfa52cd4fd6619e5) + [#3234](https://github.com/npm/cli/issues/3234) + [#3455](https://github.com/npm/cli/issues/3455) + @npmcli/package-json refactor + ([@ruyadorno](https://github.com/ruyadorno)) + +### DEPENDENCIES + +* [`fe4138381`](https://github.com/npm/cli/commit/fe4138381fd2e8c919bb9f794e20033ff049f783) + `@npmcli/arborist@2.6.4`: + * bin: allow turning off timer display with --timers=false + * fix: do not try to inflate a fresh lockfile + * fix(diff): walk target children if root is a link + * chore: @npmcli/package-json refactor + +## v7.18.1 (2021-06-17) + +## BUG FIXES + +* [`fce30e423`](https://github.com/npm/cli/commit/fce30e423745a2b81530176d2f08ca84896eef4c) + [#3435](https://github.com/npm/cli/issues/3435) + fix(docs): rebuild config docs + 
([@wraithgar](https://github.com/wraithgar)) + +## v7.18.0 (2021-06-17) + +## FEATURES + +* [`ae285b391`](https://github.com/npm/cli/commit/ae285b39191f3a0c4edfb045a334057bef4567b5) + [#3408](https://github.com/npm/cli/issues/3408) + feat(ls): support `--package-lock-only` flag + ([@G-Rath](https://github.com/G-Rath)) +* [`c984fb59c`](https://github.com/npm/cli/commit/c984fb59c5af087b91acd927cbbacad7c6a46576) + [#3420](https://github.com/npm/cli/issues/3420) + feat(pack): add pack-destination config + ([@wraithgar](https://github.com/wraithgar)) + +## BUG FIXES + +* [`40829ec40`](https://github.com/npm/cli/commit/40829ec40c33a6d23f18715e60e3395bdcb0467e) + [#2554](https://github.com/npm/cli/issues/2554) + [#3399](https://github.com/npm/cli/issues/3399) + fix(link): do not prune packages + ([@ruyadorno](https://github.com/ruyadorno)) +* [`102d4e6fb`](https://github.com/npm/cli/commit/102d4e6fb3c3b02148dbeee977a7d1e6372340d5) + [#3417](https://github.com/npm/cli/issues/3417) + fix(workspaces): explicitly error in global mode + ([@wraithgar](https://github.com/wraithgar)) +* [`993df3041`](https://github.com/npm/cli/commit/993df3041f5bdaa496c3c8d80f00d16b9cf0a1e6) + [#3423](https://github.com/npm/cli/issues/3423) + fix(docs): ls command usage instructions + ([@gurdiga](https://github.com/gurdiga)) +* [`dcc13662c`](https://github.com/npm/cli/commit/dcc13662c1d3e22eaf392647a9cddbb5b0710d24) + [#3418](https://github.com/npm/cli/issues/3418) + fix(config): update link definition + ([@wraithgar](https://github.com/wraithgar)) +* [`b19e56c2e`](https://github.com/npm/cli/commit/b19e56c2e54c035518165470c10480201cefa997) + [#3382](https://github.com/npm/cli/issues/3382) + [#3429](https://github.com/npm/cli/issues/3429) + fix(ls): respect prod config for workspaces + ([@ruyadorno](https://github.com/ruyadorno)) +* [`c99b8b53c`](https://github.com/npm/cli/commit/c99b8b53c3d7a9b0daa6d4416e9c40202ddd59a2) + [#3430](https://github.com/npm/cli/issues/3430) + fix(config): add flatOptions.npxCache + ([@wraithgar](https://github.com/wraithgar)) +* [`e5abf2a21`](https://github.com/npm/cli/commit/e5abf2a2171d95bafc0993f337230d2b6633a6ed) + [#3386](https://github.com/npm/cli/issues/3386) + chore(libnpmdiff): added as workspace + ([@ruyadorno](https://github.com/ruyadorno)) +* [`c6a8734d7`](https://github.com/npm/cli/commit/c6a8734d7d6e4b6d061110a01e45e1d418d56489) + [#3388](https://github.com/npm/cli/issues/3388) + chore(refactor): finish passing npm context + ([@wraithgar](https://github.com/wraithgar)) +* [`d16ee452a`](https://github.com/npm/cli/commit/d16ee452a4a034caada4e9b96faf5c453a658876) + [#3426](https://github.com/npm/cli/issues/3426) + chore(tests): use path.resolve + ([@wraithgar](https://github.com/wraithgar)) + +## DEPENDENCIES + +* [`6b951c042`](https://github.com/npm/cli/commit/6b951c042084e639be929a7ea783c2d85b311bad) + `libnpmversion@1.2.1`: + * fix(retrieve-tag): pass match in a way git accepts +* [`de820a021`](https://github.com/npm/cli/commit/de820a0213f54bbcd155dff25b05d072d5c4a57a) + `npm-package-arg@8.1.5`: + * fix: Make file: URLs (mostly) RFC 8909 compliant +* [`16a95c647`](https://github.com/npm/cli/commit/16a95c64731609c69630c17c45b16edb53ee81b2) + `@npmcli/arborist@2.6.3`: + * fix(inventory) handle old and british forms of 'license' + * fix: removes [_complete] check to apply correct metadata + * ensure node.fsParent is not set to node itself + * fix extraneous deps on load-actual +* [`d341bd86c`](https://github.com/npm/cli/commit/d341bd86ce05fabe44f3be5888ba2611b61914b4) + 
`make-fetch-happen@9.0.3`: + * fix: implement cache modes correctly +* [`c90612cf5`](https://github.com/npm/cli/commit/c90612cf566d563199553749900d8b05367e2532) + `libnpmexec@2.0.0`: + * use new npxCache option + + +## v7.17.0 (2021-06-10) + +## FEATURES + +* [`ef668ab57`](https://github.com/npm/cli/commit/ef668ab57b15789c6e2971ac39d8ecb3757629fa) + [#3368](https://github.com/npm/cli/issues/3368) + feat(diff): add workspace support + ([@wraithgar](https://github.com/wraithgar)) + +## BUG FIXES + +* [`26d00c477`](https://github.com/npm/cli/commit/26d00c47785dfb300eab6a926f9d7c4d566776b1) + [#3364](https://github.com/npm/cli/issues/3364) + fix(tests): mock writeFile in pack tests so we dont create 0 byte files in the repo + ([@nlf](https://github.com/nlf)) +* [`f130a81d6`](https://github.com/npm/cli/commit/f130a81d62bf4f540ab252a09ff5a618827f9265) + [#3367](https://github.com/npm/cli/issues/3367) + fix(linting): add scripts, docs, smoke-tests + ([@wraithgar](https://github.com/wraithgar)) +* [`992799cd8`](https://github.com/npm/cli/commit/992799cd8c4427ed8c57270b399b2d6bbc94f2a8) + [#3383](https://github.com/npm/cli/issues/3383) + fix(login): properly save scope if defined + ([@wraithgar](https://github.com/wraithgar)) + +## DOCUMENTATION + +* [`844229519`](https://github.com/npm/cli/commit/844229519dd51d0bcafc8c39109a671b6333cf6c) + [#3392](https://github.com/npm/cli/issues/3392) + docs(workspaces): update using npm section + Added examples of using `npm init` to bootstrap a new workspace and a + section on how to add/manage dependencies to workspaces. + ([@ruyadorno](https://github.com/ruyadorno)) + +## DEPENDENCIES + +* [`3654890fb`](https://github.com/npm/cli/commit/3654890fb3be8b57e73f7e6ac4d895017603ca9e) + remove ignored dep + ([@nlf](https://github.com/nlf)) +* [`a4a0e68a9`](https://github.com/npm/cli/commit/a4a0e68a9e34a4c99e10e4fb8c5f89d323a4192f) + [#3362](https://github.com/npm/cli/issues/3362) + check less stuff into node_modules + ([@isaacs](https://github.com/isaacs)) +* [`7d5b049b6`](https://github.com/npm/cli/commit/7d5b049b654f96fc4c49d2f18a19adb4aa0f7d3c) + [#3365](https://github.com/npm/cli/issues/3365) + chore(package) Use a "files" list + ([@isaacs](https://github.com/isaacs)) + +## v7.16.0 (2021-06-03) + +## FEATURES + +* [`e92b5f2ba`](https://github.com/npm/cli/commit/e92b5f2ba07746ae07646566f3dc73c9e004a2fc) + `npm-registry-fetch@11.0.0` + * feat: improved logging of cache status + +## BUG FIXES + +* [`e864bd3ce`](https://github.com/npm/cli/commit/e864bd3ce8e8467e0f8ebb499dc2daf06143bc33) + [#3345](https://github.com/npm/cli/issues/3345) + fix(update-notifier): do not update notify when installing npm@spec + ([@isaacs](https://github.com/isaacs)) +* [`aafe23572`](https://github.com/npm/cli/commit/aafe2357279230e333d3342752a28fce6b9cd152) + [#3348](https://github.com/npm/cli/issues/3348) + fix(update-notifier): parallelize check for updates + ([@isaacs](https://github.com/isaacs)) + +## DOCUMENTATION + +* [`bc9c57dda`](https://github.com/npm/cli/commit/bc9c57dda7cf3abcdee17550205daf1a82e90438) + [#3353](https://github.com/npm/cli/issues/3353) + fix(docs): remove documentation for '--scripts-prepend-node-path' as it was removed in npm@7 + ([@gimli01](https://github.com/gimli01)) +* [`ca2822110`](https://github.com/npm/cli/commit/ca28221103aa0e9ccba7043ac515a541b625c53a) + [#3360](https://github.com/npm/cli/issues/3360) + fix(docs): link foreground-scripts w/ loglevel + ([@wraithgar](https://github.com/wraithgar)) +* 
[`fb630b5a9`](https://github.com/npm/cli/commit/fb630b5a9af86c71602803297634ec291eeedee0) + [#3342](https://github.com/npm/cli/issues/3342) + chore(docs): manage docs as a workspace + ([@ruyadorno](https://github.com/ruyadorno)) + +## DEPENDENCIES + +* [`54de5c6a4`](https://github.com/npm/cli/commit/54de5c6a4cd593bbbe364132f3f7348586441b31) + `npm-package-arg@8.1.4`: + * fix: trim whitespace from fetchSpec + * fix: handle file: when root directory begins with a special character +* [`e92b5f2ba`](https://github.com/npm/cli/commit/e92b5f2ba07746ae07646566f3dc73c9e004a2fc) + `make-fetch-happen@9.0.1` + * breaking: complete refactor of caching. drops warning headers, + prevents cache indexes from growing for every request, correctly + handles varied requests to the same url, and now caches redirects. + * fix: support url-encoded proxy authorization + * fix: do not lazy-load proxy agents or agentkeepalive. fixes the + intermittent failures to update npm on slower connections. + `npm-registry-fetch@11.0.0` + * breaking: drop handling of deprecated warning headers + * docs: fix header type for npm-command + * docs: update registry param + * feat: improved logging of cache status +* [`23c50a45f`](https://github.com/npm/cli/commit/23c50a45f59ea3ed4c36f35df15e54adc5603034) + `make-fetch-happen@9.0.2`: + * fix: work around negotiator's lazy loading + +## AUTOMATION + +* [`c4ef78b08`](https://github.com/npm/cli/commit/c4ef78b08e6859fc191cabbe58c8d88c070e0612) + [#3344](https://github.com/npm/cli/issues/3344) + fix(automation): update incorrect variable name in create-cli-deps-pr workflow + ([@gimli01](https://github.com/gimli01)) + +## v7.15.1 (2021-05-31) + +### BUG FIXES + +* [`598a17a26`](https://github.com/npm/cli/commit/598a17a2671c9e3bc204dddd6488169c9a72c6a1) + [#3329](https://github.com/npm/cli/issues/3329) + fix(libnpmexec): don't detach output from npm + ([@wraithgar](https://github.com/wraithgar)) + +### DEPENDENCIES + +* [`c4fc03e9e`](https://github.com/npm/cli/commit/c4fc03e9eb3a6386e8feacb67c19f0a1578dfe38) + `@npmcli/arborist@2.6.1` + * fixes reifying deps with mismatching version ranges between + actual and virtual trees +* [`9159fa62a`](https://github.com/npm/cli/commit/9159fa62a10dee09daef178fc7be161a02824004) + `libnpmexec@1.2.0` + +## v7.15.0 (2021-05-27) + +### FEATURES + +* [`399ff8cbc`](https://github.com/npm/cli/commit/399ff8cbccd5198f637518ccafa86c43bab47a4a) + [#3312](https://github.com/npm/cli/issues/3312) + feat(link): add workspace support + ([@isaacs](https://github.com/isaacs)) + +### BUG FIXES + +* [`46a9bcbcb`](https://github.com/npm/cli/commit/46a9bcbcb0bb2435dca6f45a61b8631f580c7f06) + [#3282](https://github.com/npm/cli/issues/3282) + fix(docs): proper postinstall script file name + ([@KevinFCormier](https://github.com/KevinFCormier)) +* [`83590d40f`](https://github.com/npm/cli/commit/83590d40f94347f21714dbd158b9ddcad9c82de9) + [#3272](https://github.com/npm/cli/issues/3272) + fix(ls): show relative paths from root + ([@isaacs](https://github.com/isaacs)) +* [`a574b518a`](https://github.com/npm/cli/commit/a574b518ae5b8f0664ed388cf1be6288d8c2e68d) + [#3304](https://github.com/npm/cli/issues/3304) + fix(completion): restore IFS even if `npm completion` returns error + ([@NariyasuHeseri](https://github.com/NariyasuHeseri)) +* [`554e8a5cd`](https://github.com/npm/cli/commit/554e8a5cd7034052a59a9ada31e4b8f73712211a) + [#3311](https://github.com/npm/cli/issues/3311) + set audit exit code properly + ([@isaacs](https://github.com/isaacs)) +* 
[`4a4fbe33c`](https://github.com/npm/cli/commit/4a4fbe33c51413adcd558b4af6f1e204b1b87e41) + [#3268](https://github.com/npm/cli/issues/3268) + [#3285](https://github.com/npm/cli/issues/3285) + fix(publish): skip private workspaces + ([@ruyadorno](https://github.com/ruyadorno)) + +### DOCUMENTATION + +* [`3c53d631f`](https://github.com/npm/cli/commit/3c53d631f557cf2484e2f6a6172c44e36aea4817) + [#3307](https://github.com/npm/cli/issues/3307) + fix(docs): typo in package-lock.json docs + ([@rethab](https://github.com/rethab)) +* [`96367f93f`](https://github.com/npm/cli/commit/96367f93f46c24494d084c8b8d34e4de9cd375da) + rebuild npm-pack doc + ([@isaacs](https://github.com/isaacs)) +* [`64b13dd10`](https://github.com/npm/cli/commit/64b13dd1082b6ca7eac4e8e329bfdd8cd8daf157) + [#3313](https://github.com/npm/cli/issues/3313) + Drop stale Python 3<->node-gyp remark + ([@spencerwilson](https://github.com/spencerwilson)) + +### DEPENDENCIES + +* [`7b56bfdf3`](https://github.com/npm/cli/commit/7b56bfdf3f2ac67a926fc7893b883a16b46eb3fd) + `cacache@15.2.0`: + * feat: allow fully deleting indices + * feat: add a validateEntry option to compact + * chore: lint + * chore: use standard npm style release scripts +* [`dbbc151a3`](https://github.com/npm/cli/commit/dbbc151a3bcf89e2627dc267063edd185ead1cb8) + `npm-audit-report@2.1.5`: + * fix(exit-code): account for null auditLevel default (#46) +* [`5b2604507`](https://github.com/npm/cli/commit/5b26045076477d3d350f539e60adf48a80376fda) + chore(package-lock): update devDependencies + ([@Gar](https://github.com/Gar)) + +### AUTOMATION + +* [`3d5df0082`](https://github.com/npm/cli/commit/3d5df0082ae904dacdea8644286e8362d4a2ed50) + [#3294](https://github.com/npm/cli/issues/3294) + chore(ci): move node release PR workflow to cli repo + ([@gimli01](https://github.com/gimli01)) + +## v7.14.0 (2021-05-20) + +### FEATURES + +* [`0d1a9d787`](https://github.com/npm/cli/commit/0d1a9d78779dc015242fc03d2dad2039004fa2df) + [#3227](https://github.com/npm/cli/issues/3227) + feat(install): add workspaces support to npm install commands + ([@isaacs](https://github.com/isaacs)) +* [`c18626f04`](https://github.com/npm/cli/commit/c18626f047e3a0fedd3c86554a4a0a8f27925e77) + [#3250](https://github.com/npm/cli/issues/3250) + feat(ls): add workspaces support + ([@ruyadorno](https://github.com/ruyadorno)) +* [`41099d395`](https://github.com/npm/cli/commit/41099d3958d08f166313b7eb69b76458f8f9224c) + [#3265](https://github.com/npm/cli/issues/3265) + feat(explain): add workspaces support + ([@ruyadorno](https://github.com/ruyadorno)) +* [`fde354669`](https://github.com/npm/cli/commit/fde35466915b5ac5958c827fa7e919e1f186db51) + [#3251](https://github.com/npm/cli/issues/3251) + feat(unpublish): add workspace/dry-run support + ([@wraithgar](https://github.com/wraithgar)) +* [`83df3666c`](https://github.com/npm/cli/commit/83df3666cd82819230fb45f2a40afd531fe3b3c7) + [#3260](https://github.com/npm/cli/issues/3260) + feat(outdated): add workspaces support + ([@ruyadorno](https://github.com/ruyadorno)) +* [`63a7635f7`](https://github.com/npm/cli/commit/63a7635f7a2225a4edd1fe92f94a563965ac06c7) + [#3217](https://github.com/npm/cli/issues/3217) + feat(pack): add support to json config/output + ([@mrmlnc](https://github.com/mrmlnc)) + +### BUG FIXES + +* [`faa12ccc2`](https://github.com/npm/cli/commit/faa12ccc26b5f0790f79b2589780e536f4284491) + [#3253](https://github.com/npm/cli/issues/3253) + fix search description typos + ([@juanpicado](https://github.com/juanpicado)) +* 
[`2f5c28a68`](https://github.com/npm/cli/commit/2f5c28a68719e948d2efedf463ebcb35972aaefb) + [#3243](https://github.com/npm/cli/issues/3243) + fix(docs): autogenerate config docs for commands + ([@isaacs](https://github.com/isaacs)) + +### DEPENDENCIES + +* [`ec256a14a`](https://github.com/npm/cli/commit/ec256a14aa6eb2bd59fd55dcc6a4bc0148662c4e) + `@npmcli/arborist@2.6.0` +* [`5f15aba86`](https://github.com/npm/cli/commit/5f15aba866026e7c0d6844e6c07a528dc7454f14) + `cacache@15.1.0` +* [`b3add87e6`](https://github.com/npm/cli/commit/b3add87e686968b7af3067c685d2561baf90e397) + [#3262](https://github.com/npm/cli/pull/3262) + `npm-registry-client@10.1.2`: + * fixed sso login token + +## v7.13.0 (2021-05-13) + +### FEATURES + +* [`076420c14`](https://github.com/npm/cli/commit/076420c149d097056f687e44e21744b743b86e4e) + [#3231](https://github.com/npm/cli/issues/3231) + feat(publish): add workspace support + ([@wraithgar](https://github.com/wraithgar)) +* [`370b36a36`](https://github.com/npm/cli/commit/370b36a36ca226840761e4214cbccaf2a1a90e3c) + [#3241](https://github.com/npm/cli/issues/3241) + feat(fund): add workspaces support + ([@ruyadorno](https://github.com/ruyadorno)) + +### DEPENDENCIES + +* [`0c18e4f77`](https://github.com/npm/cli/commit/0c18e4f774562fa054fedf323bea25805ebf39b3) + `@npmcli/arborist@2.5.0` +* [`b551c6811`](https://github.com/npm/cli/commit/b551c6811251dbc901f47fea3c137f93e205a9e4) + `libnpmfund@1.1.0` + +## v7.12.1 (2021-05-10) + +### BUG FIXES + +* [`de49f58f5`](https://github.com/npm/cli/commit/de49f58f55dc2ac3a5057cd492a43c32ae41381e) + [#3216](https://github.com/npm/cli/issues/3216) + fix(contributing): link to proper cli repo + ([@mrmlnc](https://github.com/mrmlnc)) +* [`1d092144e`](https://github.com/npm/cli/commit/1d092144eaaabff63ac8424b40b2286822be7677) + [#3203](https://github.com/npm/cli/issues/3203) + fix(packages): locale-agnostic string sorting + ([@isaacs](https://github.com/isaacs)) +* [`0696fca13`](https://github.com/npm/cli/commit/0696fca13d10726e04ca97ff50eef7bd7455a3ab) + [#3209](https://github.com/npm/cli/issues/3209) + fix(view): fix non-registry specs + ([@wraithgar](https://github.com/wraithgar)) +* [`71ac93597`](https://github.com/npm/cli/commit/71ac935976390e4fd05987ff510049f82bc6e2a9) + [#3206](https://github.com/npm/cli/issues/3206) + chore(github): Convert md issue template to yaml + ([@lukehefson](https://github.com/lukehefson)) +* [`6fb386d3b`](https://github.com/npm/cli/commit/6fb386d3bfbaa8e4771ff87a08de1f3aa6f9b34d) + [#3201](https://github.com/npm/cli/issues/3201) + fix(tests): increase test fuzziness + ([@wraithgar](https://github.com/wraithgar)) +* [`f3a662fcd`](https://github.com/npm/cli/commit/f3a662fcd869653f9753aef3d40cc96ed28ed509) + [#3211](https://github.com/npm/cli/issues/3211) + fix(tests): use config defaults + ([@wraithgar](https://github.com/wraithgar)) + +### DEPENDENCIES + +* [`285976fd1`](https://github.com/npm/cli/commit/285976fd12f037f59da47307d98df7ebda5278d9) + `@npmcli/arborist@2.4.4` + * fix(reify): properly save spec if prerelease +* [`f9f24d17c`](https://github.com/npm/cli/commit/f9f24d17c29c421de3c9b82c6b98a40268aeb920) + `libnpmexec@1.1.1` + * fix(add): Specify 'en' locale to String.localeCompare +* [`cb9f17499`](https://github.com/npm/cli/commit/cb9f174996dbb4779a1be82890564f9abffb11f4) + `glob@7.1.7` + * force 'en' locale in string sorting +* [`24b4e4a41`](https://github.com/npm/cli/commit/24b4e4a41b451db3de381fac6b719149db14c288) + `ignore-walk@3.0.4` + * Avoid locale-specific sorting issues +* 
[`1eb7e5c7d`](https://github.com/npm/cli/commit/1eb7e5c7d466293b472c2506c64e5a89ec84ac2f) + `@npmcli/arborist@2.4.3` + * guard against locale-specific sorting +* [`a6a826067`](https://github.com/npm/cli/commit/a6a826067cb46c711521772c2d0158257d54400a) + `npm-packlist@2.2.2`: + * fix(sort): avoid locale-dependent sorting issues + +## v7.12.0 (2021-05-06) + +### FEATURES + +* [`701627c51`](https://github.com/npm/cli/commit/701627c5169934e59da2959d76a49c77278cc9dc) + [#3098](https://github.com/npm/cli/issues/3098) + feat(cache): Allow `add` to accept multiple specs + ([@mjsir911](https://github.com/mjsir911)) +* [`59171f030`](https://github.com/npm/cli/commit/59171f0304f048a009f1697eec6f74f778bc52ff) + [#3187](https://github.com/npm/cli/issues/3187) + feat(config): add workspaces boolean to user-agent + ([@nlf](https://github.com/nlf)) + +### BUG FIXES + +* [`2c9b8713c`](https://github.com/npm/cli/commit/2c9b8713c4c88fbd0c3c48eb0de84dbd7269398f) + [#3182](https://github.com/npm/cli/issues/3182) + fix(docs): fix broken links + ([@wangsai](https://github.com/wangsai)) +* [`88cbc8c44`](https://github.com/npm/cli/commit/88cbc8c447cbaef20b5a8f19246211ce4918f4d8) + [#3198](https://github.com/npm/cli/issues/3198) + fix(tests): reflect new libnpmexec logic + +### DEPENDENCIES + +* [`d01ce5e13`](https://github.com/npm/cli/commit/d01ce5e132cb4661698012fd5017753c2bdb660b) + `libnpmexec@1.1.0`: + * feat: add walk up dir lookup to satisfy local bins +* [`81c1dfaaa`](https://github.com/npm/cli/commit/81c1dfaaaf918229316a975aa8075769ffafdb6d) + `@npmcli/arborist@2.4.2`: + * fix(add): save packages in the right place + * fix(reify): do not clean up nodes with no parent + * fix(audit): support alias specs & root package names +* [`87c2303ea`](https://github.com/npm/cli/commit/87c2303eaa6edfa5309da0a30f5ad291b6d57640) + `@npmcli/git@2.0.9`: + * fix(clone): Do not allow git replacement objects by default +* [`99ff40dff`](https://github.com/npm/cli/commit/99ff40dff5e5e55a5d5f045ba90e76c08174ca38) + `npm-packlist@2.2.0`: + * feat(npmignore): Do not force include history, changelogs, notice + * fix(package.json): add missing bin/index.js to files + +## v7.11.2 (2021-04-29) + +### BUG FIXES + +* [`c371f183e`](https://github.com/npm/cli/commit/c371f183ebe833c2439e98b679f14e7a59f22c34) + [#3137](https://github.com/npm/cli/issues/3137) + [#3140](https://github.com/npm/cli/issues/3140) + fix(ls): do not warn on missing optional deps + ([@isaacs](https://github.com/isaacs)) +* [`861f606c7`](https://github.com/npm/cli/commit/861f606c7609d177c644814a171581afbb72f6db) + [#3156](https://github.com/npm/cli/issues/3156) + fix(build): make prune rule work on case-sensitive file systems + ([@lpinca](https://github.com/lpinca)) + +### DEPENDENCIES + +* [`fb79d89a0`](https://github.com/npm/cli/commit/fb79d89a07ef03e76633db275463f701d3dae42f) + `tap@15.0.6` +* [`ce3820043`](https://github.com/npm/cli/commit/ce38200437e9ed527df973794909b2699909bc9b) + `@npmcli/arborist@2.4.1` + * fix: prevent and eliminate unnecessary duplicates + * fix: support resolvable partial intersecting peerSets + +### DOCUMENTATION + +* [`e479f1dac`](https://github.com/npm/cli/commit/e479f1dac9a7639304d20116583034861635b2b1) + [#3146](https://github.com/npm/cli/issues/3146) + mention `directories.bin` in `bin` + ([@felipecrs](https://github.com/felipecrs)) + +## v7.11.1 (2021-04-23) + +### DEPENDENCIES + +* [`7925cca24`](https://github.com/npm/cli/commit/7925cca24543d9e1a8297844b3e53e11057643ef) + `pacote@11.3.3`: + * fix(registry): normalize manfest +* 
[`b61eac693`](https://github.com/npm/cli/commit/b61eac693df82c52b955e6c18ec4dcf4cedea8a3) + [#3130](https://github.com/npm/cli/issues/3130) + `@npmcli/config@2.2.0` +* [`c74e67fc6`](https://github.com/npm/cli/commit/c74e67fc6572bb001d74c7486c05d211a0e03de8) + [#3130](https://github.com/npm/cli/issues/3130) + `npm-registry-fetch@10.1.1` + +### DOCUMENTATION + +* [`efdd7dd44`](https://github.com/npm/cli/commit/efdd7dd4427a0ee856c18aab1df2d3d30a307997) + Remove unused and incorrectly documented `--always-auth` config definition + ([@isaacs](https://github.com/isaacs)) + +## v7.11.0 (2021-04-22) + +### FEATURES + +* [`4c1f16d2c`](https://github.com/npm/cli/commit/4c1f16d2c29a7a56c19b97f2820e6305a6075083) + [#3095](https://github.com/npm/cli/issues/3095) + feat(init): add workspaces support + ([@ruyadorno](https://github.com/ruyadorno)) + +### BUG FIXES + +* [`42ca59eee`](https://github.com/npm/cli/commit/42ca59eeedd3e402aa1c606941f7f52864e6039b) + [#3086](https://github.com/npm/cli/issues/3086) + fix(ls): do not exit with error when all problems are extraneous deps + ([@nlf](https://github.com/nlf)) +* [`2aecec591`](https://github.com/npm/cli/commit/2aecec591df6866e27d0b17dc49cef8f7d738d77) + [#2724](https://github.com/npm/cli/issues/2724) + [#3119](https://github.com/npm/cli/issues/3119) + fix(ls): make --long work when missing deps + ([@ruyadorno](https://github.com/ruyadorno)) +* [`42e0587a9`](https://github.com/npm/cli/commit/42e0587a9ea6940a5d5be5903370ad1113feef21) + [#3115](https://github.com/npm/cli/issues/3115) + fix(pack): refuse to pack invalid packument + ([@wraithgar](https://github.com/wraithgar)) +* [`1c4eff7b5`](https://github.com/npm/cli/commit/1c4eff7b513b8e84876818ede014d3ab19d203c6) + [#3126](https://github.com/npm/cli/issues/3126) + fix(logout): use isBasicAuth attribute + ([@wraithgar](https://github.com/wraithgar)) + +### DOCUMENTATION + +* [`c93f1c39e`](https://github.com/npm/cli/commit/c93f1c39e326feff0857712a10ef6183fbafe1ab) + [#3101](https://github.com/npm/cli/issues/3101) + chore(docs): update view docs + ([@wraithgar](https://github.com/wraithgar)) +* [`c4ff4bc11`](https://github.com/npm/cli/commit/c4ff4bc113c3a5b6ee5d74ab0b1adee95169ed32) + [npm/statusboard#313](https://github.com/npm/statusboard/issues/313) + [#3109](https://github.com/npm/cli/issues/3109) + fix(usage): fix refs to ws shorthand + ([@ruyadorno](https://github.com/ruyadorno)) + +### DEPENDENCIES + +* [`83166ebcc`](https://github.com/npm/cli/commit/83166ebcc4ba5e3bf215f08151437d96637f4f33) + `npm-registry-fetch@10.1.0` + * feat(auth): set isBasicAuth +* [`e02bda6da`](https://github.com/npm/cli/commit/e02bda6da68b8e8f490bf270cb5d6adec81685ea) + `npm-registry-fetch@10.0.0` + * feat(auth) load/send based on URI, not registry +* [`a0382deba`](https://github.com/npm/cli/commit/a0382deba346b09834e75db89e1fd4527f1f07dd) + `@npmcli/run-script@1.8.5` + * fix: windows ComSpec env variable name +* [`7f82ef5a8`](https://github.com/npm/cli/commit/7f82ef5a84d70e28983ed43ba1d8aced0fb4ba45) + `pacote@11.3.2` +* [`35e49b94f`](https://github.com/npm/cli/commit/35e49b94fba478a63df6cc9b62816eafe5f1fbdd) + `@npmcli/arborist@2.4.0` +* [`95faf8ce6`](https://github.com/npm/cli/commit/95faf8ce6c007082a02c160977da194c08ee9d82) + `libnpmaccess@4.0.2` +* [`17fffc0e4`](https://github.com/npm/cli/commit/17fffc0e42b2a9e7b84691093e45ba511906cbfa) + `libnpmhook@6.0.2` +* [`1b5a213aa`](https://github.com/npm/cli/commit/1b5a213aaf39652661ba72ba2e8751f049b170fb) + `libnpmorg@2.0.2` +* 
[`9f83e6484`](https://github.com/npm/cli/commit/9f83e6484aa163d066f318df42ec89c8234b614e) + `libnpmpublish@4.0.1` +* [`251f788c5`](https://github.com/npm/cli/commit/251f788c554a198ab42682453fa5504f8abe93fe) + `libnpmsearch@3.1.1` +* [`35873a989`](https://github.com/npm/cli/commit/35873a989fe67041ddcf30a0a278ed77ace5ee3c) + `libnpmteam@2.0.3` +* [`23e12b4d8`](https://github.com/npm/cli/commit/23e12b4d8f63d765a48036e7bb08f53319c73304) + `npm-profile@5.0.3` + +## v7.10.0 (2021-04-15) + +### FEATURES + +* [`f9b639eb6`](https://github.com/npm/cli/commit/f9b639eb6c504ded6cdd59e83e26a392bfe81e5d) + [#3052](https://github.com/npm/cli/issues/3052) + feat(bugs): fall back to email if provided + ([@Yash-Singh1](https://github.com/Yash-Singh1)) +* [`8c9e24778`](https://github.com/npm/cli/commit/8c9e24778db867cb3148bc247c7e321639aa9f58) + [#3055](https://github.com/npm/cli/issues/3055) + feat(version): add workspace support + ([@wraithgar](https://github.com/wraithgar)) + +### DEPENDENCIES + +* [`f1e6743a6`](https://github.com/npm/cli/commit/f1e6743a6e8e32ddad6d1964eb05d17e6c50a456) + `libnpmversion@1.2.0` + * feat(retrieve-tag): retrieve unannotated git tags + * fix(retrieve-tag): use semver to look for semver +* [`3b476a24c`](https://github.com/npm/cli/commit/3b476a24cf0b2823fdf92505b84bddde4fcc8b14) + `@npmcl/git@2.0.8` + * fix(git): do not use shell when calling git +* [`dfcd0c1e2`](https://github.com/npm/cli/commit/dfcd0c1e2331c1f4b6573466b50505772eddaf22) + [#3069](https://github.com/npm/cli/issues/3069) + `tap@15.0.2` + +### DOCUMENTATION + +* [`90b61eda9`](https://github.com/npm/cli/commit/90b61eda9b41af108ed69fc0c43a522a92745047) + [#3053](https://github.com/npm/cli/issues/3053) + fix(contributing.md): explicitely outline dep updates + ([@darcyclarke](https://github.com/darcyclarke)) + +## v7.9.0 (2021-04-08) + +### FEATURES + +* [`1f3e88eba`](https://github.com/npm/cli/commit/1f3e88ebaf4901d8f9f07b43404d824fef7e5ff5) + [#3032](https://github.com/npm/cli/issues/3032) + feat(dist-tag): add workspace support + ([@nlf](https://github.com/nlf)) +* [`6e31df4e7`](https://github.com/npm/cli/commit/6e31df4e7957337962fd3d93e495931e3592bb9e) + [#3033](https://github.com/npm/cli/issues/3033) + feat(pack): add workspace support + ([@wraithgar](https://github.com/wraithgar)) + +### DEPENDENCIES + +* [`ba4f7fea8`](https://github.com/npm/cli/commit/ba4f7fea8fca8e3509469a218f094fe69095888b) + `licensee@8.2.0` + +## v7.8.0 (2021-04-01) + +### FEATURES + + +* [`8bcc5d73f`](https://github.com/npm/cli/commit/8bcc5d73f35434e781ff56419dd7f0c380efd072) + [#2972](https://github.com/npm/cli/issues/2972) + feat(workspaces): add repo and docs + ([@wraithgar](https://github.com/wraithgar)) +* [`ec520ce32`](https://github.com/npm/cli/commit/ec520ce32d5e834a32ebd58491df4200e01ce690) + [#2998](https://github.com/npm/cli/issues/2998) + feat(set-script): implement workspaces +* [`32717a60e`](https://github.com/npm/cli/commit/32717a60eb55fcf8c7e5016223bfee78a6daba0e) + [#3001](https://github.com/npm/cli/issues/3001) + feat(view): add workspace support + ([@wraithgar](https://github.com/wraithgar)) +* [`7b177e43f`](https://github.com/npm/cli/commit/7b177e43f3bfb558bcd8723cdb2166a3df19647a) + [#3014](https://github.com/npm/cli/issues/3014) + feat(config): add 'envExport' flag + ([@isaacs](https://github.com/isaacs)) + +### BUG FIXES + +* [`4c4252348`](https://github.com/npm/cli/commit/4c4252348c538246e1072421d65f4558dc948080) + [#3016](https://github.com/npm/cli/issues/3016) + fix(usage): specify the key each time for multiples 
+ ([@isaacs](https://github.com/isaacs)) +* [`9237d375b`](https://github.com/npm/cli/commit/9237d375b0b7d34c7dc5ba70aec7f616f4133732) + [#3013](https://github.com/npm/cli/issues/3013) + fix(docs): add workspaces configuration + ([@wraithgar](https://github.com/wraithgar)) +* [`cb6eb0d20`](https://github.com/npm/cli/commit/cb6eb0d206b7e2f63d5c7a7a17bea4aed1b9f2bf) + [#3015](https://github.com/npm/cli/issues/3015) + fix(ERESOLVE): better errors when current is missing + ([@isaacs](https://github.com/isaacs)) + +### DEPENDENCIES + +* [`61da39beb`](https://github.com/npm/cli/commit/61da39beb5373320e2b591b61ecd6596eeaba6ed) + `@npmcli/config@2.1.0` + * feat(config): add support for envExport:false +* [`fb095a708`](https://github.com/npm/cli/commit/fb095a708a1f930bbd0195446ac611b82bfeff14) + `@npmcli/arborist@2.3.0`: + * [#2896](https://github.com/npm/cli/issues/2896) Provide currentEdge in + ERESOLVE if known, and address self-linking edge case. + * Add/remove dependencies to/from workspaces when set, not root project + * Only reify the portions of the dependency graph identified by the + `workspace` configuration value. + * Do not recursively `chown` the project root path. + +## v7.7.6 (2021-03-29) + +### BUG FIXES + +* [`9dd2ed518`](https://github.com/npm/cli/commit/9dd2ed5189b6f283094664e9e192cf1598ec3f79) + fix empty newline printed to stderr + ([@ruyadorno](https://github.com/ruyadorno)) +* [`9d391462a`](https://github.com/npm/cli/commit/9d391462a25f637219501e2430ef1f7b89710816) + [#2973](https://github.com/npm/cli/issues/2973) + fix spelling in workspaces.md file + ([@sethomas](https://github.com/sethomas)) +* [`4b100249a`](https://github.com/npm/cli/commit/4b100249a6cad67e002186816e64817313b636c7) + [#2979](https://github.com/npm/cli/issues/2979) + change 'maxsockets' default value back to 15 + ([@wallrat](https://github.com/wallrat)) + +### DEPENDENCIES + +* [`a28f89572`](https://github.com/npm/cli/commit/a28f89572a708cced69cc938f877eaa969dbad9e) + `libnpmversion@1.1.0` + * fix reading `script-shell` config on `npm version` lifecycle scripts +* [`03734c29e`](https://github.com/npm/cli/commit/03734c29e00191d17f164d1c0e75d9f228268842) + `npm-packlist@2.1.5` + * fix packaging `bundledDependencies` +* [`80ce2a019`](https://github.com/npm/cli/commit/80ce2a019526632b01b70e1c75c42608dc160332) + `@npmcli/metavuln-calculator@1.1.1` + * fix error auditing package documents with missing dependencies + +## v7.7.5 (2021-03-25) + +### BUG FIXES + +* [`95ba87622`](https://github.com/npm/cli/commit/95ba87622e00d68270eda9e071b19737718fca16) + [#2949](https://github.com/npm/cli/issues/2949) + fix handling manual indexes in `npm help` + ([@dmchurch](https://github.com/dmchurch)) +* [`59cf37962`](https://github.com/npm/cli/commit/59cf37962a2286e0f7d3bd37fa9c8bc3bac94218) + [#2958](https://github.com/npm/cli/issues/2958) + always set `npm.command` to canonical command name + ([@isaacs](https://github.com/isaacs)) +* [`1415b4bde`](https://github.com/npm/cli/commit/1415b4bdeeaabb6e0ba12b6b1b0cc56502bd64ab) + [#2964](https://github.com/npm/cli/issues/2964) + fix(config): properly translate user-agent + ([@wraithgar](https://github.com/wraithgar)) +* [`59271936d`](https://github.com/npm/cli/commit/59271936d90fbd6956a41967119f578c0ba63db9) + [#2965](https://github.com/npm/cli/issues/2965) + fix(config): tie save-exact/save-prefix together + ([@wraithgar](https://github.com/wraithgar)) + +### TESTS + +* [`97b415287`](https://github.com/npm/cli/commit/97b41528739460b2e9e72e09000aded412418cb2) + 
[#2959](https://github.com/npm/cli/issues/2959) + add smoke tests + ([@ruyadorno](https://github.com/ruyadorno)) + +## v7.7.4 (2021-03-24) + +### BUG FIXES + +* [`200bee74b`](https://github.com/npm/cli/commit/200bee74b31a738687446b7b535cac67b1c582fd) + [#2951](https://github.com/npm/cli/issues/2951) + fix(config): accept explicit `production=false` + ([@wraithgar](https://github.com/wraithgar)) +* [`7b45e9df6`](https://github.com/npm/cli/commit/7b45e9df6102c7bd6e403d1fdc9939581c38f546) + [#2950](https://github.com/npm/cli/issues/2950) + warn if using workspaces config options in `npm config` + ([@ruyadorno](https://github.com/ruyadorno)) + +## v7.7.3 (2021-03-24) + +### BUG FIXES + +* [`c76f04ac2`](https://github.com/npm/cli/commit/c76f04ac28ddf2ae4df4b3ce0aec684a118de1b5) + [#2925](https://github.com/npm/cli/issues/2925) + fix(set-script): add completion + ([@Yash-Singh1](https://github.com/Yash-Singh1)) +* [`0379eab69`](https://github.com/npm/cli/commit/0379eab698b78ae4aa89bbe2043607f420e52f11) + [#2929](https://github.com/npm/cli/issues/2929) + fix(install): ignore auditLevel + `npm install` should not be affected by the `auditLevel` config, as the + results of audit do not change its exit status. + ([@wraithgar](https://github.com/wraithgar)) +* [`98efadeb4`](https://github.com/npm/cli/commit/98efadeb4b2ae9289f14ed6f42a169230faf7239) + [#2923](https://github.com/npm/cli/issues/2923) + fix(audit-level): add `info` audit level + This is a valid level but wasn't configured to be allowed. + Also added this param to the usage output for `npm audit` + ([@wraithgar](https://github.com/wraithgar)) +* [`e8d2adcf4`](https://github.com/npm/cli/commit/e8d2adcf40ad63030f844c9aa44c6d16e2146797) + [#2945](https://github.com/npm/cli/issues/2945) + config should not error when workspaces are configured + ([@nlf](https://github.com/nlf)) +* [`aba2bc623`](https://github.com/npm/cli/commit/aba2bc623ea99e563b1b15b81dbb4ba94f86fe4c) + [#2944](https://github.com/npm/cli/issues/2944) + fix(progress): re-add progress bar to reify + The logger was no longer in flatOptions, we pass it in explicitly now + ([@wraithgar](https://github.com/wraithgar)) +* [`877b4ed29`](https://github.com/npm/cli/commit/877b4ed2925c97b5249a4d33575420dda64f7339) + [#2946](https://github.com/npm/cli/issues/2946) + fix(flatOptions): re-add `_auth` + This was not being added to flatOptions, and things like + `npm-registry-fetch` are looking for it. 
+ ([@wraithgar](https://github.com/wraithgar)) + +## v7.7.2 (2021-03-24) + +### BUG FIXES +* [`a4df2b98d`](https://github.com/npm/cli/commit/a4df2b98d89429b19cd29b5fc895cdbfc0a6bd78) + [#2942](https://github.com/npm/cli/issues/2942) + Restore --dev flag, unify --omit flatteners + ([@isaacs](https://github.com/isaacs)) + +### DEPENDENCIES +* [`2cbfaac0e`](https://github.com/npm/cli/commit/2cbfaac0ecd5810316f6d76168ed9618bd11bf3a) + `hosted-git-info@4.0.2` + * [#83](https://github.com/npm/hosted-git-info/pull/83) Do not parse + urls for gitlab + ([@nlf](https://github.com/nlf)) + +## v7.7.1 (2021-03-24) + +### BUG FIXES + +* [`543b0e39b`](https://github.com/npm/cli/commit/543b0e39bcb94fc408804b01ca9c0d7b960b2681) + [#2930](https://github.com/npm/cli/issues/2930) + fix(uninstall): use correct local prefix + ([@jameschensmith](https://github.com/jameschensmith)) +* [`dce4960ef`](https://github.com/npm/cli/commit/dce4960ef6d52af128affe7755b2ca72de913b6c) + [#2932](https://github.com/npm/cli/issues/2932) + fix(config): flatten savePrefix properly + ([@wraithgar](https://github.com/wraithgar)) + +## v7.7.0 (2021-03-23) + +### FEATURES + +* [`33c4189f9`](https://github.com/npm/cli/commit/33c4189f939aebdfaf85ea419e6ea01d0977b79d) + [#2864](https://github.com/npm/cli/issues/2864) + add `npm run-script` workspaces support + ([@ruyadorno](https://github.com/ruyadorno)) +* [`e1b3b318f`](https://github.com/npm/cli/commit/e1b3b318f095a7e1a7cc4b131907de4955275d9d) + [#2886](https://github.com/npm/cli/issues/2886) + add `npm exec` workspaces support + ([@ruyadorno](https://github.com/ruyadorno)) +* [`41facf643`](https://github.com/npm/cli/commit/41facf6435ced4e416d74111d9c3ff00ee19ab7d) + [#2859](https://github.com/npm/cli/issues/2859) + expanded "Did you mean?" suggestions for missing cmds and scripts + ([@wraithgar](https://github.com/wraithgar)) + +### BUG FIXES + +* [`8cce4282f`](https://github.com/npm/cli/commit/8cce4282f7bef11aeeb73cffd532b477b241985e) + [#2865](https://github.com/npm/cli/issues/2865) + `npm publish`: handle case where multiple config list is present + ([@kenrick95](https://github.com/kenrick95)) +* [`6598bfe86`](https://github.com/npm/cli/commit/6598bfe8697439e827d84981f8504febca64a55a) + mark deprecated configs + ([@isaacs](https://github.com/isaacs)) +* [`8a38afe77`](https://github.com/npm/cli/commit/8a38afe779ce71a10178ed62b13709d06adf7a66) + [#2881](https://github.com/npm/cli/issues/2881) + docs(package-json): document default main behavior + ([@klausbayrhammer](https://github.com/klausbayrhammer)) +* [`93a061d73`](https://github.com/npm/cli/commit/93a061d737dc769663652368e8586e4202267b9e) + [#2917](https://github.com/npm/cli/issues/2917) + add action items to `npm run` error output + ([@wraithgar](https://github.com/wraithgar)) + +### DOCUMENTATION + +* [`ad65bd910`](https://github.com/npm/cli/commit/ad65bd9101aa8e8b94bc1e48df3ef93deca6d30c) + [#2860](https://github.com/npm/cli/issues/2860) + fix link in configuring-npm + ([@varmakarthik12](https://github.com/varmakarthik12)) +* [`b419bfb02`](https://github.com/npm/cli/commit/b419bfb0259596fb338d45b2eaeab25a7a0d1f1e) + [#2876](https://github.com/npm/cli/issues/2876) + fix test-coverage command in contributing guide + ([@chowkapow](https://github.com/chowkapow)) + +### DEPENDENCIES + +* [`7b5606b93`](https://github.com/npm/cli/commit/7b5606b931083e8a70f5ea094c2b46f0b7a38a18) + `@npmcli/arborist@2.2.9` + * [#254](https://github.com/npm/arborist/pull/254) Honor explicit + prefix when saving dependencies + 
([@jameschensmith](https://github.com/jameschensmith)) + * [#255](https://github.com/npm/arborist/pull/255) Never save to + `bundleDependencies` when saving a `peer` or `peerOptional` + dependency. ([@isaacs](https://github.com/isaacs)) +* [`f76e7c21f`](https://github.com/npm/cli/commit/f76e7c21ffd87b08593d8c396a78ab9c5fa790bd) + `pacote@11.3.1` + * increases tarball compression level +* [`4928512bc`](https://github.com/npm/cli/commit/4928512bcefd8448ff5852978cfc7f903e3ae996) + `semver@7.3.5` + * fix handling prereleases/ANY ranges in subset +* [`1924eb457`](https://github.com/npm/cli/commit/1924eb457aea7c93dfaf4a911355a63d84d66eee) + `libnpmversion@1.0.12` + * fix removing undescored-prefixed package.json properties in `npm version` +* [`916623056`](https://github.com/npm/cli/commit/91662305643509eebd2f79ed7e3ff01562aa4968) + `@npmcli/run-script@1.8.4` + * fix expanding windows-style environment variables +* [`a8d0751e4`](https://github.com/npm/cli/commit/a8d0751e4b7c7d8b808c8a49f288fc7272f729b0) + `npm-pick-manifest@6.1.1` + * fix running packages with a single executable binary with `npm exec` +* [`af7eaac50`](https://github.com/npm/cli/commit/af7eaac5018ed821d72d43d08f1d7e49e7491453) + `hosted-git-info@4.0.1` +* [`f52c51db1`](https://github.com/npm/cli/commit/f52c51db13c39cfbaed18dbd13ba7302a4b6a0d9) + `@npmcli/config@2.0.0` + +## v7.6.3 (2021-03-11) + +### DOCUMENTATION + +* [`8c44e999b`](https://github.com/npm/cli/commit/8c44e999bdf7639893535c55beebf7996da2c47f) + [#2855](https://github.com/npm/cli/issues/2855) + Correct "npm COMMAND help" to "npm help COMMAND" + ([@dwardu](https://github.com/dwardu)) + +### DEPENDENCIES + +* [`57ed390d6`](https://github.com/npm/cli/commit/57ed390d64a44ae0a1b2c4afd79d690170b194ec) + `@npmcli/arborist@2.2.8` + * Respect link deps when calculating peerDep sets + +## v7.6.2 (2021-03-09) + +### BUG FIXES + +* [`e0a3a5218`](https://github.com/npm/cli/commit/e0a3a5218cac7ca5850930aaaad8a939ddf75d4d) + [#2831](https://github.com/npm/cli/issues/2831) + Fix cb() never called in search with --json option + ([@fraqe](https://github.com/fraqe)) +* [`85a8694dd`](https://github.com/npm/cli/commit/85a8694dd9b4a924a474ba75261914511a216868) + [#2795](https://github.com/npm/cli/issues/2795) + fix(npm.output): make output go through npm.output + ([@wraithgar](https://github.com/wraithgar)) +* [`9fe0df5b5`](https://github.com/npm/cli/commit/9fe0df5b5d7606e5841288d9931be6c04767c9ca) + [#2821](https://github.com/npm/cli/issues/2821) + fix(usage): clean up usage declarations + ([@wraithgar](https://github.com/wraithgar)) + +### DEPENDENCIES + +* [`7f470b5c2`](https://github.com/npm/cli/commit/7f470b5c25d544e36d97b28e28ae20dfa1d4ab31) + `@npmcli/arborist@2.2.7` + * fix(install): Do not revert a file: dep to version on bare name re-install +* [`e9b7fc275`](https://github.com/npm/cli/commit/e9b7fc275a0bdf8f00dbcf5dd2283675776fc459) + `libnpmdiff@2.0.4` + * fix(diff): Gracefully handle packages with prepare script +* [`c7314aa62`](https://github.com/npm/cli/commit/c7314aa62195b7f0d8886776692e8a2c892413ed) + `byte-size@7.0.1` +* [`864f48d43`](https://github.com/npm/cli/commit/864f48d4327269f521161cf89888ea2b6db5fdab) + `pacote@11.3.0` + +## v7.6.1 (2021-03-04) + +### BUG FIXES + +* [`3c9a589b0`](https://github.com/npm/cli/commit/3c9a589b004fa828a304abaf52d1d781710e1143) + [#2807](https://github.com/npm/cli/issues/2807) + `npm explain` show when an edge is a bundled edge + ([@kumavis](https://github.com/kumavis)) +* 
[`b33c760ce`](https://github.com/npm/cli/commit/b33c760cea7fe2696d35b5530abc1b455980fef1) + [#2766](https://github.com/npm/cli/issues/2766) + unused arguments cleanup + ([@sandersn](https://github.com/sandersn)) +* [`4a5dd3a5a`](https://github.com/npm/cli/commit/4a5dd3a5a200b3f4f7b47168497d8e03dca3a2ca) + [#2772](https://github.com/npm/cli/issues/2772) + fix(npm) pass npm context everywhere + ([@wraithgar](https://github.com/wraithgar)) +* [`e69be2ac5`](https://github.com/npm/cli/commit/e69be2ac5c35e985732e2baa00b70d39332e4b9f) + [#2789](https://github.com/npm/cli/issues/2789) + fix npm prefix on all Windows unix shells + ([@isaacs](https://github.com/isaacs)) +* [`2d682e4ca`](https://github.com/npm/cli/commit/2d682e4cab0cf109a16332f3222f1e9a4027db69) + [#2803](https://github.com/npm/cli/issues/2803) + fix(search): don't pass unused args + ([@wraithgar](https://github.com/wraithgar)) +* [`b3e7dd19b`](https://github.com/npm/cli/commit/b3e7dd19bb4888dad2bfb6702aed6560a7f91bf8) + [#2822](https://github.com/npm/cli/issues/2822) + fix(diff): set option "where" for pacote + ([@ruyadorno](https://github.com/ruyadorno)) +* [`96006640b`](https://github.com/npm/cli/commit/96006640b902d31415260df5ce3ad8d066a64623) + [#2824](https://github.com/npm/cli/issues/2824) + fix(repo, auth.sso): don't promisify open-url + ([@wraithgar](https://github.com/wraithgar)) + +### DOCUMENTATION + +* [`c8b73db82`](https://github.com/npm/cli/commit/c8b73db82f0f2445c20a0a64110586253accd66b) + [#2690](https://github.com/npm/cli/issues/2690) + fix(docs): update scripts docs + ([@wraithgar](https://github.com/wraithgar)) +* [`5d922394b`](https://github.com/npm/cli/commit/5d922394b7874b2b38d34f03f2decbe0eb3e8583) + [#2809](https://github.com/npm/cli/issues/2809) + update republish timeout after unpublish + ([@BAJ-](https://github.com/BAJ-)) + +### DEPENDENCIES + +* [`2d4ae598f`](https://github.com/npm/cli/commit/2d4ae598f30049680797685f76154b16a7e15a66) + `@npmcli/arborist@2.2.6` + ## v7.6.0 (2021-02-25) ### FEATURES @@ -1280,7 +2494,7 @@ fix(lib/npm): do not clobber config.execPath fix: patch `config.js` to remove duplicate vals ([@darcyclarke](https://github.com/darcyclarke)) -### DOCUMENTION +### DOCUMENTATION * [`60769d757`](https://github.com/npm/cli/commit/60769d757859c88e2cceab66975f182a47822816) [#1911](https://github.com/npm/cli/pull/1911) docs: v7 npm-install diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5198918f010df..ec1c513864c1b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,14 +2,14 @@ ## Code of Conduct -All interactions in the **npm** organization on GitHub are considered to be covered by our standard [Code of Conduct](https://www.npmjs.com/policies/conduct). +All interactions in the **npm** organization on GitHub are considered to be covered by our standard [Code of Conduct](https://docs.npmjs.com/policies/conduct). ## Development **1. Clone this repository...** ```bash -$ git clone git@github.com:npm/cli.git +$ git clone git@github.com:npm/cli.git npm ``` **2. 
Navigate into project & install development-specific dependencies...** @@ -38,7 +38,7 @@ We expect that every new feature or bug fix comes with corresponding tests that **You can find out what the current test coverage percentage is by running...** ```bash -$ npm run test-coverage +$ npm run check-coverage ``` ## Performance & Benchmarks @@ -51,6 +51,10 @@ We've set up an automated [benchmark](https://github.com/npm/benchmarks) integra You can learn more about this tool, including how to run & configure it manually, [here](https://github.com/npm/benchmarks) +## Dependency Updates + +It should be noted that our team does not accept third-party dependency updates/PRs. We have a [release process](https://github.com/npm/cli/wiki/Release-Process) that includes checks to ensure dependencies are staying up-to-date & will ship security patches for CVEs as they occur. If you submit a PR trying to update our dependencies we will close it with or without a reference to these contribution guidelines. + ## Reporting Bugs When submitting a new bug report, please first [search](https://github.com/npm/cli/issues) for an existing or similar report & then use one of our existing [issue templates](https://github.com/npm/cli/issues/new/choose) if you believe you've come across a unique problem. Duplicate issues, or issues that don't use one of our templates may get closed without a response. diff --git a/Makefile b/Makefile index 4c96615203b02..0005223d9921a 100644 --- a/Makefile +++ b/Makefile @@ -36,7 +36,7 @@ $(version_mandocs): package.json htmldocs: dev-deps node bin/npm-cli.js rebuild - cd docs && node dockhand.js >&2 + node bin/npm-cli.js run -w docs build clean: docs-clean gitclean @@ -47,7 +47,7 @@ docs-clean: ## build-time dependencies for the documentation dev-deps: - node bin/npm-cli.js install --only=dev --no-audit --ignore-scripts + node bin/npm-cli.js install --no-audit --ignore-scripts ## targets for man files, these are encouraged to be only built by running `make docs` or `make mandocs` man/man1/%.1: docs/content/commands/%.md scripts/docs-build.js @@ -68,9 +68,20 @@ man/man7/%.7: docs/content/using-npm/%.md scripts/docs-build.js @[ -d man/man7 ] || mkdir -p man/man7 node scripts/docs-build.js $< $@ +# Any time the config definitions description changes, automatically +# update the documentation to account for it +docs/content/using-npm/config.md: scripts/config-doc.js lib/utils/config/*.js + node scripts/config-doc.js + +docs/content/commands/npm-%.md: lib/%.js scripts/config-doc-command.js lib/utils/config/*.js + node scripts/config-doc-command.js $@ $< + test: dev-deps node bin/npm-cli.js test +smoke-tests: dev-deps + node bin/npm-cli.js run smoke-tests -- --no-check-coverage + ls-ok: node . 
ls --production >/dev/null @@ -84,11 +95,12 @@ link: uninstall node bin/npm-cli.js link -f --ignore-scripts prune: + node bin/npm-cli.js run resetdeps node bin/npm-cli.js prune --production --no-save --no-audit @[[ "$(shell git status -s)" != "" ]] && echo "ERR: found unpruned files" && exit 1 || echo "git status is clean" -publish: gitclean ls-ok link test docs prune +publish: gitclean ls-ok link test smoke-tests docs prune @git push origin :v$(shell node bin/npm-cli.js --no-timing -v) 2>&1 || true git push origin $(BRANCH) &&\ git push origin --tags &&\ diff --git a/README.md b/README.md index 8a649847ecaab..01de9e8f69445 100644 --- a/README.md +++ b/README.md @@ -28,6 +28,7 @@ If you're looking to manage multiple versions of **`node`** &/or **`npm`**, cons * [**`n`**](https://github.com/tj/n) * [**`volta`**](https://github.com/volta-cli/volta) * [**`nodenv`**](https://github.com/nodenv/nodenv) +* [**`asdf-nodejs`**](https://github.com/asdf-vm/asdf-nodejs) ### Usage diff --git a/bin/npx-cli.js b/bin/npx-cli.js index f4a419972f7cf..7a3fb39837d27 100755 --- a/bin/npx-cli.js +++ b/bin/npx-cli.js @@ -10,25 +10,25 @@ process.argv.splice(2, 0, 'exec') const removedSwitches = new Set([ 'always-spawn', 'ignore-existing', - 'shell-auto-fallback' + 'shell-auto-fallback', ]) const removedOpts = new Set([ 'npm', 'node-arg', - 'n' + 'n', ]) const removed = new Set([ ...removedSwitches, - ...removedOpts + ...removedOpts, ]) -const { types, shorthands } = require('../lib/utils/config.js') -const npmSwitches = Object.entries(types) - .filter(([key, type]) => type === Boolean || +const { definitions, shorthands } = require('../lib/utils/config/index.js') +const npmSwitches = Object.entries(definitions) + .filter(([key, {type}]) => type === Boolean || (Array.isArray(type) && type.includes(Boolean))) - .map(([key, type]) => key) + .map(([key]) => key) // things that don't take a value const switches = new Set([ @@ -40,7 +40,7 @@ const switches = new Set([ 'version', 'v', 'help', - 'h' + 'h', ]) // things that do take a value @@ -55,7 +55,7 @@ const opts = new Set([ 'shell', 'npm', 'node-arg', - 'n' + 'n', ]) // break out of loop when we find a positional argument or -- @@ -65,9 +65,9 @@ let i let sawRemovedFlags = false for (i = 3; i < process.argv.length; i++) { const arg = process.argv[i] - if (arg === '--') { + if (arg === '--') break - } else if (/^-/.test(arg)) { + else if (/^-/.test(arg)) { const [key, ...v] = arg.replace(/^-+/, '').split('=') switch (key) { @@ -87,9 +87,8 @@ for (i = 3; i < process.argv.length; i++) { // resolve shorthands and run again if (shorthands[key] && !removed.has(key)) { const a = [...shorthands[key]] - if (v.length) { + if (v.length) a.push(v.join('=')) - } process.argv.splice(i, 1, ...a) i-- continue @@ -110,9 +109,8 @@ for (i = 3; i < process.argv.length; i++) { if (removed.has(key)) { // also remove the value for the cut key. 
process.argv.splice(i + 1, 1) - } else { + } else i++ - } } } else { // found a positional arg, put -- in front of it, and we're done @@ -121,8 +119,7 @@ for (i = 3; i < process.argv.length; i++) { } } -if (sawRemovedFlags) { +if (sawRemovedFlags) console.error('See `npm help exec` for more information') -} cli(process) diff --git a/docs/content/commands/npm-access.md b/docs/content/commands/npm-access.md index ed9c7775745ce..6d73d6a5e28ce 100644 --- a/docs/content/commands/npm-access.md +++ b/docs/content/commands/npm-access.md @@ -81,6 +81,30 @@ fail with an HTTP 402 status code (logically enough), unless you use Management of teams and team memberships is done with the `npm team` command. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. + +#### `otp` + +* Default: null +* Type: null or String + +This is a one-time password from a two-factor authenticator. It's needed +when publishing or changing package permissions with `npm access`. + +If not set, and a registry response fails with a challenge for a one-time +password, npm will prompt on the command line for one. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [`libnpmaccess`](https://npm.im/libnpmaccess) diff --git a/docs/content/commands/npm-adduser.md b/docs/content/commands/npm-adduser.md index 7960869ad33cc..f25d3ccd87ab4 100644 --- a/docs/content/commands/npm-adduser.md +++ b/docs/content/commands/npm-adduser.md @@ -7,11 +7,13 @@ description: Add a registry user account ### Synopsis ```bash -npm adduser [--registry=url] [--scope=@orgname] [--always-auth] [--auth-type=legacy] +npm adduser [--registry=url] [--scope=@orgname] [--auth-type=legacy] aliases: login, add-user ``` +Note: This command is unaware of workspaces. + ### Description Create or verify a user named `<username>` in the specified registry, and @@ -33,54 +35,46 @@ your existing record. ### Configuration -#### registry +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` -Default: https://registry.npmjs.org/ +* Default: "https://registry.npmjs.org/" +* Type: URL -The base URL of the npm package registry. If `scope` is also specified, -this registry will only be used for packages with that scope. `scope` defaults -to the scope of the project directory you're currently in, if any. See [`scope`](/using-npm/scope). +The base URL of the npm registry. -#### scope +#### `scope` -Default: none +* Default: the scope of the current project, if any, or "" +* Type: String -If specified, the user and login credentials given will be associated -with the specified scope. See [`scope`](/using-npm/scope). You can use both at the same time, -e.g. +Associate an operation with a scope for a scoped registry. -```bash -npm adduser --registry=http://myregistry.example.com --scope=@myco -``` +Useful when logging in to or out of a private registry: -This will set a registry for the given scope and login or create a user for -that registry at the same time. 
+``` +# log in, linking the scope to the custom registry +npm login --scope=@mycorp --registry=https://registry.mycorp.com -#### always-auth +# log out, removing the link and the auth token +npm logout --scope=@mycorp +``` -Default: false +This will cause `@mycorp` to be mapped to the registry for future +installation of packages specified according to the pattern +`@mycorp/package`. -If specified, save configuration indicating that all requests to the given -registry should include authorization information. Useful for private -registries. Can be used with `--registry` and / or `--scope`, e.g. +This will also cause `npm init` to create a scoped package. -```bash -npm adduser --registry=http://private-registry.example.com --always-auth +``` +# accept all defaults, and create a package named "@foo/whatever", +# instead of just named "whatever" +npm init --scope=@foo --yes ``` -This will ensure that all requests to that registry (including for tarballs) -include an authorization header. This setting may be necessary for use with -private registries where metadata and package tarballs are stored on hosts with -different hostnames. See `always-auth` in [`config`](/using-npm/config) for more details on always-auth. Registry-specific configuration of `always-auth` takes precedence over any global configuration. - -#### auth-type - -* Default: `'legacy'` -* Type: `'legacy'`, `'sso'`, `'saml'`, `'oauth'` -What authentication strategy to use with `adduser`/`login`. Some npm registries -(for example, npmE) might support alternative auth strategies besides classic -username/password entry in legacy npm. +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also @@ -89,3 +83,5 @@ username/password entry in legacy npm. * [npmrc](/configuring-npm/npmrc) * [npm owner](/commands/npm-owner) * [npm whoami](/commands/npm-whoami) +* [npm token](/commands/npm-token) +* [npm profile](/commands/npm-profile) diff --git a/docs/content/commands/npm-audit.md b/docs/content/commands/npm-audit.md index 7ad950a6ba99e..94b16b27bd7ed 100644 --- a/docs/content/commands/npm-audit.md +++ b/docs/content/commands/npm-audit.md @@ -187,6 +187,134 @@ Fail an audit only if the results include a vulnerability with a level of modera $ npm audit --audit-level=moderate ``` +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `audit-level` + +* Default: null +* Type: null, "info", "low", "moderate", "high", "critical", or "none" + +The minimum level of vulnerability for `npm audit` to exit with a non-zero +exit code. + +#### `dry-run` + +* Default: false +* Type: Boolean + +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, `install`, `update`, +`dedupe`, `uninstall`, as well as `pack` and `publish`. + +Note: This is NOT honored by other network related commands, eg `dist-tags`, +`owner`, etc. + +#### `force` + +* Default: false +* Type: Boolean + +Removes various protections against unfortunate side effects, common +mistakes, unnecessary performance degradation, and malicious input. + +* Allow clobbering non-npm files in global installs. +* Allow the `npm version` command to work on an unclean git repository. +* Allow deleting the cache folder with `npm cache clean`. +* Allow installing packages that have an `engines` declaration requiring a + different version of npm. 
+* Allow installing packages that have an `engines` declaration requiring a + different version of `node`, even if `--engine-strict` is enabled. +* Allow `npm audit fix` to install modules outside your stated dependency + range (including SemVer-major changes). +* Allow unpublishing all versions of a published package. +* Allow conflicting peerDependencies to be installed in the root project. +* Implicitly set `--yes` during `npm init`. +* Allow clobbering existing values in `npm pkg` + +If you don't have a clear idea of what you want to do, it is strongly +recommended that you do not use this option! + +#### `json` + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + +Not supported by all npm commands. + +#### `package-lock-only` + +* Default: false +* Type: Boolean + +If set to true, the current operation will only use the `package-lock.json`, +ignoring `node_modules`. + +For `update` this means only the `package-lock.json` will be updated, +instead of checking `node_modules` and downloading dependencies. + +For `list` this means the output will be based on the tree described by the +`package-lock.json`, rather than the contents of `node_modules`. + +#### `omit` + +* Default: 'dev' if the `NODE_ENV` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) + +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +`package-lock.json` or `npm-shrinkwrap.json` file. They are just not +physically installed on disk. + +If a package type appears in both the `--include` and `--omit` lists, then +it will be included. + +If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment +variable will be set to `'production'` for all lifecycle scripts. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm install](/commands/npm-install) diff --git a/docs/content/commands/npm-bin.md b/docs/content/commands/npm-bin.md index 4303040e78dac..12984da1d1db7 100644 --- a/docs/content/commands/npm-bin.md +++ b/docs/content/commands/npm-bin.md @@ -10,10 +10,32 @@ description: Display npm bin folder npm bin [-g|--global] ``` +Note: This command is unaware of workspaces. + ### Description Print the folder where npm will install executables. 
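A quick illustration of typical usage may help here; note that the `PATH` export at the end is only one common way the printed folder is used, not something `npm bin` does itself:

```bash
# print the local executables folder (usually <project>/node_modules/.bin)
npm bin

# print the global executables folder instead
npm bin -g

# one common use: put locally installed executables on PATH for this shell
export PATH="$(npm bin):$PATH"
```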
+### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `global` + +* Default: false +* Type: Boolean + +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm prefix](/commands/npm-prefix) diff --git a/docs/content/commands/npm-bugs.md b/docs/content/commands/npm-bugs.md index dd1ab299f9c55..b8778324b8e4a 100644 --- a/docs/content/commands/npm-bugs.md +++ b/docs/content/commands/npm-bugs.md @@ -15,30 +15,34 @@ aliases: issues ### Description This command tries to guess at the likely location of a package's bug -tracker URL, and then tries to open it using the `--browser` config param. -If no package name is provided, it will search for a `package.json` in the -current folder and use the `name` property. +tracker URL or the `mailto` URL of the support email, and then tries to +open it using the `--browser` config param. If no package name is provided, it +will search for a `package.json` in the current folder and use the `name` property. ### Configuration -#### browser +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `browser` * Default: OS X: `"open"`, Windows: `"start"`, Others: `"xdg-open"` -* Type: String or Boolean +* Type: null, Boolean, or String -The browser that is called by the `npm bugs` command to open websites. +The browser that is called by npm commands to open websites. Set to `false` to suppress browser behavior and instead print urls to terminal. Set to `true` to use default system URL opener. -#### registry +#### `registry` -* Default: https://registry.npmjs.org/ -* Type: url +* Default: "https://registry.npmjs.org/" +* Type: URL -The base URL of the npm package registry. +The base URL of the npm registry. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also diff --git a/docs/content/commands/npm-cache.md b/docs/content/commands/npm-cache.md index 13386f2c4a439..e371f196d7c42 100644 --- a/docs/content/commands/npm-cache.md +++ b/docs/content/commands/npm-cache.md @@ -7,10 +7,10 @@ description: Manipulates packages cache ### Synopsis ```bash -npm cache add <tarball file> -npm cache add <folder> -npm cache add <tarball url> -npm cache add <name>@<version> +npm cache add <tarball file>... +npm cache add <folder>... +npm cache add <tarball url>... +npm cache add <name>@<version>... npm cache clean aliases: npm cache clear, npm cache rm @@ -18,12 +18,14 @@ aliases: npm cache clear, npm cache rm npm cache verify ``` +Note: This command is unaware of workspaces. + ### Description Used to add, list, or clean the npm cache folder. * add: - Add the specified package to the local cache. This command is primarily + Add the specified packages to the local cache. This command is primarily intended to be used internally by npm, but it can provide a way to add data to the local installation cache explicitly. @@ -73,11 +75,17 @@ verify`. 
### Configuration -#### cache +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `cache` + +* Default: Windows: `%LocalAppData%\npm-cache`, Posix: `~/.npm` +* Type: Path -Default: `~/.npm` on Posix, or `%AppData%/npm-cache` on Windows. +The location of npm's cache directory. See [`npm +cache`](/commands/npm-cache) -The root cache folder. +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also diff --git a/docs/content/commands/npm-ci.md b/docs/content/commands/npm-ci.md index 925ba8de2e5b7..31c92b13c5cdd 100644 --- a/docs/content/commands/npm-ci.md +++ b/docs/content/commands/npm-ci.md @@ -12,7 +12,7 @@ npm ci ### Description -This command is similar to [`npm install`](/cli-commands/install), except +This command is similar to [`npm install`](/commands/npm-install), except it's meant to be used in automated environments such as test platforms, continuous integration, and deployment -- or any situation where you want to make sure you're doing a clean install of your dependencies. @@ -65,6 +65,42 @@ cache: - "$HOME/.npm" ``` +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `audit` + +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. + +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + +#### `script-shell` + +* Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows +* Type: null or String + +The shell to use for scripts run with the `npm exec`, `npm run` and `npm +init <pkg>` commands. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm install](/commands/npm-install) diff --git a/docs/content/commands/npm-completion.md b/docs/content/commands/npm-completion.md index 53737c8033194..9dbd960913f27 100644 --- a/docs/content/commands/npm-completion.md +++ b/docs/content/commands/npm-completion.md @@ -10,6 +10,8 @@ description: Tab Completion for npm source <(npm completion) ``` +Note: This command is unaware of workspaces. + ### Description Enables tab-completion in all npm commands. diff --git a/docs/content/commands/npm-config.md b/docs/content/commands/npm-config.md index 51caa5a61b607..9e76a23671e86 100644 --- a/docs/content/commands/npm-config.md +++ b/docs/content/commands/npm-config.md @@ -18,6 +18,8 @@ npm get [<key> [<key> ...]] alias: c ``` +Note: This command is unaware of workspaces. + ### Description npm gets its config settings from the command line, environment @@ -91,6 +93,61 @@ npm config edit Opens the config file in an editor. Use the `--global` flag to edit the global config. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `json` + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. 
+ +Not supported by all npm commands. + +#### `global` + +* Default: false +* Type: Boolean + +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + +#### `editor` + +* Default: The EDITOR or VISUAL environment variables, or 'notepad.exe' on + Windows, or 'vim' on Unix systems +* Type: String + +The command to run for `npm edit` and `npm config edit`. + +#### `location` + +* Default: "user" unless `--global` is passed, which will also set this value + to "global" +* Type: "global", "user", or "project" + +When passed to `npm config` this refers to which config file to use. + +#### `long` + +* Default: false +* Type: Boolean + +Show extended information in `ls`, `search`, and `help-search`. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm folders](/configuring-npm/folders) diff --git a/docs/content/commands/npm-dedupe.md b/docs/content/commands/npm-dedupe.md index 9b14e99dd14f0..324e6a71b7a3e 100644 --- a/docs/content/commands/npm-dedupe.md +++ b/docs/content/commands/npm-dedupe.md @@ -1,7 +1,7 @@ --- title: npm-dedupe section: 1 -description: Reduce duplication +description: Reduce duplication in the package tree --- ### Synopsis @@ -10,7 +10,7 @@ description: Reduce duplication npm dedupe npm ddp -aliases: find-dupes, ddp +aliases: ddp ``` ### Description @@ -72,8 +72,174 @@ result in new modules being installed. Using `npm find-dupes` will run the command in `--dry-run` mode. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `global-style` + +* Default: false +* Type: Boolean + +Causes npm to install the package into your local `node_modules` folder with +the same layout it uses with the global `node_modules` folder. Only your +direct dependencies will show in `node_modules` and everything they depend +on will be flattened in their `node_modules` folders. This obviously will +eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling` +will be preferred. + +#### `legacy-bundling` + +* Default: false +* Type: Boolean + +Causes npm to install the package such that versions of npm prior to 1.4, +such as the one included with node 0.8, can install the package. This +eliminates all automatic deduping. If used with `global-style` this option +will be preferred. + +#### `strict-peer-deps` + +* Default: false +* Type: Boolean + +If set to `true`, and `--legacy-peer-deps` is not set, then _any_ +conflicting `peerDependencies` will be treated as an install failure, even +if npm could reasonably guess the appropriate resolution based on non-peer +dependency relationships. + +By default, conflicting `peerDependencies` deep in the dependency graph will +be resolved using the nearest non-peer dependency specification, even if +doing so will result in some packages receiving a peer dependency outside +the range set in their package's `peerDependencies` object. + +When such and override is performed, a warning is printed, explaining the +conflict and the packages involved. If `--strict-peer-deps` is set, then +this warning is treated as a failure. 
+ +#### `package-lock` + +* Default: true +* Type: Boolean + +If set to false, then ignore `package-lock.json` files when installing. This +will also prevent _writing_ `package-lock.json` if `save` is true. + +When package package-locks are disabled, automatic pruning of extraneous +modules will also be disabled. To remove extraneous modules with +package-locks disabled use `npm prune`. + +#### `omit` + +* Default: 'dev' if the `NODE_ENV` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) + +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +`package-lock.json` or `npm-shrinkwrap.json` file. They are just not +physically installed on disk. + +If a package type appears in both the `--include` and `--omit` lists, then +it will be included. + +If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment +variable will be set to `'production'` for all lifecycle scripts. + +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + +#### `audit` + +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. + +#### `bin-links` + +* Default: true +* Type: Boolean + +Tells npm to create symlinks (or `.cmd` shims on Windows) for package +executables. + +Set to false to have it not do this. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems. + +#### `fund` + +* Default: true +* Type: Boolean + +When "true" displays the message at the end of each `npm install` +acknowledging the number of dependencies looking for funding. See [`npm +fund`](/commands/npm-fund) for details. + +#### `dry-run` + +* Default: false +* Type: Boolean + +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, `install`, `update`, +`dedupe`, `uninstall`, as well as `pack` and `publish`. + +Note: This is NOT honored by other network related commands, eg `dist-tags`, +`owner`, etc. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. 
+ +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also -* [npm ls](/cli-commands/ls) -* [npm update](/cli-commands/update) -* [npm install](/cli-commands/install) +* [npm find-dupes](/commands/npm-find-dupes) +* [npm ls](/commands/npm-ls) +* [npm update](/commands/npm-update) +* [npm install](/commands/npm-install) diff --git a/docs/content/commands/npm-deprecate.md b/docs/content/commands/npm-deprecate.md index 139441856bb06..b5c0e67144aef 100644 --- a/docs/content/commands/npm-deprecate.md +++ b/docs/content/commands/npm-deprecate.md @@ -10,6 +10,8 @@ description: Deprecate a version of a package npm deprecate <pkg>[@<version range>] <message> ``` +Note: This command is unaware of workspaces. + ### Description This command will update the npm registry entry for a package, providing a @@ -38,9 +40,33 @@ To un-deprecate a package, specify an empty string (`""`) for the `message` argument. Note that you must use double quotes with no space between them to format an empty string. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. + +#### `otp` + +* Default: null +* Type: null or String + +This is a one-time password from a two-factor authenticator. It's needed +when publishing or changing package permissions with `npm access`. + +If not set, and a registry response fails with a challenge for a one-time +password, npm will prompt on the command line for one. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm publish](/commands/npm-publish) * [npm registry](/using-npm/registry) -* [npm owner](/cli-commands/owner) -* [npm owner](/cli-commands/adduser) +* [npm owner](/commands/npm-owner) +* [npm owner](/commands/npm-adduser) diff --git a/docs/content/commands/npm-diff.md b/docs/content/commands/npm-diff.md index 0fe6a68fa2f0f..479cb63b11213 100644 --- a/docs/content/commands/npm-diff.md +++ b/docs/content/commands/npm-diff.md @@ -153,82 +153,128 @@ located within the folder `./lib/` and changed lines of code within the ### Configuration -#### diff +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `diff` -* Type: Array -* Default: null +* Default: +* Type: String (can be set multiple times) -Defines npm package specifiers to compare using the `npm diff` command. +Define arguments to compare in `npm diff`. -This can be specified up to 2 times. +#### `diff-name-only` -#### diff-name-only - -* Type: Boolean * Default: false +* Type: Boolean -When set to `true` running `npm diff` only returns the names of the files that -have any difference. +Prints only filenames when using `npm diff`. -#### diff-unified +#### `diff-unified` +* Default: 3 * Type: Number -* Default: `3` -The number of lines of context to print in the unified diff format output. +The number of lines of context to print in `npm diff`. -#### diff-ignore-all-space +#### `diff-ignore-all-space` -* Type: Boolean * Default: false +* Type: Boolean -Ignore whitespace when comparing lines. This ignores differences even if one -line has whitespace where the other line has none. +Ignore whitespace when comparing lines in `npm diff`. 
-#### diff-no-prefix +#### `diff-no-prefix` -* Type: Boolean * Default: false +* Type: Boolean -Do not show any source or destination prefix. +Do not show any source or destination prefix in `npm diff` output. -#### diff-src-prefix +Note: this causes `npm diff` to ignore the `--diff-src-prefix` and +`--diff-dst-prefix` configs. +#### `diff-src-prefix` + +* Default: "a/" * Type: String -* Default: `"a/"` -Show the given source prefix in diff patches headers instead of using "a/". +Source prefix to be used in `npm diff` output. -#### diff-dst-prefix +#### `diff-dst-prefix` +* Default: "b/" * Type: String -* Default: `"b/"` -Show the given source prefix in diff patches headers instead of using "b/". +Destination prefix to be used in `npm diff` output. -#### diff-text +#### `diff-text` -* Type: Boolean * Default: false +* Type: Boolean -Treat all files as text. +Treat all files as text in `npm diff`. -#### global +#### `global` * Default: false * Type: Boolean -Uses packages from the global space as a source for comparison. +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. -#### tag +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` +#### `tag` + +* Default: "latest" * Type: String -* Default: `"latest"` -The tag used to fetch the tarball that will be compared with the local file -system files when running npm diff with no arguments. +If you ask npm to install a package and don't tell it a specific version, +then it will install the specified tag. + +Also the tag that is added to the package@version specified by the `npm tag` +command, if no explicit tag is given. + +When used by the `npm diff` command, this is the tag used to fetch the +tarball that will be compared with the local files by default. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. +This value is not exported to the environment for child processes. +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ## See Also * [npm outdated](/commands/npm-outdated) diff --git a/docs/content/commands/npm-dist-tag.md b/docs/content/commands/npm-dist-tag.md index 585da16ad2d2c..0e4e0ce56c69d 100644 --- a/docs/content/commands/npm-dist-tag.md +++ b/docs/content/commands/npm-dist-tag.md @@ -88,6 +88,44 @@ semver as `>=1.4.0 <1.5.0`. See <https://github.com/npm/npm/issues/6082>. The simplest way to avoid semver problems with tags is to use tags that do not begin with a number or the letter `v`. 
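For illustration (the package name `my-pkg` is hypothetical):

```bash
# risky: a tag beginning with "v" can be read as a semver range rather than a tag
npm dist-tag add my-pkg@1.4.0 v1

# safer: a tag name that cannot be confused with a version or range
npm dist-tag add my-pkg@1.4.0 stable
npm install my-pkg@stable
```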
+### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm publish](/commands/npm-publish) diff --git a/docs/content/commands/npm-docs.md b/docs/content/commands/npm-docs.md index c66e5fc627d66..7f4c860837035 100644 --- a/docs/content/commands/npm-docs.md +++ b/docs/content/commands/npm-docs.md @@ -22,24 +22,60 @@ the `name` property. ### Configuration -#### browser +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `browser` * Default: OS X: `"open"`, Windows: `"start"`, Others: `"xdg-open"` -* Type: String or Boolean +* Type: null, Boolean, or String -The browser that is called by the `npm docs` command to open websites. +The browser that is called by npm commands to open websites. Set to `false` to suppress browser behavior and instead print urls to terminal. Set to `true` to use default system URL opener. -#### registry +#### `registry` -* Default: https://registry.npmjs.org/ -* Type: url +* Default: "https://registry.npmjs.org/" +* Type: URL -The base URL of the npm package registry. +The base URL of the npm registry. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also diff --git a/docs/content/commands/npm-doctor.md b/docs/content/commands/npm-doctor.md index 2aceee2390331..839f4261bbbc2 100644 --- a/docs/content/commands/npm-doctor.md +++ b/docs/content/commands/npm-doctor.md @@ -10,6 +10,8 @@ description: Check your npm environment npm doctor ``` +Note: This command is unaware of workspaces. 
+ ### Description `npm doctor` runs a set of checks to ensure that your npm installation has @@ -100,6 +102,19 @@ located with `npm config get cache`). In the event that there are corrupt packages in your cache, you should probably run `npm cache clean -f` and reset the cache. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm bugs](/commands/npm-bugs) diff --git a/docs/content/commands/npm-edit.md b/docs/content/commands/npm-edit.md index 40fac0408529a..6930844bcae2c 100644 --- a/docs/content/commands/npm-edit.md +++ b/docs/content/commands/npm-edit.md @@ -10,6 +10,8 @@ description: Edit an installed package npm edit <pkg> ``` +Note: This command is unaware of workspaces. + ### Description Selects a dependency in the current project and opens the package folder in @@ -25,13 +27,17 @@ changes to your locally installed copy. ### Configuration -#### editor +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `editor` + +* Default: The EDITOR or VISUAL environment variables, or 'notepad.exe' on + Windows, or 'vim' on Unix systems +* Type: String -* Default: `EDITOR` environment variable if set, or `"vi"` on Posix, - or `"notepad"` on Windows. -* Type: path +The command to run for `npm edit` and `npm config edit`. -The command to run for `npm edit` or `npm config edit`. +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also diff --git a/docs/content/commands/npm-exec.md b/docs/content/commands/npm-exec.md index cb3e51c8255d4..d4ea94371a85c 100644 --- a/docs/content/commands/npm-exec.md +++ b/docs/content/commands/npm-exec.md @@ -11,6 +11,7 @@ npm exec -- <pkg>[@<version>] [args...] npm exec --package=<pkg>[@<version>] -- <cmd> [args...] npm exec -c '<cmd> [args...]' npm exec --package=foo -c '<cmd> [args...]' +npm exec [--ws] [-w <workspace-name] [args...] npx <pkg>[@<specifier>] [args...] npx -p <pkg>[@<specifier>] <cmd> [args...] @@ -119,6 +120,64 @@ thus be equivalent to the `npx` command above: $ npm exec -- foo@latest bar --package=@npmcli/foo ``` +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `package` + +* Default: +* Type: String (can be set multiple times) + +The package to install for [`npm exec`](/commands/npm-exec) + +#### `call` + +* Default: "" +* Type: String + +Optional companion option for `npm exec`, `npx` that allows for specifying a +custom command to be run along with the installed packages. + +```bash +npm exec --package yo --package generator-node --call "yo node" +``` + + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. 
+ +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### Examples Run the version of `tap` in the local dependencies, with the provided @@ -145,6 +204,68 @@ $ npm x -c 'eslint && say "hooray, lint passed"' $ npx -c 'eslint && say "hooray, lint passed"' ``` +### Workspaces support + +You may use the `workspace` or `workspaces` configs in order to run an +arbitrary command from an npm package (either one installed locally, or fetched +remotely) in the context of the specified workspaces. +If no positional argument or `--call` option is provided, it will open an +interactive subshell in the context of each of these configured workspaces one +at a time. + +Given a project with configured workspaces, e.g: + +``` +. ++-- package.json +`-- packages + +-- a + | `-- package.json + +-- b + | `-- package.json + `-- c + `-- package.json +``` + +Assuming the workspace configuration is properly set up at the root level +`package.json` file. e.g: + +``` +{ + "workspaces": [ "./packages/*" ] +} +``` + +You can execute an arbitrary command from a package in the context of each of +the configured workspaces when using the `workspaces` configuration options, +in this example we're using **eslint** to lint any js file found within each +workspace folder: + +``` +npm exec --ws -- eslint ./*.js +``` + +#### Filtering workspaces + +It's also possible to execute a command in a single workspace using the +`workspace` config along with a name or directory path: + +``` +npm exec --workspace=a -- eslint ./*.js +``` + +The `workspace` config can also be specified multiple times in order to run a +specific script in the context of multiple workspaces. When defining values for +the `workspace` config in the command line, it also possible to use `-w` as a +shorthand, e.g: + +``` +npm exec -w a -w b -- eslint ./*.js +``` + +This last command will run the `eslint` command in both `./packages/a` and +`./packages/b` folders. + ### Compatibility with Older npx Versions The `npx` binary was rewritten in npm v7.0.0, and the standalone `npx` @@ -195,6 +316,33 @@ requested from the server. To force full offline mode, use `offline`. Forces full offline mode. Any packages not locally cached will result in an error. +#### workspace + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +This value is not exported to the environment for child processes. + +#### workspaces + +* Alias: `--ws` +* Type: Boolean +* Default: `false` + +Run scripts in the context of all configured workspaces for the current +project. + ### See Also * [npm run-script](/commands/npm-run-script) @@ -204,3 +352,4 @@ an error. 
* [npm restart](/commands/npm-restart) * [npm stop](/commands/npm-stop) * [npm config](/commands/npm-config) +* [npm workspaces](/using-npm/workspaces) diff --git a/docs/content/commands/npm-explain.md b/docs/content/commands/npm-explain.md index ec63ec34f26d8..3a87ee8e438ba 100644 --- a/docs/content/commands/npm-explain.md +++ b/docs/content/commands/npm-explain.md @@ -54,13 +54,43 @@ node_modules/nyc/node_modules/find-up ``` ### Configuration - -#### json +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `json` * Default: false * Type: Boolean -Show information in JSON format. +Whether or not to output JSON data, rather than the normal output. + +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + +Not supported by all npm commands. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also diff --git a/docs/content/commands/npm-explore.md b/docs/content/commands/npm-explore.md index e467a755753b4..c4a40cf7229f7 100644 --- a/docs/content/commands/npm-explore.md +++ b/docs/content/commands/npm-explore.md @@ -10,6 +10,8 @@ description: Browse an installed package npm explore <pkg> [ -- <command>] ``` +Note: This command is unaware of workspaces. + ### Description Spawn a subshell in the directory of the installed package specified. @@ -29,18 +31,21 @@ sure to use `npm rebuild <pkg>` if you make any changes. ### Configuration -#### shell +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `shell` -* Default: SHELL environment variable, or "bash" on Posix, or "cmd" on +* Default: SHELL environment variable, or "bash" on Posix, or "cmd.exe" on Windows -* Type: path +* Type: String The shell to run for the `npm explore` command. +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm folders](/configuring-npm/folders) * [npm edit](/commands/npm-edit) * [npm rebuild](/commands/npm-rebuild) -* [npm build](/commands/npm-build) * [npm install](/commands/npm-install) diff --git a/docs/content/commands/npm-find-dupes.md b/docs/content/commands/npm-find-dupes.md new file mode 100644 index 0000000000000..3b28f6443decd --- /dev/null +++ b/docs/content/commands/npm-find-dupes.md @@ -0,0 +1,176 @@ +--- +title: npm-find-dupes +section: 1 +description: Find duplication in the package tree +--- + +### Synopsis + +```bash +npm find-dupes +``` + +### Description + +Runs `npm dedupe` in `--dry-run` mode, making npm only output the +duplications, without actually changing the package tree. 
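For a quick illustration of the relationship described above:

```bash
# report duplication in the current project without changing node_modules
npm find-dupes

# roughly equivalent to running dedupe in dry-run mode
npm dedupe --dry-run
```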
+ +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `global-style` + +* Default: false +* Type: Boolean + +Causes npm to install the package into your local `node_modules` folder with +the same layout it uses with the global `node_modules` folder. Only your +direct dependencies will show in `node_modules` and everything they depend +on will be flattened in their `node_modules` folders. This obviously will +eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling` +will be preferred. + +#### `legacy-bundling` + +* Default: false +* Type: Boolean + +Causes npm to install the package such that versions of npm prior to 1.4, +such as the one included with node 0.8, can install the package. This +eliminates all automatic deduping. If used with `global-style` this option +will be preferred. + +#### `strict-peer-deps` + +* Default: false +* Type: Boolean + +If set to `true`, and `--legacy-peer-deps` is not set, then _any_ +conflicting `peerDependencies` will be treated as an install failure, even +if npm could reasonably guess the appropriate resolution based on non-peer +dependency relationships. + +By default, conflicting `peerDependencies` deep in the dependency graph will +be resolved using the nearest non-peer dependency specification, even if +doing so will result in some packages receiving a peer dependency outside +the range set in their package's `peerDependencies` object. + +When such and override is performed, a warning is printed, explaining the +conflict and the packages involved. If `--strict-peer-deps` is set, then +this warning is treated as a failure. + +#### `package-lock` + +* Default: true +* Type: Boolean + +If set to false, then ignore `package-lock.json` files when installing. This +will also prevent _writing_ `package-lock.json` if `save` is true. + +When package package-locks are disabled, automatic pruning of extraneous +modules will also be disabled. To remove extraneous modules with +package-locks disabled use `npm prune`. + +#### `omit` + +* Default: 'dev' if the `NODE_ENV` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) + +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +`package-lock.json` or `npm-shrinkwrap.json` file. They are just not +physically installed on disk. + +If a package type appears in both the `--include` and `--omit` lists, then +it will be included. + +If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment +variable will be set to `'production'` for all lifecycle scripts. + +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + +#### `audit` + +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. 
+ +#### `bin-links` + +* Default: true +* Type: Boolean + +Tells npm to create symlinks (or `.cmd` shims on Windows) for package +executables. + +Set to false to have it not do this. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems. + +#### `fund` + +* Default: true +* Type: Boolean + +When "true" displays the message at the end of each `npm install` +acknowledging the number of dependencies looking for funding. See [`npm +fund`](/commands/npm-fund) for details. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + +### See Also + +* [npm dedupe](/commands/npm-dedupe) +* [npm ls](/commands/npm-ls) +* [npm update](/commands/npm-update) +* [npm install](/commands/npm-install) + diff --git a/docs/content/commands/npm-fund.md b/docs/content/commands/npm-fund.md index aa1b26b9a8971..ec5f5a37fdb71 100644 --- a/docs/content/commands/npm-fund.md +++ b/docs/content/commands/npm-fund.md @@ -8,6 +8,7 @@ description: Retrieve funding information ```bash npm fund [<pkg>] +npm fund [-w <workspace-name>] ``` ### Description @@ -24,40 +25,115 @@ The list will avoid duplicated entries and will stack all packages that share the same url as a single entry. Thus, the list does not have the same shape of the output from `npm ls`. +#### Example + +### Workspaces support + +It's possible to filter the results to only include a single workspace and its +dependencies using the `workspace` config option. + +#### Example: + +Here's an example running `npm fund` in a project with a configured +workspace `a`: + +```bash +$ npm fund +test-workspaces-fund@1.0.0 ++-- https://example.com/a +| | `-- a@1.0.0 +| `-- https://example.com/maintainer +| `-- foo@1.0.0 ++-- https://example.com/npmcli-funding +| `-- @npmcli/test-funding +`-- https://example.com/org + `-- bar@2.0.0 +``` + +And here is an example of the expected result when filtering only by +a specific workspace `a` in the same project: + +```bash +$ npm fund -w a +test-workspaces-fund@1.0.0 +`-- https://example.com/a + | `-- a@1.0.0 + `-- https://example.com/maintainer + `-- foo@2.0.0 +``` + ### Configuration -#### browser +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `json` + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + +Not supported by all npm commands. 
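+For instance, to get the funding tree as machine-readable output instead of
+the human-readable view (the exact JSON shape may vary between npm versions):
+
+```bash
+# print funding information as JSON
+npm fund --json
+```
+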
+ +#### `browser` * Default: OS X: `"open"`, Windows: `"start"`, Others: `"xdg-open"` -* Type: String +* Type: null, Boolean, or String + +The browser that is called by npm commands to open websites. + +Set to `false` to suppress browser behavior and instead print urls to +terminal. -The browser that is called by the `npm fund` command to open websites. +Set to `true` to use default system URL opener. -#### json +#### `unicode` +* Default: false on windows, true on mac/unix systems with a unicode locale, + as defined by the `LC_ALL`, `LC_CTYPE`, or `LANG` environment variables. * Type: Boolean -* Default: false -Show information in JSON format. +When set to true, npm uses unicode characters in the tree output. When +false, it uses ascii characters instead of unicode glyphs. -#### unicode +#### `workspace` -* Type: Boolean -* Default: true +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: -Whether to represent the tree structure using unicode characters. -Set it to `false` in order to use all-ansi output. +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) -#### which +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. -* Type: Number -* Default: undefined +This value is not exported to the environment for child processes. + +#### `which` + +* Default: null +* Type: null or Number If there are multiple funding sources, which 1-indexed source URL to open. +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ## See Also * [npm install](/commands/npm-install) * [npm docs](/commands/npm-docs) * [npm ls](/commands/npm-ls) * [npm config](/commands/npm-config) +* [npm workspaces](/using-npm/workspaces) diff --git a/docs/content/commands/npm-help-search.md b/docs/content/commands/npm-help-search.md index e10638efa07d9..70f4f182d17e3 100644 --- a/docs/content/commands/npm-help-search.md +++ b/docs/content/commands/npm-help-search.md @@ -10,6 +10,8 @@ description: Search npm help documentation npm help-search <text> ``` +Note: This command is unaware of workspaces. + ### Description This command will search the npm markdown documentation files for the terms @@ -23,15 +25,16 @@ directly. ### Configuration -#### long +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `long` -* Type: Boolean * Default: false +* Type: Boolean -If true, the "long" flag will cause help-search to output context around -where the terms were found in the documentation. +Show extended information in `ls`, `search`, and `help-search`. -If false, then help-search will just list out the help topics found. +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also diff --git a/docs/content/commands/npm-help.md b/docs/content/commands/npm-help.md index 56e46645522ba..81f55db332eda 100644 --- a/docs/content/commands/npm-help.md +++ b/docs/content/commands/npm-help.md @@ -10,6 +10,8 @@ description: Get help on npm npm help <term> [<terms..>] ``` +Note: This command is unaware of workspaces. + ### Description If supplied a topic, then show the appropriate documentation page. 
@@ -21,15 +23,19 @@ topic, so unique matches are equivalent to specifying a topic name. ### Configuration -#### viewer +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `viewer` * Default: "man" on Posix, "browser" on Windows -* Type: path +* Type: String The program to use to view help content. Set to `"browser"` to view html help content in the default web browser. +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm](/commands/npm) diff --git a/docs/content/commands/npm-hook.md b/docs/content/commands/npm-hook.md index 2ac548ada0c21..2917375ea38cd 100644 --- a/docs/content/commands/npm-hook.md +++ b/docs/content/commands/npm-hook.md @@ -13,6 +13,8 @@ npm hook update <id> <url> [secret] npm hook rm <id> ``` +Note: This command is unaware of workspaces. + ### Description Allows you to manage [npm @@ -81,6 +83,30 @@ Remove a hook: $ npm hook rm id-deadbeef ``` +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. + +#### `otp` + +* Default: null +* Type: null or String + +This is a one-time password from a two-factor authenticator. It's needed +when publishing or changing package permissions with `npm access`. + +If not set, and a registry response fails with a challenge for a one-time +password, npm will prompt on the command line for one. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * ["Introducing Hooks" blog post](https://blog.npmjs.org/post/145260155635/introducing-hooks-get-notifications-of-npm) diff --git a/docs/content/commands/npm-init.md b/docs/content/commands/npm-init.md index 8a40d90e8354d..54c3bdb4b74ab 100644 --- a/docs/content/commands/npm-init.md +++ b/docs/content/commands/npm-init.md @@ -1,15 +1,16 @@ --- title: npm-init section: 1 -description: create a package.json file +description: Create a package.json file --- ### Synopsis ```bash -npm init [--force|-f|--yes|-y|--scope] -npm init <@scope> (same as `npx <@scope>/create`) -npm init [<@scope>/]<name> (same as `npx [<@scope>/]create-<name>`) +npm init [--yes|-y|--scope] +npm init <@scope> (same as `npm exec <@scope>/create`) +npm init [<@scope>/]<name> (same as `npm exec [<@scope>/]create-<name>`) +npm init [-w <dir>] [args...] ``` ### Description @@ -18,19 +19,16 @@ npm init [<@scope>/]<name> (same as `npx [<@scope>/]create-<name>`) package. `initializer` in this case is an npm package named `create-<initializer>`, -which will be installed by [`npx`](https://npm.im/npx), and then have its +which will be installed by [`npm-exec`](/commands/npm-exec), and then have its main bin executed -- presumably creating or updating `package.json` and running any other initialization-related operations. -The init command is transformed to a corresponding `npx` operation as +The init command is transformed to a corresponding `npm exec` operation as follows: -* `npm init foo` -> `npx create-foo` -* `npm init @usr/foo` -> `npx @usr/create-foo` -* `npm init @usr` -> `npx @usr/create` - -Any additional options will be passed directly to the command, so `npm init -foo -- --hello` will map to `npx create-foo --hello`. +* `npm init foo` -> `npm exec create-foo` +* `npm init @usr/foo` -> `npm exec @usr/create-foo` +* `npm init @usr` -> `npm exec @usr/create` If the initializer is omitted (by just calling `npm init`), init will fall back to legacy init behavior. 
It will ask you a bunch of questions, and @@ -40,6 +38,18 @@ strictly additive, so it will keep any fields and values that were already set. You can also use `-y`/`--yes` to skip the questionnaire altogether. If you pass `--scope`, it will create a scoped package. +#### Forwarding additional options + +Any additional options will be passed directly to the command, so `npm init +foo -- --hello` will map to `npm exec -- create-foo --hello`. + +To better illustrate how options are forwarded, here's a more evolved +example showing options passed to both the **npm cli** and a create package, +both following commands are equivalent: + +- `npm init foo -y --registry=<url> -- --hello -a` +- `npm exec -y --registry=<url> -- create-foo --hello -a` + ### Examples Create a new React-based project using @@ -71,27 +81,138 @@ Generate it without having it ask any questions: $ npm init -y ``` -### A note on caching +### Workspaces support + +It's possible to create a new workspace within your project by using the +`workspace` config option. When using `npm init -w <dir>` the cli will +create the folders and boilerplate expected while also adding a reference +to your project `package.json` `"workspaces": []` property in order to make +sure that new generated **workspace** is properly set up as such. + +Given a project with no workspaces, e.g: + +``` +. ++-- package.json +``` + +You may generate a new workspace using the legacy init: + +```bash +$ npm init -w packages/a +``` + +That will generate a new folder and `package.json` file, while also updating +your top-level `package.json` to add the reference to this new workspace: + +``` +. ++-- package.json +`-- packages + `-- a + `-- package.json +``` + +The workspaces init also supports the `npm init <initializer> -w <dir>` +syntax, following the same set of rules explained earlier in the initial +**Description** section of this page. Similar to the previous example of +creating a new React-based project using +[`create-react-app`](https://npm.im/create-react-app), the following syntax +will make sure to create the new react app as a nested **workspace** within your +project and configure your `package.json` to recognize it as such: + +```bash +npm init -w packages/my-react-app react-app . +``` + +This will make sure to generate your react app as expected, one important +consideration to have in mind is that `npm exec` is going to be run in the +context of the newly created folder for that workspace, and that's the reason +why in this example the initializer uses the initializer name followed with a +dot to represent the current directory in that context, e.g: `react-app .`: + +``` +. ++-- package.json +`-- packages + +-- a + | `-- package.json + `-- my-react-app + +-- README + +-- package.json + `-- ... +``` + +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `yes` + +* Default: null +* Type: null or Boolean + +Automatically answer "yes" to any prompts that npm might print on the +command line. + +#### `force` + +* Default: false +* Type: Boolean + +Removes various protections against unfortunate side effects, common +mistakes, unnecessary performance degradation, and malicious input. + +* Allow clobbering non-npm files in global installs. +* Allow the `npm version` command to work on an unclean git repository. +* Allow deleting the cache folder with `npm cache clean`. +* Allow installing packages that have an `engines` declaration requiring a + different version of npm. 
+* Allow installing packages that have an `engines` declaration requiring a + different version of `node`, even if `--engine-strict` is enabled. +* Allow `npm audit fix` to install modules outside your stated dependency + range (including SemVer-major changes). +* Allow unpublishing all versions of a published package. +* Allow conflicting peerDependencies to be installed in the root project. +* Implicitly set `--yes` during `npm init`. +* Allow clobbering existing values in `npm pkg` + +If you don't have a clear idea of what you want to do, it is strongly +recommended that you do not use this option! + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) -The npm cli utilizes its internal package cache when using the package -name specified. You can use the following to change how and when the -cli uses this cache. See [`npm cache`](/commands/npm-cache) for more on -how the cache works. +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. -#### prefer-online +This value is not exported to the environment for child processes. -Forces staleness checks for packages, making the cli look for updates -immediately even if the package is already in the cache. +#### `workspaces` -#### prefer-offline +* Default: false +* Type: Boolean -Bypasses staleness checks for packages. Missing data will still be -requested from the server. To force full offline mode, use `offline`. +Enable running a command in the context of **all** the configured +workspaces. -#### offline +This value is not exported to the environment for child processes. -Forces full offline mode. Any packages not locally cached will result in -an error. +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also @@ -100,3 +221,4 @@ an error. * [npm version](/commands/npm-version) * [npm scope](/using-npm/scope) * [npm exec](/commands/npm-exec) +* [npm workspaces](/using-npm/workspaces) diff --git a/docs/content/commands/npm-install-ci-test.md b/docs/content/commands/npm-install-ci-test.md index 568d174cf44c5..2640311cf94be 100644 --- a/docs/content/commands/npm-install-ci-test.md +++ b/docs/content/commands/npm-install-ci-test.md @@ -16,6 +16,42 @@ alias: npm cit This command runs `npm ci` followed immediately by `npm test`. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `audit` + +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. + +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. 
+ +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + +#### `script-shell` + +* Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows +* Type: null or String + +The shell to use for scripts run with the `npm exec`, `npm run` and `npm +init <pkg>` commands. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm install-test](/commands/npm-install-test) diff --git a/docs/content/commands/npm-install-test.md b/docs/content/commands/npm-install-test.md index 6350c5d01ad70..c8533cafedd7a 100644 --- a/docs/content/commands/npm-install-test.md +++ b/docs/content/commands/npm-install-test.md @@ -25,6 +25,203 @@ common options: [--save|--save-dev|--save-optional] [--save-exact] [--dry-run] This command runs an `npm install` followed immediately by an `npm test`. It takes exactly the same arguments as `npm install`. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `save` + +* Default: true +* Type: Boolean + +Save installed packages to a package.json file as dependencies. + +When used with the `npm rm` command, removes the dependency from +package.json. + +#### `save-exact` + +* Default: false +* Type: Boolean + +Dependencies saved to package.json will be configured with an exact version +rather than using npm's default semver range operator. + +#### `global` + +* Default: false +* Type: Boolean + +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + +#### `global-style` + +* Default: false +* Type: Boolean + +Causes npm to install the package into your local `node_modules` folder with +the same layout it uses with the global `node_modules` folder. Only your +direct dependencies will show in `node_modules` and everything they depend +on will be flattened in their `node_modules` folders. This obviously will +eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling` +will be preferred. + +#### `legacy-bundling` + +* Default: false +* Type: Boolean + +Causes npm to install the package such that versions of npm prior to 1.4, +such as the one included with node 0.8, can install the package. This +eliminates all automatic deduping. If used with `global-style` this option +will be preferred. + +#### `strict-peer-deps` + +* Default: false +* Type: Boolean + +If set to `true`, and `--legacy-peer-deps` is not set, then _any_ +conflicting `peerDependencies` will be treated as an install failure, even +if npm could reasonably guess the appropriate resolution based on non-peer +dependency relationships. + +By default, conflicting `peerDependencies` deep in the dependency graph will +be resolved using the nearest non-peer dependency specification, even if +doing so will result in some packages receiving a peer dependency outside +the range set in their package's `peerDependencies` object. + +When such and override is performed, a warning is printed, explaining the +conflict and the packages involved. 
If `--strict-peer-deps` is set, then +this warning is treated as a failure. + +#### `package-lock` + +* Default: true +* Type: Boolean + +If set to false, then ignore `package-lock.json` files when installing. This +will also prevent _writing_ `package-lock.json` if `save` is true. + +When package package-locks are disabled, automatic pruning of extraneous +modules will also be disabled. To remove extraneous modules with +package-locks disabled use `npm prune`. + +#### `omit` + +* Default: 'dev' if the `NODE_ENV` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) + +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +`package-lock.json` or `npm-shrinkwrap.json` file. They are just not +physically installed on disk. + +If a package type appears in both the `--include` and `--omit` lists, then +it will be included. + +If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment +variable will be set to `'production'` for all lifecycle scripts. + +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + +#### `audit` + +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. + +#### `bin-links` + +* Default: true +* Type: Boolean + +Tells npm to create symlinks (or `.cmd` shims on Windows) for package +executables. + +Set to false to have it not do this. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems. + +#### `fund` + +* Default: true +* Type: Boolean + +When "true" displays the message at the end of each `npm install` +acknowledging the number of dependencies looking for funding. See [`npm +fund`](/commands/npm-fund) for details. + +#### `dry-run` + +* Default: false +* Type: Boolean + +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, `install`, `update`, +`dedupe`, `uninstall`, as well as `pack` and `publish`. + +Note: This is NOT honored by other network related commands, eg `dist-tags`, +`owner`, etc. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. 
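+As a sketch of the workspace filter described above (the workspace path
+`packages/a` is only an example):
+
+```bash
+# run the install-and-test cycle in the context of a single workspace
+npm install-test --workspace packages/a
+```
+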
+ +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm install](/commands/npm-install) diff --git a/docs/content/commands/npm-install.md b/docs/content/commands/npm-install.md index 5ab9275ee4eaf..70d4c0d46ffeb 100644 --- a/docs/content/commands/npm-install.md +++ b/docs/content/commands/npm-install.md @@ -411,89 +411,200 @@ does. These are some of the most common options related to installation. -#### Configuration Options Affecting Dependency Resolution And Tree Design +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `save` -* `-g` or `--global`: install the package globally rather than locally. - See [folders](/configuring-npm/folders). +* Default: true +* Type: Boolean -* `--global-style`: install the package into your local `node_modules` - folder with the same layout it uses with the global `node_modules` - folder. Only your direct dependencies will show in `node_modules` and - everything they depend on will be flattened in their `node_modules` - folders. This obviously will eliminate some deduping. +Save installed packages to a package.json file as dependencies. -* `--legacy-bundling`: install the package in the style of versions of npm - prior to 1.4, where dependencies are not automatically deduped up to the - shallowest level in the tree possible. This is extremely - disk-inefficient. +When used with the `npm rm` command, removes the dependency from +package.json. -* `--legacy-peer-deps`: ignore all `peerDependencies` when installing, in - the style of npm version 4 through version 6. +#### `save-exact` -* `--strict-peer-deps`: fail and abort the install process for any - conflicting peerDependencies when encountered. By default, npm will only - crash for peerDependencies conflicts caused by the direct dependencies of - the root project. +* Default: false +* Type: Boolean -* `--no-package-lock` (alias: `--no-shrinkwrap`): do not read the - lockfile (`package-lock.json` or `npm-shrinkwrap.json`) for the intended - package tree, and do not save the resulting package tree back to a - lockfile. +Dependencies saved to package.json will be configured with an exact version +rather than using npm's default semver range operator. -#### Omitting Dependency Types +#### `global` -You may omit certain types of dependencies by using the `--omit=<type>` -config option. This may be specified multiple types on the command line. -To enter `omit` options in `.npmrc` files, use the following syntax: +* Default: false +* Type: Boolean -```ini -omit[] = dev -omit[] = optional -; etc... -``` +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + +#### `global-style` + +* Default: false +* Type: Boolean + +Causes npm to install the package into your local `node_modules` folder with +the same layout it uses with the global `node_modules` folder. 
Only your +direct dependencies will show in `node_modules` and everything they depend +on will be flattened in their `node_modules` folders. This obviously will +eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling` +will be preferred. + +#### `legacy-bundling` + +* Default: false +* Type: Boolean + +Causes npm to install the package such that versions of npm prior to 1.4, +such as the one included with node 0.8, can install the package. This +eliminates all automatic deduping. If used with `global-style` this option +will be preferred. + +#### `strict-peer-deps` + +* Default: false +* Type: Boolean + +If set to `true`, and `--legacy-peer-deps` is not set, then _any_ +conflicting `peerDependencies` will be treated as an install failure, even +if npm could reasonably guess the appropriate resolution based on non-peer +dependency relationships. + +By default, conflicting `peerDependencies` deep in the dependency graph will +be resolved using the nearest non-peer dependency specification, even if +doing so will result in some packages receiving a peer dependency outside +the range set in their package's `peerDependencies` object. + +When such and override is performed, a warning is printed, explaining the +conflict and the packages involved. If `--strict-peer-deps` is set, then +this warning is treated as a failure. + +#### `package-lock` + +* Default: true +* Type: Boolean + +If set to false, then ignore `package-lock.json` files when installing. This +will also prevent _writing_ `package-lock.json` if `save` is true. + +When package package-locks are disabled, automatic pruning of extraneous +modules will also be disabled. To remove extraneous modules with +package-locks disabled use `npm prune`. + +#### `omit` + +* Default: 'dev' if the `NODE_ENV` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) + +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +`package-lock.json` or `npm-shrinkwrap.json` file. They are just not +physically installed on disk. + +If a package type appears in both the `--include` and `--omit` lists, then +it will be included. + +If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment +variable will be set to `'production'` for all lifecycle scripts. + +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + +#### `audit` + +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. + +#### `bin-links` + +* Default: true +* Type: Boolean + +Tells npm to create symlinks (or `.cmd` shims on Windows) for package +executables. + +Set to false to have it not do this. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems. 
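+To illustrate a few of the options above together (the values chosen here
+are only examples, not recommendations):
+
+```bash
+# install without lifecycle scripts, devDependencies, or bin symlinks
+npm install --ignore-scripts --omit=dev --no-bin-links
+```
+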
+ +#### `fund` + +* Default: true +* Type: Boolean + +When "true" displays the message at the end of each `npm install` +acknowledging the number of dependencies looking for funding. See [`npm +fund`](/commands/npm-fund) for details. + +#### `dry-run` -The dependency types that may be omitted or included are: +* Default: false +* Type: Boolean -* `peer`: any `peerDependencies`, including those with a - `peerDependenciesMeta` entry specifying `optional: true` -* `optional`: dependencies listed in `optionalDependencies` -* `dev`: dependencies listed in `devDependencies` +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, `install`, `update`, +`dedupe`, `uninstall`, as well as `pack` and `publish`. -To re-include dependency, use the `--include` option, which may also be -specified multiple times. +Note: This is NOT honored by other network related commands, eg `dist-tags`, +`owner`, etc. -Legacy shorthands for `omit` settings are: +#### `workspace` -* `--no-optional`: prevent optionalDependencies from being installed. Note - that their presence is still entered in the `package-lock.json` file, and - the tree is designed such that they _can_ be installed in the future. +* Default: +* Type: String (can be set multiple times) -* `--prod`: prevent devDependencies from being installed. +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. -* `--only=prod`: omit `devDependencies` +Valid values for the `workspace` config are either: -* `--also=dev`: include `devDependencies` +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) -#### Configuration Options Affecting Build Process +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. -* `--ignore-scripts`: do not execute any scripts defined in the - package.json. See [`scripts`](/using-npm/scripts). +This value is not exported to the environment for child processes. -* `--no-audit`: disable sending audit reports to the configured registries. - See [`npm-audit`](npm-audit) for details on what is sent. +#### `workspaces` -* `--no-bin-links`: prevent npm from creating symlinks for any binaries the - package might contain. +* Default: false +* Type: Boolean -* `--no-fund`: suppress the message displayed at the end of each install - that acknowledges the number of dependencies looking for funding. See - [`npm-fund`](/commands/npm-fund) +Enable running a command in the context of **all** the configured +workspaces. -* `--dry-run`: Do not actually install anything into the `node_modules` - folder. Just build the intended tree in memory, and report on it. +This value is not exported to the environment for child processes. -* `--no-save`: Do not save installed dependencies to `package.json` or - `package-lock.json`. +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### Algorithm @@ -538,7 +649,6 @@ the specific folder structures that npm creates. 
* [npm link](/commands/npm-link) * [npm rebuild](/commands/npm-rebuild) * [npm scripts](/using-npm/scripts) -* [npm build](/commands/npm-build) * [npm config](/commands/npm-config) * [npmrc](/configuring-npm/npmrc) * [npm registry](/using-npm/registry) diff --git a/docs/content/commands/npm-link.md b/docs/content/commands/npm-link.md index 1a835001fc64f..c7b385009519a 100644 --- a/docs/content/commands/npm-link.md +++ b/docs/content/commands/npm-link.md @@ -99,6 +99,213 @@ relevant metadata by running `npm install <dep> --package-lock-only`. If you _want_ to save the `file:` reference in your `package.json` and `package-lock.json` files, you can use `npm link <dep> --save` to do so. +### Workspace Usage + +`npm link <pkg> --workspace <name>` will link the relevant package as a +dependency of the specified workspace(s). Note that It may actually be +linked into the parent project's `node_modules` folder, if there are no +conflicting dependencies. + +`npm link --workspace <name>` will create a global link to the specified +workspace(s). + +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `save` + +* Default: true +* Type: Boolean + +Save installed packages to a package.json file as dependencies. + +When used with the `npm rm` command, removes the dependency from +package.json. + +#### `save-exact` + +* Default: false +* Type: Boolean + +Dependencies saved to package.json will be configured with an exact version +rather than using npm's default semver range operator. + +#### `global` + +* Default: false +* Type: Boolean + +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + +#### `global-style` + +* Default: false +* Type: Boolean + +Causes npm to install the package into your local `node_modules` folder with +the same layout it uses with the global `node_modules` folder. Only your +direct dependencies will show in `node_modules` and everything they depend +on will be flattened in their `node_modules` folders. This obviously will +eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling` +will be preferred. + +#### `legacy-bundling` + +* Default: false +* Type: Boolean + +Causes npm to install the package such that versions of npm prior to 1.4, +such as the one included with node 0.8, can install the package. This +eliminates all automatic deduping. If used with `global-style` this option +will be preferred. + +#### `strict-peer-deps` + +* Default: false +* Type: Boolean + +If set to `true`, and `--legacy-peer-deps` is not set, then _any_ +conflicting `peerDependencies` will be treated as an install failure, even +if npm could reasonably guess the appropriate resolution based on non-peer +dependency relationships. + +By default, conflicting `peerDependencies` deep in the dependency graph will +be resolved using the nearest non-peer dependency specification, even if +doing so will result in some packages receiving a peer dependency outside +the range set in their package's `peerDependencies` object. + +When such and override is performed, a warning is printed, explaining the +conflict and the packages involved. 
If `--strict-peer-deps` is set, then +this warning is treated as a failure. + +#### `package-lock` + +* Default: true +* Type: Boolean + +If set to false, then ignore `package-lock.json` files when installing. This +will also prevent _writing_ `package-lock.json` if `save` is true. + +When package package-locks are disabled, automatic pruning of extraneous +modules will also be disabled. To remove extraneous modules with +package-locks disabled use `npm prune`. + +#### `omit` + +* Default: 'dev' if the `NODE_ENV` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) + +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +`package-lock.json` or `npm-shrinkwrap.json` file. They are just not +physically installed on disk. + +If a package type appears in both the `--include` and `--omit` lists, then +it will be included. + +If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment +variable will be set to `'production'` for all lifecycle scripts. + +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + +#### `audit` + +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. + +#### `bin-links` + +* Default: true +* Type: Boolean + +Tells npm to create symlinks (or `.cmd` shims on Windows) for package +executables. + +Set to false to have it not do this. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems. + +#### `fund` + +* Default: true +* Type: Boolean + +When "true" displays the message at the end of each `npm install` +acknowledging the number of dependencies looking for funding. See [`npm +fund`](/commands/npm-fund) for details. + +#### `dry-run` + +* Default: false +* Type: Boolean + +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, `install`, `update`, +`dedupe`, `uninstall`, as well as `pack` and `publish`. + +Note: This is NOT honored by other network related commands, eg `dist-tags`, +`owner`, etc. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. 
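+A short sketch of the workspace-scoped forms described in the Workspace
+Usage section above (the package and workspace names are hypothetical):
+
+```bash
+# link my-lib as a dependency of the packages/app workspace
+npm link my-lib --workspace packages/app
+
+# create a global link pointing at one workspace of the current project
+npm link --workspace packages/my-lib
+```
+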
+ +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm developers](/using-npm/developers) diff --git a/docs/content/commands/npm-logout.md b/docs/content/commands/npm-logout.md index 7fa858a99993d..000b1006e8b35 100644 --- a/docs/content/commands/npm-logout.md +++ b/docs/content/commands/npm-logout.md @@ -10,6 +10,8 @@ description: Log out of the registry npm logout [--registry=<url>] [--scope=<@scope>] ``` +Note: This command is unaware of workspaces. + ### Description When logged into a registry that supports token-based authentication, tell @@ -25,22 +27,46 @@ connected to that scope, if set. ### Configuration -#### registry +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` -Default: https://registry.npmjs.org/ +* Default: "https://registry.npmjs.org/" +* Type: URL -The base URL of the npm package registry. If `scope` is also specified, -it takes precedence. +The base URL of the npm registry. -#### scope +#### `scope` -Default: The scope of your current project, if any, otherwise none. +* Default: the scope of the current project, if any, or "" +* Type: String -If specified, you will be logged out of the specified scope. See [`scope`](/using-npm/scope). +Associate an operation with a scope for a scoped registry. + +Useful when logging in to or out of a private registry: + +``` +# log in, linking the scope to the custom registry +npm login --scope=@mycorp --registry=https://registry.mycorp.com + +# log out, removing the link and the auth token +npm logout --scope=@mycorp +``` + +This will cause `@mycorp` to be mapped to the registry for future +installation of packages specified according to the pattern +`@mycorp/package`. + +This will also cause `npm init` to create a scoped package. -```bash -npm logout --scope=@myco ``` +# accept all defaults, and create a package named "@foo/whatever", +# instead of just named "whatever" +npm init --scope=@foo --yes +``` + + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also diff --git a/docs/content/commands/npm-ls.md b/docs/content/commands/npm-ls.md index 7abdbf82a5712..350f40a9991e5 100644 --- a/docs/content/commands/npm-ls.md +++ b/docs/content/commands/npm-ls.md @@ -73,86 +73,149 @@ least the default human-readable `npm ls` output in npm v8. ### Configuration -#### all +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `all` -* Default: `false` +* Default: false * Type: Boolean When running `npm outdated` and `npm ls`, setting `--all` will show all outdated or installed packages, rather than only those directly depended upon by the current project. -#### json +#### `json` * Default: false * Type: Boolean -Show information in JSON format. +Whether or not to output JSON data, rather than the normal output. + +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + +Not supported by all npm commands. -#### long +#### `long` * Default: false * Type: Boolean -Show extended information. +Show extended information in `ls`, `search`, and `help-search`. -#### parseable +#### `parseable` * Default: false * Type: Boolean -Show parseable output instead of tree view. +Output parseable results from commands that write to standard output. 
For +`npm search`, this will be tab-separated table format. -#### global +#### `global` * Default: false * Type: Boolean -List packages in the global install prefix instead of in the current -project. +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + +#### `depth` + +* Default: `Infinity` if `--all` is set, otherwise `1` +* Type: null or Number -#### depth +The depth to go when recursing packages for `npm ls`. -* Type: Int +If not set, `npm ls` will show only the immediate dependencies of the root +project. If `--all` is set, then npm will show all dependencies by default. -Max display depth of the dependency tree. +#### `omit` -#### prod / production +* Default: 'dev' if the `NODE_ENV` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +`package-lock.json` or `npm-shrinkwrap.json` file. They are just not +physically installed on disk. + +If a package type appears in both the `--include` and `--omit` lists, then +it will be included. + +If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment +variable will be set to `'production'` for all lifecycle scripts. + +#### `link` + +* Default: false * Type: Boolean + +Used with `npm ls`, limiting output to only those packages that are linked. + +#### `package-lock-only` + * Default: false +* Type: Boolean + +If set to true, the current operation will only use the `package-lock.json`, +ignoring `node_modules`. + +For `update` this means only the `package-lock.json` will be updated, +instead of checking `node_modules` and downloading dependencies. -Display only the dependency tree for packages in `dependencies`. +For `list` this means the output will be based on the tree described by the +`package-lock.json`, rather than the contents of `node_modules`. -#### dev / development +#### `unicode` +* Default: false on windows, true on mac/unix systems with a unicode locale, + as defined by the `LC_ALL`, `LC_CTYPE`, or `LANG` environment variables. * Type: Boolean -* Default: false -Display only the dependency tree for packages in `devDependencies`. +When set to true, npm uses unicode characters in the tree output. When +false, it uses ascii characters instead of unicode glyphs. -#### only +#### `workspace` -* Type: String +* Default: +* Type: String (can be set multiple times) -When "dev" or "development", is an alias to `dev`. +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. -When "prod" or "production", is an alias to `production`. 
+Valid values for the `workspace` config are either: -#### link +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) -* Type: Boolean -* Default: false +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. -Display only dependencies which are linked +This value is not exported to the environment for child processes. -#### unicode +#### `workspaces` +* Default: false * Type: Boolean -* Default: true -Whether to represent the tree structure using unicode characters. -Set it to false in order to use all-ansi output. +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also diff --git a/docs/content/commands/npm-org.md b/docs/content/commands/npm-org.md index 18047d109cc0b..269f5cc3ee5b8 100644 --- a/docs/content/commands/npm-org.md +++ b/docs/content/commands/npm-org.md @@ -12,6 +12,8 @@ npm org rm <orgname> <username> npm org ls <orgname> [<username>] ``` +Note: This command is unaware of workspaces. + ### Example Add a new developer to an org: @@ -56,6 +58,50 @@ You can use the `npm org` commands to manage and view users of an organization. It supports adding and removing users, changing their roles, listing them, and finding specific ones and their roles. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. + +#### `otp` + +* Default: null +* Type: null or String + +This is a one-time password from a two-factor authenticator. It's needed +when publishing or changing package permissions with `npm access`. + +If not set, and a registry response fails with a challenge for a one-time +password, npm will prompt on the command line for one. + +#### `json` + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + +Not supported by all npm commands. + +#### `parseable` + +* Default: false +* Type: Boolean + +Output parseable results from commands that write to standard output. For +`npm search`, this will be tab-separated table format. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [using orgs](/using-npm/orgs) diff --git a/docs/content/commands/npm-outdated.md b/docs/content/commands/npm-outdated.md index ee1157f332de0..40e5feafd4cc6 100644 --- a/docs/content/commands/npm-outdated.md +++ b/docs/content/commands/npm-outdated.md @@ -15,7 +15,8 @@ npm outdated [[<@scope>/]<pkg> ...] This command will check the registry to see if any (or, specific) installed packages are currently outdated. -By default, only the direct dependencies of the root project are shown. +By default, only the direct dependencies of the root project and direct +dependencies of your configured *workspaces* are shown. Use `--all` to find all outdated meta-dependencies as well. 
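+For example, to include every outdated package in the tree rather than only
+direct dependencies:
+
+```bash
+# include outdated meta-dependencies, not just direct dependencies
+npm outdated --all
+```
+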
In the output: @@ -85,41 +86,81 @@ A few things to note: ### Configuration -#### json +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `all` * Default: false * Type: Boolean -Show information in JSON format. +When running `npm outdated` and `npm ls`, setting `--all` will show all +outdated or installed packages, rather than only those directly depended +upon by the current project. -#### long +#### `json` * Default: false * Type: Boolean -Show extended information. +Whether or not to output JSON data, rather than the normal output. -#### parseable +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + +Not supported by all npm commands. + +#### `long` * Default: false * Type: Boolean -Show parseable output instead of tree view. +Show extended information in `ls`, `search`, and `help-search`. -#### global +#### `parseable` * Default: false * Type: Boolean -Check packages in the global install prefix instead of in the current -project. +Output parseable results from commands that write to standard output. For +`npm search`, this will be tab-separated table format. -#### all +#### `global` * Default: false * Type: Boolean -Display all outdated dependencies on the tree. +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also @@ -127,3 +168,4 @@ Display all outdated dependencies on the tree. * [npm dist-tag](/commands/npm-dist-tag) * [npm registry](/using-npm/registry) * [npm folders](/configuring-npm/folders) +* [npm workspaces](/using-npm/workspaces) diff --git a/docs/content/commands/npm-owner.md b/docs/content/commands/npm-owner.md index 69eba56afd97d..da22e899c2ebb 100644 --- a/docs/content/commands/npm-owner.md +++ b/docs/content/commands/npm-owner.md @@ -14,6 +14,8 @@ npm owner ls [<@scope>/]<pkg> aliases: author ``` +Note: This command is unaware of workspaces. + ### Description Manage ownership of published packages. @@ -33,6 +35,30 @@ If you have two-factor authentication enabled with `auth-and-writes` (see [`npm-profile`](/commands/npm-profile)) then you'll need to include an otp on the command line when changing ownership with `--otp`. 
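+An illustrative session (the user, package, and one-time password below are
+placeholders):
+
+```bash
+# list the current owners of a package
+npm owner ls my-package
+
+# add a new owner, supplying a one-time password if 2FA requires it
+npm owner add new-maintainer my-package --otp=123456
+```
+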
+### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. + +#### `otp` + +* Default: null +* Type: null or String + +This is a one-time password from a two-factor authenticator. It's needed +when publishing or changing package permissions with `npm access`. + +If not set, and a registry response fails with a challenge for a one-time +password, npm will prompt on the command line for one. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm profile](/commands/npm-profile) diff --git a/docs/content/commands/npm-pack.md b/docs/content/commands/npm-pack.md index cc6b669efb1ef..cd4a175919e7e 100644 --- a/docs/content/commands/npm-pack.md +++ b/docs/content/commands/npm-pack.md @@ -7,9 +7,79 @@ description: Create a tarball from a package ### Synopsis ```bash -npm pack [[<@scope>/]<pkg>...] [--dry-run] +npm pack [[<@scope>/]<pkg>...] [--dry-run] [--json] ``` +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `dry-run` + +* Default: false +* Type: Boolean + +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, `install`, `update`, +`dedupe`, `uninstall`, as well as `pack` and `publish`. + +Note: This is NOT honored by other network related commands, eg `dist-tags`, +`owner`, etc. + +#### `json` + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + +Not supported by all npm commands. + +#### `pack-destination` + +* Default: "." +* Type: String + +Directory in which `npm pack` will save tarballs. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### Description For anything that's installable (that is, a package folder, tarball, @@ -23,10 +93,6 @@ overwritten the second time. If no arguments are supplied, then npm packs the current package folder. -The `--dry-run` argument will do everything that pack usually does without -actually packing anything. That is, it reports on what would have gone -into the tarball, but nothing else. 
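+For instance, using the options documented above (the destination directory
+is only an example):
+
+```bash
+# write the tarball into ./artifacts instead of the current directory
+npm pack --pack-destination=./artifacts
+
+# report what would be packed, as JSON, without writing a tarball
+npm pack --dry-run --json
+```
+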
- ### See Also * [npm-packlist package](http://npm.im/npm-packlist) diff --git a/docs/content/commands/npm-ping.md b/docs/content/commands/npm-ping.md index 8de06aa184836..7c7b66b181b4a 100644 --- a/docs/content/commands/npm-ping.md +++ b/docs/content/commands/npm-ping.md @@ -10,6 +10,8 @@ description: Ping npm registry npm ping [--registry <registry>] ``` +Note: This command is unaware of workspaces. + ### Description Ping the configured or given npm registry and verify authentication. @@ -23,6 +25,19 @@ otherwise you will get: Ping error: {*Detail about error} ``` +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm doctor](/commands/npm-doctor) diff --git a/docs/content/commands/npm-pkg.md b/docs/content/commands/npm-pkg.md new file mode 100644 index 0000000000000..bc96eb8c3af94 --- /dev/null +++ b/docs/content/commands/npm-pkg.md @@ -0,0 +1,245 @@ +--- +title: npm-pkg +section: 1 +description: Manages your package.json +--- + +### Synopsis + +```bash +npm pkg get [<field> [.<subfield> ...]] +npm pkg set <field>=<value> [.<subfield>=<value> ...] +npm pkg delete <field> [.<subfield> ...] +``` + +### Description + +A command that automates the management of `package.json` files. +`npm pkg` provide 3 different sub commands that allow you to modify or retrieve +values for given object keys in your `package.json`. + +The syntax to retrieve and set fields is a dot separated representation of +the nested object properties to be found within your `package.json`, it's the +same notation used in [`npm view`](/commands/npm-view) to retrieve information +from the registry manifest, below you can find more examples on how to use it. + +Returned values are always in **json** format. + +* `npm pkg get <field>` + + Retrieves a value `key`, defined in your `package.json` file. + + For example, in order to retrieve the name of the current package, you + can run: + + ```bash + npm pkg get name + ``` + + It's also possible to retrieve multiple values at once: + + ```bash + npm pkg get name version + ``` + + You can view child fields by separating them with a period. To retrieve + the value of a test `script` value, you would run the following command: + + ```bash + npm pkg get scripts.test + ``` + + For fields that are arrays, requesting a non-numeric field will return + all of the values from the objects in the list. For example, to get all + the contributor emails for a package, you would run: + + ```bash + npm pkg get contributors.email + ``` + + You may also use numeric indices in square braces to specifically select + an item in an array field. To just get the email address of the first + contributor in the list, you can run: + + ```bash + npm pkg get contributors[0].email + ``` + +* `npm pkg set <field>=<value>` + + Sets a `value` in your `package.json` based on the `field` value. When + saving to your `package.json` file the same set of rules used during + `npm install` and other cli commands that touches the `package.json` file + are used, making sure to respect the existing indentation and possibly + applying some validation prior to saving values to the file. 
+ + The same syntax used to retrieve values from your package can also be used + to define new properties or overriding existing ones, below are some + examples of how the dot separated syntax can be used to edit your + `package.json` file. + + Defining a new bin named `mynewcommand` in your `package.json` that points + to a file `cli.js`: + + ```bash + npm pkg set bin.mynewcommand=cli.js + ``` + + Setting multiple fields at once is also possible: + + ```bash + npm pkg set description='Awesome package' engines.node='>=10' + ``` + + It's also possible to add to array values, for example to add a new + contributor entry: + + ```bash + npm pkg set contributors[0].name='Foo' contributors[0].email='foo@bar.ca' + ``` + + You may also append items to the end of an array using the special + empty bracket notation: + + ```bash + npm pkg set contributors[].name='Foo' contributors[].name='Bar' + ``` + + It's also possible to parse values as json prior to saving them to your + `package.json` file, for example in order to set a `"private": true` + property: + + ```bash + npm pkg set private=true --json + ``` + + It also enables saving values as numbers: + + ```bash + npm pkg set tap.timeout=60 --json + ``` + +* `npm pkg delete <key>` + + Deletes a `key` from your `package.json` + + The same syntax used to set values from your package can also be used + to remove existing ones. For example, in order to remove a script named + build: + + ```bash + npm pkg delete scripts.build + ``` + +### Workspaces support + +You can set/get/delete items across your configured workspaces by using the +`workspace` or `workspaces` config options. + +For example, setting a `funding` value across all configured workspaces +of a project: + +```bash +npm pkg set funding=https://example.com --ws +``` + +When using `npm pkg get` to retrieve info from your configured workspaces, the +returned result will be in a json format in which top level keys are the +names of each workspace, the values of these keys will be the result values +returned from each of the configured workspaces, e.g: + +``` +npm pkg get name version --ws +{ + "a": { + "name": "a", + "version": "1.0.0" + }, + "b": { + "name": "b", + "version": "1.0.0" + } +} +``` + +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `force` + +* Default: false +* Type: Boolean + +Removes various protections against unfortunate side effects, common +mistakes, unnecessary performance degradation, and malicious input. + +* Allow clobbering non-npm files in global installs. +* Allow the `npm version` command to work on an unclean git repository. +* Allow deleting the cache folder with `npm cache clean`. +* Allow installing packages that have an `engines` declaration requiring a + different version of npm. +* Allow installing packages that have an `engines` declaration requiring a + different version of `node`, even if `--engine-strict` is enabled. +* Allow `npm audit fix` to install modules outside your stated dependency + range (including SemVer-major changes). +* Allow unpublishing all versions of a published package. +* Allow conflicting peerDependencies to be installed in the root project. +* Implicitly set `--yes` during `npm init`. +* Allow clobbering existing values in `npm pkg` + +If you don't have a clear idea of what you want to do, it is strongly +recommended that you do not use this option! 
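The `force` entry above only hints at its interaction with `npm pkg` ("allow clobbering existing values"); the following is a speculative sketch rather than a documented recipe, and whether a given write actually needs `--force` depends on the value being replaced.

```bash
# Overwrite an existing script entry; --force relaxes npm's usual
# protections, including the npm pkg clobbering protection noted above.
npm pkg set scripts.test='tap test/*.js' --force
```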
+ +#### `json` + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + +Not supported by all npm commands. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> +## See Also + +* [npm install](/commands/npm-install) +* [npm init](/commands/npm-init) +* [npm config](/commands/npm-config) +* [npm set-script](/commands/npm-set-script) +* [workspaces](/using-npm/workspaces) diff --git a/docs/content/commands/npm-prefix.md b/docs/content/commands/npm-prefix.md index 9c33bb18901ef..0523c9e19513d 100644 --- a/docs/content/commands/npm-prefix.md +++ b/docs/content/commands/npm-prefix.md @@ -10,6 +10,8 @@ description: Display prefix npm prefix [-g] ``` +Note: This command is unaware of workspaces. + ### Description Print the local prefix to standard output. This is the closest parent directory @@ -31,6 +33,26 @@ npm prefix -g /usr/local ``` +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `global` + +* Default: false +* Type: Boolean + +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm root](/commands/npm-root) diff --git a/docs/content/commands/npm-profile.md b/docs/content/commands/npm-profile.md index 88edf26d87c41..079440d785815 100644 --- a/docs/content/commands/npm-profile.md +++ b/docs/content/commands/npm-profile.md @@ -14,6 +14,8 @@ npm profile enable-2fa [auth-and-writes|auth-only] npm profile disable-2fa ``` +Note: This command is unaware of workspaces. + ### Description Change your profile information on the registry. Note that this command @@ -69,12 +71,58 @@ support this interface. ### Details -All of the `npm profile` subcommands accept `--json` and `--parseable` and -will tailor their output based on those. Some of these commands may not be -available on non npmjs.com registries. +Some of these commands may not be available on non npmjs.com registries. 
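A short, illustrative `npm profile` session based on the subcommands listed in the synopsis; the `fullname` value is a placeholder, and availability depends on your registry.

```bash
# Show your registry profile; add --json for machine-readable output.
npm profile get
npm profile get --json

# Update a single writable property (placeholder value).
npm profile set fullname "Ada Lovelace"

# Require a one-time password for both logins and writes.
npm profile enable-2fa auth-and-writes
```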
+ +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. + +#### `json` + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + +Not supported by all npm commands. + +#### `parseable` + +* Default: false +* Type: Boolean + +Output parseable results from commands that write to standard output. For +`npm search`, this will be tab-separated table format. + +#### `otp` + +* Default: null +* Type: null or String + +This is a one-time password from a two-factor authenticator. It's needed +when publishing or changing package permissions with `npm access`. + +If not set, and a registry response fails with a challenge for a one-time +password, npm will prompt on the command line for one. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also * [npm adduser](/commands/npm-adduser) -* [npm logout](/commands/npm-logout) +* [npm registry](/using-npm/registry) * [npm config](/commands/npm-config) +* [npmrc](/configuring-npm/npmrc) +* [npm owner](/commands/npm-owner) +* [npm whoami](/commands/npm-whoami) +* [npm token](/commands/npm-token) diff --git a/docs/content/commands/npm-prune.md b/docs/content/commands/npm-prune.md index 088c1c3470faf..d9b5b068f7a4b 100644 --- a/docs/content/commands/npm-prune.md +++ b/docs/content/commands/npm-prune.md @@ -33,6 +33,87 @@ only need this command with the `--production` flag. However, in the real world, operation is not always "normal". When crashes or mistakes happen, this command can help clean up any resulting garbage. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `omit` + +* Default: 'dev' if the `NODE_ENV` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) + +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +`package-lock.json` or `npm-shrinkwrap.json` file. They are just not +physically installed on disk. + +If a package type appears in both the `--include` and `--omit` lists, then +it will be included. + +If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment +variable will be set to `'production'` for all lifecycle scripts. + +#### `dry-run` + +* Default: false +* Type: Boolean + +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, `install`, `update`, +`dedupe`, `uninstall`, as well as `pack` and `publish`. + +Note: This is NOT honored by other network related commands, eg `dist-tags`, +`owner`, etc. + +#### `json` + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + +Not supported by all npm commands. 
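For example, the `dry-run` and `json` options above can be combined to audit a working tree before actually pruning it, and `--production` (mentioned earlier) drops devDependencies as well:

```bash
# Preview what would be removed, without touching node_modules.
npm prune --dry-run

# The same preview as JSON, for tooling.
npm prune --dry-run --json

# Remove extraneous packages and also strip devDependencies, as on a
# production host.
npm prune --production
```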
+ +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm uninstall](/commands/npm-uninstall) diff --git a/docs/content/commands/npm-publish.md b/docs/content/commands/npm-publish.md index fc13e67222358..0d25d7d29da8d 100644 --- a/docs/content/commands/npm-publish.md +++ b/docs/content/commands/npm-publish.md @@ -47,6 +47,13 @@ by specifying a different default registry or using a actually publishing to the registry. Reports the details of what would have been published. +* `[--workspaces]`: Enables workspace context while publishing. All + workspace packages will be published. + +* `[--workspace]`: Enables workspaces context and limits results to only + those specified by this config item. Only the packages in the + workspaces given will be published. + The publish will fail if the package name and version combination already exists in the specified registry. @@ -97,6 +104,92 @@ See [`developers`](/using-npm/developers) for full details on what's included in the published package, as well as details on how the package is built. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `tag` + +* Default: "latest" +* Type: String + +If you ask npm to install a package and don't tell it a specific version, +then it will install the specified tag. + +Also the tag that is added to the package@version specified by the `npm tag` +command, if no explicit tag is given. + +When used by the `npm diff` command, this is the tag used to fetch the +tarball that will be compared with the local files by default. + +#### `access` + +* Default: 'restricted' for scoped packages, 'public' for unscoped packages +* Type: null, "restricted", or "public" + +When publishing scoped packages, the access level defaults to `restricted`. +If you want your scoped package to be publicly viewable (and installable) +set `--access=public`. The only valid values for `access` are `public` and +`restricted`. Unscoped packages _always_ have an access level of `public`. + +#### `dry-run` + +* Default: false +* Type: Boolean + +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, `install`, `update`, +`dedupe`, `uninstall`, as well as `pack` and `publish`. + +Note: This is NOT honored by other network related commands, eg `dist-tags`, +`owner`, etc. + +#### `otp` + +* Default: null +* Type: null or String + +This is a one-time password from a two-factor authenticator. 
It's needed +when publishing or changing package permissions with `npm access`. + +If not set, and a registry response fails with a challenge for a one-time +password, npm will prompt on the command line for one. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm-packlist package](http://npm.im/npm-packlist) diff --git a/docs/content/commands/npm-rebuild.md b/docs/content/commands/npm-rebuild.md index 0a7ade6b165b4..49c822d730526 100644 --- a/docs/content/commands/npm-rebuild.md +++ b/docs/content/commands/npm-rebuild.md @@ -24,6 +24,82 @@ If one or more package names (and optionally version ranges) are provided, then only packages with a name and version matching one of the specifiers will be rebuilt. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `global` + +* Default: false +* Type: Boolean + +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + +#### `bin-links` + +* Default: true +* Type: Boolean + +Tells npm to create symlinks (or `.cmd` shims on Windows) for package +executables. + +Set to false to have it not do this. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems. + +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. 
+ +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm install](/commands/npm-install) diff --git a/docs/content/commands/npm-repo.md b/docs/content/commands/npm-repo.md index 670345bece5c5..ade08e7d938e7 100644 --- a/docs/content/commands/npm-repo.md +++ b/docs/content/commands/npm-repo.md @@ -19,18 +19,54 @@ in the current folder and use the `repository` property. ### Configuration -#### browser +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `browser` * Default: OS X: `"open"`, Windows: `"start"`, Others: `"xdg-open"` -* Type: String or Boolean +* Type: null, Boolean, or String -The browser that is called by the `npm repo` command to open websites. +The browser that is called by npm commands to open websites. Set to `false` to suppress browser behavior and instead print urls to terminal. Set to `true` to use default system URL opener. +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm docs](/commands/npm-docs) diff --git a/docs/content/commands/npm-restart.md b/docs/content/commands/npm-restart.md index 097c9fee7c9c3..4b905c2670695 100644 --- a/docs/content/commands/npm-restart.md +++ b/docs/content/commands/npm-restart.md @@ -34,6 +34,32 @@ If it does _not_ have a `"restart"` script specified, but it does have 8. poststart 9. postrestart +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. 
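As a small example of the `ignore-scripts` behavior just described for `npm restart` (a sketch, not an additional documented mode):

```bash
# Runs the "restart" script if present (or "stop" then "start"),
# but skips prerestart, postrestart, and the other pre/post hooks
# because ignore-scripts is set.
npm restart --ignore-scripts
```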
+ +#### `script-shell` + +* Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows +* Type: null or String + +The shell to use for scripts run with the `npm exec`, `npm run` and `npm +init <pkg>` commands. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm run-script](/commands/npm-run-script) diff --git a/docs/content/commands/npm-root.md b/docs/content/commands/npm-root.md index 0d694ac876e92..2d072c16dec00 100644 --- a/docs/content/commands/npm-root.md +++ b/docs/content/commands/npm-root.md @@ -23,6 +23,26 @@ global_node_modules="$(npm root --global)" echo "Global packages installed in: ${global_node_modules}" ``` +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `global` + +* Default: false +* Type: Boolean + +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm prefix](/commands/npm-prefix) diff --git a/docs/content/commands/npm-run-script.md b/docs/content/commands/npm-run-script.md index 8b89435e1a97b..5e3828c40717d 100644 --- a/docs/content/commands/npm-run-script.md +++ b/docs/content/commands/npm-run-script.md @@ -8,6 +8,8 @@ description: Run arbitrary package scripts ```bash npm run-script <command> [--if-present] [--silent] [-- <args>] +npm run-script <command> [--workspace=<workspace-name>] +npm run-script <command> [--workspaces] aliases: run, rum, urn ``` @@ -31,7 +33,7 @@ For example: npm run test -- --grep="pattern" ``` -The arguments will only be passed to the script specified after ```npm run``` +The arguments will only be passed to the script specified after `npm run` and not to any `pre` or `post` script. The `env` script is a special built-in command that can be used to list @@ -68,48 +70,139 @@ can use the `INIT_CWD` environment variable, which holds the full path you were in when you ran `npm run`. `npm run` sets the `NODE` environment variable to the `node` executable -with which `npm` is executed. Also, if the `--scripts-prepend-node-path` is -passed, the directory within which `node` resides is added to the `PATH`. -If `--scripts-prepend-node-path=auto` is passed (which has been the default -in `npm` v3), this is only performed when that `node` executable is not -found in the `PATH`. +with which `npm` is executed. If you try to run a script without having a `node_modules` directory and it fails, you will be given a warning to run `npm install`, just in case you've forgotten. +### Workspaces support + +You may use the `workspace` or `workspaces` configs in order to run an +arbitrary command from a package's `"scripts"` object in the context of the +specified workspaces. If no `"command"` is provided, it will list the available +scripts for each of these configured workspaces. + +Given a project with configured workspaces, e.g: + +``` +. ++-- package.json +`-- packages + +-- a + | `-- package.json + +-- b + | `-- package.json + `-- c + `-- package.json +``` + +Assuming the workspace configuration is properly set up at the root level +`package.json` file. 
e.g: + +``` +{ + "workspaces": [ "./packages/*" ] +} +``` + +And that each of the configured workspaces has a configured `test` script, +we can run tests in all of them using the `workspaces` config: + +``` +npm test --workspaces +``` + +#### Filtering workspaces + +It's also possible to run a script in a single workspace using the `workspace` +config along with a name or directory path: + +``` +npm test --workspace=a +``` + +The `workspace` config can also be specified multiple times in order to run a +specific script in the context of multiple workspaces. When defining values for +the `workspace` config in the command line, it also possible to use `-w` as a +shorthand, e.g: + +``` +npm test -w a -w b +``` + +This last command will run `test` in both `./packages/a` and `./packages/b` +packages. + ### Configuration -#### if-present +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `workspace` -* Type: Boolean -* Default: false +* Default: +* Type: String (can be set multiple times) -You can use the `--if-present` flag to avoid exiting with a non-zero exit code -when the script is undefined. This lets you run potentially undefined scripts -without breaking the execution chain. +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. -#### ignore-scripts +Valid values for the `workspace` config are either: -* Type: Boolean -* Default: false +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. -Skips running `pre` and `post` scripts. +#### `workspaces` -#### script-shell +* Default: false +* Type: Boolean -* Type: String -* Default: `null` +Enable running a command in the context of **all** the configured +workspaces. -Optional custom script to use to execute the command. If not defined defaults -to `/bin/sh` on Unix, defaults to `env.comspec` or `cmd.exe` on Windows. +This value is not exported to the environment for child processes. -#### silent +#### `if-present` +* Default: false * Type: Boolean + +If true, npm will not exit with an error code when `run-script` is invoked +for a script that isn't defined in the `scripts` section of `package.json`. +This option can be used when it's desirable to optionally run a script when +it's present and fail if the script fails. This is useful, for example, when +running scripts that may only apply for some builds in an otherwise generic +CI setup. + +#### `ignore-scripts` + * Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + +#### `script-shell` + +* Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows +* Type: null or String + +The shell to use for scripts run with the `npm exec`, `npm run` and `npm +init <pkg>` commands. 
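To ground the `if-present` and `script-shell` options above, a couple of hedged one-liners; bash is only an example shell, and `script-shell` may equally be set in `.npmrc`:

```bash
# In a generic CI job, run the test script only if one is defined;
# the command exits cleanly instead of erroring when it is absent.
npm run test --if-present

# Run a script through a specific shell for this one invocation.
npm run build --script-shell=bash
```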
-You can use the `--silent` flag to prevent showing `npm ERR!` output on error. +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also @@ -119,3 +212,4 @@ You can use the `--silent` flag to prevent showing `npm ERR!` output on error. * [npm restart](/commands/npm-restart) * [npm stop](/commands/npm-stop) * [npm config](/commands/npm-config) +* [npm workspaces](/using-npm/workspaces) diff --git a/docs/content/commands/npm-search.md b/docs/content/commands/npm-search.md index 35178bcb0a580..e30287635b56f 100644 --- a/docs/content/commands/npm-search.md +++ b/docs/content/commands/npm-search.md @@ -12,6 +12,8 @@ npm search [-l|--long] [--json] [--parseable] [--no-description] [search terms . aliases: s, se, find ``` +Note: This command is unaware of workspaces. + ### Description Search the registry for packages matching the search terms. `npm search` @@ -37,91 +39,97 @@ expression characters in most shells.) ### Configuration -All of the following can be defined in a `.npmrc` file, or passed as -parameters to the cli prefixed with `--` (e.g. `--json`) +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `long` -#### description +* Default: false +* Type: Boolean -* Default: true +Show extended information in `ls`, `search`, and `help-search`. + +#### `json` + +* Default: false * Type: Boolean -#### color +Whether or not to output JSON data, rather than the normal output. - * Default: true - * Type: Boolean +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. -Used as `--no-color`, disables color highlighting of matches in the -results. +Not supported by all npm commands. -#### json +#### `color` -* Default: false -* Type: Boolean +* Default: true unless the NO_COLOR environ is set to something other than '0' +* Type: "always" or Boolean -Output search results as a JSON array. +If false, never shows colors. If `"always"` then always shows colors. If +true, then only prints color codes for tty file descriptors. -#### parseable +#### `parseable` * Default: false * Type: Boolean -Output search results as lines with tab-separated columns. +Output parseable results from commands that write to standard output. For +`npm search`, this will be tab-separated table format. -#### long +#### `description` -* Default: false +* Default: true * Type: Boolean -Display full package descriptions and other long text across multiple -lines. When disabled (which is the default) the output will -truncate search results to fit neatly on a single line. Modules with -extremely long names will fall on multiple lines. +Show the description in `npm search` -#### searchopts +#### `searchopts` * Default: "" * Type: String Space-separated options that are always passed to search. -#### searchexclude +#### `searchexclude` * Default: "" * Type: String Space-separated options that limit the results from search. -#### registry +#### `registry` - * Default: https://registry.npmjs.org/ - * Type: url +* Default: "https://registry.npmjs.org/" +* Type: URL -Search the specified registry for modules. If you have configured npm to -point to a different default registry (such as your internal private -module repository), `npm search` will also default to that registry when -searching. +The base URL of the npm registry. -### A note on caching +#### `prefer-online` -The npm cli caches search results with the same terms and options -locally in its cache. 
You can use the following to change how and when -the cli uses this cache. See [`npm cache`](/commands/npm-cache) for more -on how the cache works. +* Default: false +* Type: Boolean -#### prefer-online +If true, staleness checks for cached data will be forced, making the CLI +look for updates immediately even for fresh package data. -Forces staleness checks for cached searches, making the cli look for -updates immediately even for fresh search results. +#### `prefer-offline` -#### prefer-offline +* Default: false +* Type: Boolean -Bypasses staleness checks for cached searches. Missing data will still -be requested from the server. To force full offline mode, use `offline`. +If true, staleness checks for cached data will be bypassed, but missing data +will be requested from the server. To force full offline mode, use +`--offline`. + +#### `offline` + +* Default: false +* Type: Boolean -#### offline +Force offline mode: no network requests will be done during install. To +allow the CLI to fill in missing cache data, see `--prefer-offline`. -Forces full offline mode. Any searches not locally cached will result in -an error. +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also diff --git a/docs/content/commands/npm-set-script.md b/docs/content/commands/npm-set-script.md index 7bc8f75d23653..c5d5df53203b1 100644 --- a/docs/content/commands/npm-set-script.md +++ b/docs/content/commands/npm-set-script.md @@ -5,7 +5,7 @@ description: Set tasks in the scripts section of package.json --- ### Synopsis -An npm command that lets you create a task in the scripts section of the package.json. +An npm command that lets you create a task in the `scripts` section of the `package.json`. ```bash npm set-script [<script>] [<command>] @@ -26,6 +26,44 @@ npm set-script [<script>] [<command>] } ``` +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm run-script](/commands/npm-run-script) diff --git a/docs/content/commands/npm-shrinkwrap.md b/docs/content/commands/npm-shrinkwrap.md index dce50b7843bc3..6786229469d2c 100644 --- a/docs/content/commands/npm-shrinkwrap.md +++ b/docs/content/commands/npm-shrinkwrap.md @@ -10,6 +10,8 @@ description: Lock down dependency versions for publication npm shrinkwrap ``` +Note: This command is unaware of workspaces. 
+ ### Description This command repurposes `package-lock.json` into a publishable diff --git a/docs/content/commands/npm-star.md b/docs/content/commands/npm-star.md index aab6e107747fd..5af08e2da67b8 100644 --- a/docs/content/commands/npm-star.md +++ b/docs/content/commands/npm-star.md @@ -10,6 +10,8 @@ description: Mark your favorite packages npm star [<pkg>...] ``` +Note: This command is unaware of workspaces. + ### Description "Starring" a package means that you have some interest in it. It's @@ -31,6 +33,28 @@ You can also "unstar" a package using [`npm unstar`](/commands/npm-unstar) You can see all your starred packages using [`npm stars`](/commands/npm-stars) +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. + +#### `unicode` + +* Default: false on windows, true on mac/unix systems with a unicode locale, + as defined by the `LC_ALL`, `LC_CTYPE`, or `LANG` environment variables. +* Type: Boolean + +When set to true, npm uses unicode characters in the tree output. When +false, it uses ascii characters instead of unicode glyphs. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm unstar](/commands/npm-unstar) diff --git a/docs/content/commands/npm-stars.md b/docs/content/commands/npm-stars.md index dab11bc669d1a..edea74a0c276d 100644 --- a/docs/content/commands/npm-stars.md +++ b/docs/content/commands/npm-stars.md @@ -9,6 +9,8 @@ description: View packages marked as favorites npm stars [<user>] ``` +Note: This command is unaware of workspaces. + ### Description If you have starred a lot of neat things and want to find them again @@ -17,6 +19,19 @@ quickly this command lets you do just that. You may also want to see your friend's favorite packages, in this case you will most certainly enjoy this command. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm star](/commands/npm-star) diff --git a/docs/content/commands/npm-start.md b/docs/content/commands/npm-start.md index 4791719b592f6..1c532de44beec 100644 --- a/docs/content/commands/npm-start.md +++ b/docs/content/commands/npm-start.md @@ -45,6 +45,32 @@ npm start ``` +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + +#### `script-shell` + +* Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows +* Type: null or String + +The shell to use for scripts run with the `npm exec`, `npm run` and `npm +init <pkg>` commands. 
+ +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm run-script](/commands/npm-run-script) diff --git a/docs/content/commands/npm-stop.md b/docs/content/commands/npm-stop.md index 9e8f9be360fd9..859de54c39142 100644 --- a/docs/content/commands/npm-stop.md +++ b/docs/content/commands/npm-stop.md @@ -38,6 +38,32 @@ npm stop ``` +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + +#### `script-shell` + +* Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows +* Type: null or String + +The shell to use for scripts run with the `npm exec`, `npm run` and `npm +init <pkg>` commands. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm run-script](/commands/npm-run-script) diff --git a/docs/content/commands/npm-team.md b/docs/content/commands/npm-team.md index 96aacd8ae95f2..c7d5defcc63c1 100644 --- a/docs/content/commands/npm-team.md +++ b/docs/content/commands/npm-team.md @@ -16,6 +16,8 @@ npm team rm <scope:team> <user> npm team ls <scope>|<scope:team> ``` +Note: This command is unaware of workspaces. + ### Description Used to manage teams in organizations, and change team memberships. Does not @@ -99,6 +101,50 @@ is done through the website, not the npm CLI. To use teams to manage permissions on packages belonging to your organization, use the `npm access` command to grant or revoke the appropriate permissions. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. + +#### `otp` + +* Default: null +* Type: null or String + +This is a one-time password from a two-factor authenticator. It's needed +when publishing or changing package permissions with `npm access`. + +If not set, and a registry response fails with a challenge for a one-time +password, npm will prompt on the command line for one. + +#### `parseable` + +* Default: false +* Type: Boolean + +Output parseable results from commands that write to standard output. For +`npm search`, this will be tab-separated table format. + +#### `json` + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + +Not supported by all npm commands. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm access](/commands/npm-access) diff --git a/docs/content/commands/npm-test.md b/docs/content/commands/npm-test.md index 2cc6a2e38b0f1..73cdf6de8572f 100644 --- a/docs/content/commands/npm-test.md +++ b/docs/content/commands/npm-test.md @@ -35,7 +35,31 @@ npm test (test.js output would be here) ``` +### Configuration +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. 
+ +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + +#### `script-shell` + +* Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows +* Type: null or String + +The shell to use for scripts run with the `npm exec`, `npm run` and `npm +init <pkg>` commands. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also diff --git a/docs/content/commands/npm-token.md b/docs/content/commands/npm-token.md index 652079453702e..c586a6915233d 100644 --- a/docs/content/commands/npm-token.md +++ b/docs/content/commands/npm-token.md @@ -9,7 +9,9 @@ description: Manage your authentication tokens npm token list [--json|--parseable] npm token create [--read-only] [--cidr=1.1.1.1/24,2.2.2.2/16] npm token revoke <id|token> - ``` +``` + +Note: This command is unaware of workspaces. ### Description @@ -71,3 +73,53 @@ This lets you list, create and revoke authentication tokens. found in your `.npmrc`), and ids as seen in the parseable or json output of `npm token list`. This will NOT accept the truncated token found in the normal `npm token list` output. + +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `read-only` + +* Default: false +* Type: Boolean + +This is used to mark a token as unable to publish when configuring limited +access tokens with the `npm token create` command. + +#### `cidr` + +* Default: null +* Type: null or String (can be set multiple times) + +This is a list of CIDR address to be used when configuring limited access +tokens with the `npm token create` command. + +#### `registry` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. + +#### `otp` + +* Default: null +* Type: null or String + +This is a one-time password from a two-factor authenticator. It's needed +when publishing or changing package permissions with `npm access`. + +If not set, and a registry response fails with a challenge for a one-time +password, npm will prompt on the command line for one. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + +### See Also + +* [npm adduser](/commands/npm-adduser) +* [npm registry](/using-npm/registry) +* [npm config](/commands/npm-config) +* [npmrc](/configuring-npm/npmrc) +* [npm owner](/commands/npm-owner) +* [npm whoami](/commands/npm-whoami) +* [npm profile](/commands/npm-profile) diff --git a/docs/content/commands/npm-uninstall.md b/docs/content/commands/npm-uninstall.md index 258431cbd9f94..b6ba31393834d 100644 --- a/docs/content/commands/npm-uninstall.md +++ b/docs/content/commands/npm-uninstall.md @@ -54,6 +54,54 @@ npm uninstall lodash --no-save `lodash` will not be removed from your `package.json`, `npm-shrinkwrap.json`, or `package-lock.json` files. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `save` + +* Default: true +* Type: Boolean + +Save installed packages to a package.json file as dependencies. + +When used with the `npm rm` command, removes the dependency from +package.json. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. 
+ +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm prune](/commands/npm-prune) diff --git a/docs/content/commands/npm-unpublish.md b/docs/content/commands/npm-unpublish.md index e9d6e9045c6f9..b17ff45406829 100644 --- a/docs/content/commands/npm-unpublish.md +++ b/docs/content/commands/npm-unpublish.md @@ -7,7 +7,7 @@ description: Remove a package from the registry ### Synopsis To learn more about how the npm registry treats unpublish, see our <a -href="https://www.npmjs.com/policies/unpublish" target="_blank" +href="https://docs.npmjs.com/policies/unpublish" target="_blank" rel="noopener noreferrer"> unpublish policies</a> #### Unpublishing a single version of a package @@ -34,7 +34,7 @@ This removes a package version from the registry, deleting its entry and removing the tarball. The npm registry will return an error if you are not [logged -in](/commands/npm-login). +in](/commands/npm-adduser). If you do not specify a version or if you remove all of a package's versions then the registry will remove the root package entry entirely. @@ -45,6 +45,82 @@ you must use a new version number. If you unpublish the entire package, you may not publish any new versions of that package until 24 hours have passed. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `dry-run` + +* Default: false +* Type: Boolean + +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, `install`, `update`, +`dedupe`, `uninstall`, as well as `pack` and `publish`. + +Note: This is NOT honored by other network related commands, eg `dist-tags`, +`owner`, etc. + +#### `force` + +* Default: false +* Type: Boolean + +Removes various protections against unfortunate side effects, common +mistakes, unnecessary performance degradation, and malicious input. + +* Allow clobbering non-npm files in global installs. +* Allow the `npm version` command to work on an unclean git repository. +* Allow deleting the cache folder with `npm cache clean`. +* Allow installing packages that have an `engines` declaration requiring a + different version of npm. +* Allow installing packages that have an `engines` declaration requiring a + different version of `node`, even if `--engine-strict` is enabled. +* Allow `npm audit fix` to install modules outside your stated dependency + range (including SemVer-major changes). +* Allow unpublishing all versions of a published package. +* Allow conflicting peerDependencies to be installed in the root project. +* Implicitly set `--yes` during `npm init`. 
+* Allow clobbering existing values in `npm pkg` + +If you don't have a clear idea of what you want to do, it is strongly +recommended that you do not use this option! + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm deprecate](/commands/npm-deprecate) @@ -52,4 +128,4 @@ passed. * [npm registry](/using-npm/registry) * [npm adduser](/commands/npm-adduser) * [npm owner](/commands/npm-owner) -* [npm login](/commands/npm-login) +* [npm login](/commands/npm-adduser) diff --git a/docs/content/commands/npm-unstar.md b/docs/content/commands/npm-unstar.md index 5471d908004e1..f1a1f7a2e3525 100644 --- a/docs/content/commands/npm-unstar.md +++ b/docs/content/commands/npm-unstar.md @@ -10,6 +10,8 @@ description: Remove an item from your favorite packages npm unstar [<pkg>...] ``` +Note: This command is unaware of workspaces. + ### Description "Unstarring" a package is the opposite of [`npm star`](/commands/npm-star), @@ -27,6 +29,39 @@ You can "star" a package using [`npm star`](/commands/npm-star) You can see all your starred packages using [`npm stars`](/commands/npm-stars) +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. + +#### `unicode` + +* Default: false on windows, true on mac/unix systems with a unicode locale, + as defined by the `LC_ALL`, `LC_CTYPE`, or `LANG` environment variables. +* Type: Boolean + +When set to true, npm uses unicode characters in the tree output. When +false, it uses ascii characters instead of unicode glyphs. + +#### `otp` + +* Default: null +* Type: null or String + +This is a one-time password from a two-factor authenticator. It's needed +when publishing or changing package permissions with `npm access`. + +If not set, and a registry response fails with a challenge for a one-time +password, npm will prompt on the command line for one. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See Also * [npm star](/commands/npm-star) diff --git a/docs/content/commands/npm-update.md b/docs/content/commands/npm-update.md index 012c8472c93fe..c4f7694e19a81 100644 --- a/docs/content/commands/npm-update.md +++ b/docs/content/commands/npm-update.md @@ -15,11 +15,11 @@ aliases: up, upgrade ### Description This command will update all the packages listed to the latest version -(specified by the `tag` config), respecting semver. 
+(specified by the `tag` config), respecting the semver constraints of +both your package and its dependencies (if they also require the same +package). -It will also install missing packages. As with all commands that install -packages, the `--dev` flag will cause `devDependencies` to be processed -as well. +It will also install missing packages. If the `-g` flag is specified, this command will update globally installed packages. @@ -103,6 +103,39 @@ Then `npm update` will install `dep1@0.4.1`, because that is the highest-sorting version that satisfies `^0.4.0` (`>= 0.4.0 <0.5.0`) +#### Subdependencies + +Suppose your app now also has a dependency on `dep2` + +```json +{ + "name": "my-app", + "dependencies": { + "dep1": "^1.0.0", + "dep2": "1.0.0" + } +} +``` + +and `dep2` itself depends on this limited range of `dep1` + +```json +{ +"name": "dep2", + "dependencies": { + "dep1": "~1.1.1" + } +} +``` + +Then `npm update` will install `dep1@1.1.2` because that is the highest +version that `dep2` allows. npm will prioritize having a single version +of `dep1` in your tree rather than two when that single version can +satisfy the semver requirements of multiple dependencies in your tree. +In this case if you really did need your package to use a newer version +you would need to use `npm install`. + + #### Updating Globally-Installed Packages `npm update -g` will apply the `update` action to each globally installed @@ -116,6 +149,184 @@ need to run `npm install -g [<pkg>...]` NOTE: If a package has been upgraded to a version newer than `latest`, it will be _downgraded_. +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `global` + +* Default: false +* Type: Boolean + +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + +#### `global-style` + +* Default: false +* Type: Boolean + +Causes npm to install the package into your local `node_modules` folder with +the same layout it uses with the global `node_modules` folder. Only your +direct dependencies will show in `node_modules` and everything they depend +on will be flattened in their `node_modules` folders. This obviously will +eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling` +will be preferred. + +#### `legacy-bundling` + +* Default: false +* Type: Boolean + +Causes npm to install the package such that versions of npm prior to 1.4, +such as the one included with node 0.8, can install the package. This +eliminates all automatic deduping. If used with `global-style` this option +will be preferred. + +#### `strict-peer-deps` + +* Default: false +* Type: Boolean + +If set to `true`, and `--legacy-peer-deps` is not set, then _any_ +conflicting `peerDependencies` will be treated as an install failure, even +if npm could reasonably guess the appropriate resolution based on non-peer +dependency relationships. 
+ +By default, conflicting `peerDependencies` deep in the dependency graph will +be resolved using the nearest non-peer dependency specification, even if +doing so will result in some packages receiving a peer dependency outside +the range set in their package's `peerDependencies` object. + +When such and override is performed, a warning is printed, explaining the +conflict and the packages involved. If `--strict-peer-deps` is set, then +this warning is treated as a failure. + +#### `package-lock` + +* Default: true +* Type: Boolean + +If set to false, then ignore `package-lock.json` files when installing. This +will also prevent _writing_ `package-lock.json` if `save` is true. + +When package package-locks are disabled, automatic pruning of extraneous +modules will also be disabled. To remove extraneous modules with +package-locks disabled use `npm prune`. + +#### `omit` + +* Default: 'dev' if the `NODE_ENV` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) + +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +`package-lock.json` or `npm-shrinkwrap.json` file. They are just not +physically installed on disk. + +If a package type appears in both the `--include` and `--omit` lists, then +it will be included. + +If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment +variable will be set to `'production'` for all lifecycle scripts. + +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + +#### `audit` + +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. + +#### `bin-links` + +* Default: true +* Type: Boolean + +Tells npm to create symlinks (or `.cmd` shims on Windows) for package +executables. + +Set to false to have it not do this. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems. + +#### `fund` + +* Default: true +* Type: Boolean + +When "true" displays the message at the end of each `npm install` +acknowledging the number of dependencies looking for funding. See [`npm +fund`](/commands/npm-fund) for details. + +#### `dry-run` + +* Default: false +* Type: Boolean + +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, `install`, `update`, +`dedupe`, `uninstall`, as well as `pack` and `publish`. + +Note: This is NOT honored by other network related commands, eg `dist-tags`, +`owner`, etc. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. 
+ +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also diff --git a/docs/content/commands/npm-version.md b/docs/content/commands/npm-version.md index 0eb814b9899b0..a3e34153a06da 100644 --- a/docs/content/commands/npm-version.md +++ b/docs/content/commands/npm-version.md @@ -14,35 +14,128 @@ npm version [<newversion> | major | minor | patch | premajor | preminor | prepat 'npm ls' to inspect current package/dependency versions ``` +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `allow-same-version` + +* Default: false +* Type: Boolean + +Prevents throwing an error when `npm version` is used to set the new version +to the same value as the current version. + +#### `commit-hooks` + +* Default: true +* Type: Boolean + +Run git commit hooks when using the `npm version` command. + +#### `git-tag-version` + +* Default: true +* Type: Boolean + +Tag the commit when using the `npm version` command. + +#### `json` + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + +Not supported by all npm commands. + +#### `preid` + +* Default: "" +* Type: String + +The "prerelease identifier" to use as a prefix for the "prerelease" part of +a semver. Like the `rc` in `1.2.0-rc.8`. + +#### `sign-git-tag` + +* Default: false +* Type: Boolean + +If set to true, then the `npm version` command will tag the version using +`-s` to add a signature. + +Note that git requires you to have set up GPG keys in your git configs for +this to work properly. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### Description -Run this in a package directory to bump the version and write the new -data back to `package.json`, `package-lock.json`, and, if present, `npm-shrinkwrap.json`. 
+Run this in a package directory to bump the version and write the new data +back to `package.json`, `package-lock.json`, and, if present, +`npm-shrinkwrap.json`. -The `newversion` argument should be a valid semver string, a -valid second argument to [semver.inc](https://github.com/npm/node-semver#functions) (one of `patch`, `minor`, `major`, -`prepatch`, `preminor`, `premajor`, `prerelease`), or `from-git`. In the second case, -the existing version will be incremented by 1 in the specified field. -`from-git` will try to read the latest git tag, and use that as the new npm version. +The `newversion` argument should be a valid semver string, a valid second +argument to [semver.inc](https://github.com/npm/node-semver#functions) (one +of `patch`, `minor`, `major`, `prepatch`, `preminor`, `premajor`, +`prerelease`), or `from-git`. In the second case, the existing version will +be incremented by 1 in the specified field. `from-git` will try to read +the latest git tag, and use that as the new npm version. -If run in a git repo, it will also create a version commit and tag. -This behavior is controlled by `git-tag-version` (see below), and can -be disabled on the command line by running `npm --no-git-tag-version version`. +If run in a git repo, it will also create a version commit and tag. This +behavior is controlled by `git-tag-version` (see below), and can be +disabled on the command line by running `npm --no-git-tag-version version`. It will fail if the working directory is not clean, unless the `-f` or `--force` flag is set. -If supplied with `-m` or `--message` config option, npm will -use it as a commit message when creating a version commit. If the -`message` config contains `%s` then that will be replaced with the -resulting version number. For example: +If supplied with `-m` or `--message` config option, npm will use it as a +commit message when creating a version commit. If the `message` config +contains `%s` then that will be replaced with the resulting version number. +For example: ```bash npm version patch -m "Upgrade to %s for reasons" ``` -If the `sign-git-tag` config is set, then the tag will be signed using -the `-s` flag to git. Note that you must have a default GPG key set up -in your git config for this to work properly. For example: +If the `sign-git-tag` config is set, then the tag will be signed using the +`-s` flag to git. Note that you must have a default GPG key set up in your +git config for this to work properly. For example: ```bash $ npm config set sign-git-tag true @@ -55,70 +148,45 @@ user: "isaacs (http://blog.izs.me/) <i@izs.me>" Enter passphrase: ``` -If `preversion`, `version`, or `postversion` are in the `scripts` property of -the package.json, they will be executed as part of running `npm version`. +If `preversion`, `version`, or `postversion` are in the `scripts` property +of the package.json, they will be executed as part of running `npm +version`. The exact order of execution is as follows: - 1. Check to make sure the git working directory is clean before we get started. - Your scripts may add files to the commit in future steps. - This step is skipped if the `--force` flag is set. - 2. Run the `preversion` script. These scripts have access to the old `version` in package.json. - A typical use would be running your full test suite before deploying. - Any files you want added to the commit should be explicitly added using `git add`. - 3. Bump `version` in `package.json` as requested (`patch`, `minor`, `major`, etc). - 4. Run the `version` script. 
These scripts have access to the new `version` in package.json - (so they can incorporate it into file headers in generated files for example). - Again, scripts should explicitly add generated files to the commit using `git add`. - 5. Commit and tag. - 6. Run the `postversion` script. Use it to clean up the file system or automatically push - the commit and/or tag. + +1. Check to make sure the git working directory is clean before we get + started. Your scripts may add files to the commit in future steps. + This step is skipped if the `--force` flag is set. +2. Run the `preversion` script. These scripts have access to the old + `version` in package.json. A typical use would be running your full + test suite before deploying. Any files you want added to the commit + should be explicitly added using `git add`. +3. Bump `version` in `package.json` as requested (`patch`, `minor`, + `major`, etc). +4. Run the `version` script. These scripts have access to the new `version` + in package.json (so they can incorporate it into file headers in + generated files for example). Again, scripts should explicitly add + generated files to the commit using `git add`. +5. Commit and tag. +6. Run the `postversion` script. Use it to clean up the file system or + automatically push the commit and/or tag. Take the following example: ```json - "scripts": { - "preversion": "npm test", - "version": "npm run build && git add -A dist", - "postversion": "git push && git push --tags && rm -rf build/temp" - } +{ + "scripts": { + "preversion": "npm test", + "version": "npm run build && git add -A dist", + "postversion": "git push && git push --tags && rm -rf build/temp" + } +} ``` -This runs all your tests and proceeds only if they pass. Then runs your `build` script, and -adds everything in the `dist` directory to the commit. After the commit, it pushes the new commit -and tag up to the server, and deletes the `build/temp` directory. - -### Configuration - -#### `allow-same-version` - -* Default: `false` -* Type: Boolean - -Prevents throwing an error when `npm version` is used to set the new version -to the same value as the current version. - -#### `git-tag-version` - -* Default: `true` -* Type: Boolean - -Commit and tag the version change. - -#### `commit-hooks` - -* Default: `true` -* Type: Boolean - -Run git commit hooks when committing the version change. - -#### `sign-git-tag` - -* Default: `false` -* Type: Boolean - -Pass the `-s` flag to git to sign the tag. - -Note that you must have a default GPG key set up in your git config for this to work properly. +This runs all your tests and proceeds only if they pass. Then runs your +`build` script, and adds everything in the `dist` directory to the commit. +After the commit, it pushes the new commit and tag up to the server, and +deletes the `build/temp` directory. ### See Also diff --git a/docs/content/commands/npm-view.md b/docs/content/commands/npm-view.md index bf09c2ba4f361..b3d5df86e34a4 100644 --- a/docs/content/commands/npm-view.md +++ b/docs/content/commands/npm-view.md @@ -14,8 +14,7 @@ aliases: info, show, v ### Description -This command shows data about a package and prints it to the stream -referenced by the `outfd` config, which defaults to stdout. +This command shows data about a package and prints it to stdout. 
As an example, to view information about the `connect` package from the registry, you would run: @@ -50,7 +49,7 @@ npm view opts@$(npm view ronn dependencies.opts) For fields that are arrays, requesting a non-numeric field will return all of the values from the objects in the list. For example, to get all -the contributor names for the `express` package, you would run: +the contributor email addresses for the `express` package, you would run: ```bash npm view express contributors.email @@ -74,7 +73,7 @@ npm view express contributors.name contributors.email "Person" fields are shown as a string if they would be shown as an object. So, for example, this will show the list of `npm` contributors in -the shortened string format. (See [`package.json`](/configuring-npm/package.json) for more on this.) +the shortened string format. (See [`package.json`](/configuring-npm/package-json) for more on this.) ```bash npm view npm contributors @@ -95,6 +94,56 @@ this: npm view connect versions ``` +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `json` + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + +Not supported by all npm commands. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### Output If only a single string field for a single version is output, then it diff --git a/docs/content/commands/npm-whoami.md b/docs/content/commands/npm-whoami.md index 43b301c51707a..09253b3b633e2 100644 --- a/docs/content/commands/npm-whoami.md +++ b/docs/content/commands/npm-whoami.md @@ -10,9 +10,31 @@ description: Display npm username npm whoami [--registry <registry>] ``` +Note: This command is unaware of workspaces. + ### Description -Print the `username` config to standard output. +Display the npm username of the currently logged-in user. + +If logged into a registry that provides token-based authentication, then +connect to the `/-/whoami` registry endpoint to find the username +associated with the token, and print to standard output. + +If logged into a registry that uses Basic Auth, then simply print the +`username` portion of the authentication string. + +### Configuration + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `registry` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. 
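For example, assuming you are already logged in (the registry URL below is just
the default shown above, not a requirement), the lookup can be run against the
default or an explicit registry:

```bash
# Print the username associated with the current credentials
npm whoami

# Check a specific registry instead of the configured default
npm whoami --registry https://registry.npmjs.org/
```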
+ +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> ### See Also diff --git a/docs/content/commands/npm.md b/docs/content/commands/npm.md index d01146d37041c..de510870640d1 100644 --- a/docs/content/commands/npm.md +++ b/docs/content/commands/npm.md @@ -31,7 +31,7 @@ Run `npm help` to get a list of available commands. npm comes preconfigured to use npm's public registry at https://registry.npmjs.org by default. Use of the npm public registry is subject to terms of use available at -https://www.npmjs.com/policies/terms. +https://docs.npmjs.com/policies/terms. You can configure npm to use any compatible registry you like, and even run your own registry. Use of someone else's registry is governed by @@ -62,10 +62,8 @@ requires compiling of C++ Code, npm will use [node-gyp](https://github.com/nodejs/node-gyp) for that task. For a Unix system, [node-gyp](https://github.com/nodejs/node-gyp) needs Python, make and a buildchain like GCC. On Windows, -Python and Microsoft Visual Studio C++ are needed. Python 3 is -not supported by [node-gyp](https://github.com/nodejs/node-gyp). -For more information visit -[the node-gyp repository](https://github.com/nodejs/node-gyp) and +Python and Microsoft Visual Studio C++ are needed. For more information +visit [the node-gyp repository](https://github.com/nodejs/node-gyp) and the [node-gyp Wiki](https://github.com/nodejs/node-gyp/wiki). ### Directories @@ -107,7 +105,7 @@ following help topics: Create an account or log in. When you do this, npm will store credentials in the user config file config file. * publish: - Use the [`npm publish`](/commands/npm-publish`) command to upload your + Use the [`npm publish`](/commands/npm-publish) command to upload your code to the registry. #### Configuration diff --git a/docs/content/configuring-npm/folders.md b/docs/content/configuring-npm/folders.md index 3ec716f2c67fd..218870765b262 100644 --- a/docs/content/configuring-npm/folders.md +++ b/docs/content/configuring-npm/folders.md @@ -45,14 +45,16 @@ Global installs on Windows go to `{prefix}/node_modules` (that is, no Scoped packages are installed the same way, except they are grouped together in a sub-folder of the relevant `node_modules` folder with the name of that scope prefix by the @ symbol, e.g. `npm install @myorg/package` would place -the package in `{prefix}/node_modules/@myorg/package`. See [`scope`](/using-npm/scope) for more details. +the package in `{prefix}/node_modules/@myorg/package`. See +[`scope`](/using-npm/scope) for more details. If you wish to `require()` a package, then install it locally. #### Executables When in global mode, executables are linked into `{prefix}/bin` on Unix, -or directly into `{prefix}` on Windows. +or directly into `{prefix}` on Windows. Ensure that path is in your +terminal's `PATH` environment to run them. When in local mode, executables are linked into `./node_modules/.bin` so that they can be made available to scripts run @@ -205,7 +207,7 @@ not be included in the package tarball. This allows a package maintainer to install all of their dependencies (and dev dependencies) locally, but only re-publish those items that -cannot be found elsewhere. See [`package.json`](/configuring-npm/package.json) for more information. +cannot be found elsewhere. See [`package.json`](/configuring-npm/package-json) for more information. 
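As a rough sketch of the layout described above (using the hypothetical
`@myorg/package` from earlier; paths differ on Windows as noted):

```bash
# Print the global prefix; on Unix, global executables are linked into <prefix>/bin
npm prefix -g

# On Unix, this lands in <prefix>/lib/node_modules/@myorg/package
npm install -g @myorg/package
```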
### See also diff --git a/docs/content/configuring-npm/package-json.md b/docs/content/configuring-npm/package-json.md index 4b3fd2ba93459..0fc5dc5075ee3 100644 --- a/docs/content/configuring-npm/package-json.md +++ b/docs/content/configuring-npm/package-json.md @@ -325,6 +325,8 @@ This should be a module relative to the root of your package folder. For most modules, it makes the most sense to have a main script and often not much else. +If `main` is not set it defaults to `index.js` in the packages root folder. + ### browser If your module is meant to be used client-side the browser field should be @@ -339,9 +341,12 @@ install into the PATH. npm makes this pretty easy (in fact, it uses this feature to install the "npm" executable.) To use this, supply a `bin` field in your package.json which is a map of -command name to local file name. On install, npm will symlink that file -into `prefix/bin` for global installs, or `./node_modules/.bin/` for local -installs. +command name to local file name. When this package is installed +globally, that file will be linked where global bins go so it is +available to run by name. When this package is installed as a +dependency in another package, the file will be linked where it will be +available to that package either directly by `npm exec` or by name in other +scripts when invoking them via `npm run-script`. For example, myapp could have this: @@ -384,6 +389,11 @@ Please make sure that your file(s) referenced in `bin` starts with `#!/usr/bin/env node`, otherwise the scripts are started without the node executable! +Note that you can also set the executable files using [directories.bin](#directoriesbin). + +See [folders](/configuring-npm/folders#executables) for more info on +executables. + ### man Specify either a single file or an array of filenames to put in place for @@ -545,12 +555,8 @@ had the following: } ``` -and then had a "start" command that then referenced the -`npm_package_config_port` environment variable, then the user could -override that by doing `npm config set foo:port 8001`. - -See [`config`](/using-npm/config) and [`scripts`](/using-npm/scripts) for -more on package configs. +It could also have a "start" command that referenced the +`npm_package_config_port` environment variable. ### dependencies @@ -562,8 +568,7 @@ tarball or git URL. **Please do not put test harnesses or transpilers or other "development" time tools in your `dependencies` object.** See `devDependencies`, below. -See [semver]([/using-npm/semver](https://github.com/npm/node-semver#versions)) -for more details about specifying version ranges. +See [semver](https://github.com/npm/node-semver#versions) for more details about specifying version ranges. * `version` Must match `version` exactly * `>version` Must be greater than `version` diff --git a/docs/content/configuring-npm/package-lock-json.md b/docs/content/configuring-npm/package-lock-json.md index 4d994bbc8c0a2..c06540fb3ffae 100644 --- a/docs/content/configuring-npm/package-lock-json.md +++ b/docs/content/configuring-npm/package-lock-json.md @@ -36,8 +36,8 @@ various purposes: Both of these files have the same format, and perform similar functions in the root of a project. -The difference is that `package-lock.json` is that it cannot be published, -and it will be ignored if found in any place other than the root project. +The difference is that `package-lock.json` cannot be published, and it will +be ignored if found in any place other than the root project. 
In contrast, [npm-shrinkwrap.json](/configuring-npm/npm-shrinkwrap-json) allows publication, and defines the dependency tree from the point encountered. diff --git a/docs/content/using-npm/config.md b/docs/content/using-npm/config.md index 1032adafbeb22..c4d1afed35cc8 100644 --- a/docs/content/using-npm/config.md +++ b/docs/content/using-npm/config.md @@ -59,30 +59,49 @@ internal to npm, and are defaults if nothing else is specified. The following shorthands are parsed on the command-line: -* `-v`: `--version` -* `-h`, `-?`, `--help`, `-H`: `--usage` -* `-s`, `--silent`: `--loglevel silent` -* `-q`, `--quiet`: `--loglevel warn` -* `-d`: `--loglevel info` -* `-dd`, `--verbose`: `--loglevel verbose` -* `-ddd`: `--loglevel silly` +<!-- AUTOGENERATED CONFIG SHORTHANDS START --> +<!-- automatically generated, do not edit manually --> +* `-a`: `--all` +* `--enjoy-by`: `--before` +* `-c`: `--call` +* `--desc`: `--description` +* `-f`: `--force` * `-g`: `--global` -* `-C`: `--prefix` +* `-L`: `--location` +* `-d`: `--loglevel info` +* `-s`: `--loglevel silent` +* `--silent`: `--loglevel silent` +* `--ddd`: `--loglevel silly` +* `--dd`: `--loglevel verbose` +* `--verbose`: `--loglevel verbose` +* `-q`: `--loglevel warn` +* `--quiet`: `--loglevel warn` * `-l`: `--long` * `-m`: `--message` -* `-p`, `--porcelain`: `--parseable` -* `-reg`: `--registry` -* `-f`: `--force` -* `-desc`: `--description` +* `--local`: `--no-global` +* `-n`: `--no-yes` +* `--no`: `--no-yes` +* `-p`: `--parseable` +* `--porcelain`: `--parseable` +* `-C`: `--prefix` +* `--readonly`: `--read-only` +* `--reg`: `--registry` * `-S`: `--save` -* `-P`: `--save-prod` -* `-D`: `--save-dev` -* `-O`: `--save-optional` * `-B`: `--save-bundle` +* `-D`: `--save-dev` * `-E`: `--save-exact` +* `-O`: `--save-optional` +* `-P`: `--save-prod` +* `-?`: `--usage` +* `-h`: `--usage` +* `-H`: `--usage` +* `--help`: `--usage` +* `-v`: `--version` +* `-w`: `--workspace` +* `--ws`: `--workspaces` * `-y`: `--yes` -* `-n`: `--yes false` -* `ll` and `la` commands: `ls --long` + +<!-- AUTOGENERATED CONFIG SHORTHANDS END --> If the specified configuration param resolves unambiguously to a known configuration parameter, then it is expanded to that configuration @@ -107,26 +126,39 @@ npm ls --global --parseable --long --loglevel info ### Config Settings -#### access +<!-- AUTOGENERATED CONFIG DESCRIPTIONS START --> +<!-- automatically generated, do not edit manually --> +#### `_auth` + +* Default: null +* Type: null or String + +A basic-auth string to use when authenticating against the npm registry. -* Default: `restricted` -* Type: Access +Warning: This should generally not be set via a command-line option. It is +safer to use a registry-provided authentication bearer token stored in the +~/.npmrc file by running `npm login`. -When publishing scoped packages, the access level defaults to `restricted`. If -you want your scoped package to be publicly viewable (and installable) set -`--access=public`. The only valid values for `access` are `public` and +#### `access` + +* Default: 'restricted' for scoped packages, 'public' for unscoped packages +* Type: null, "restricted", or "public" + +When publishing scoped packages, the access level defaults to `restricted`. +If you want your scoped package to be publicly viewable (and installable) +set `--access=public`. The only valid values for `access` are `public` and `restricted`. Unscoped packages _always_ have an access level of `public`. 
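For example, since scoped packages default to `restricted`, making a scoped
package publicly viewable and installable is a matter of overriding `access`
at publish time:

```bash
# Publish a scoped package so that anyone can view and install it
npm publish --access public
```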
-#### all +#### `all` -* Default: `false` +* Default: false * Type: Boolean When running `npm outdated` and `npm ls`, setting `--all` will show all outdated or installed packages, rather than only those directly depended upon by the current project. -#### allow-same-version +#### `allow-same-version` * Default: false * Type: Boolean @@ -134,78 +166,55 @@ upon by the current project. Prevents throwing an error when `npm version` is used to set the new version to the same value as the current version. -#### always-auth - -* Default: false -* Type: Boolean - -Force npm to always require authentication when accessing the registry, -even for `GET` requests. - -#### also - -* Default: null -* Type: String - -When "dev" or "development" and running local `npm shrinkwrap`, -`npm outdated`, or `npm update`, is an alias for `--dev`. - -#### audit +#### `audit` * Default: true * Type: Boolean -When "true" submit audit reports alongside `npm install` runs to the default -registry and all registries configured for scopes. See the documentation -for [`npm audit`](/commands/npm-audit) for details on what is submitted. - -#### audit-level - -* Default: `"low"` -* Type: `'low'`, `'moderate'`, `'high'`, `'critical'` - -The minimum level of vulnerability for `npm audit` to exit with -a non-zero exit code. +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. -#### auth-type +#### `audit-level` -* Default: `'legacy'` -* Type: `'legacy'`, `'sso'`, `'saml'`, `'oauth'` +* Default: null +* Type: null, "info", "low", "moderate", "high", "critical", or "none" -What authentication strategy to use with `adduser`/`login`. +The minimum level of vulnerability for `npm audit` to exit with a non-zero +exit code. -#### before +#### `before` -* Alias: enjoy-by * Default: null -* Type: Date +* Type: null or Date -If passed to `npm install`, will rebuild the npm tree such that only versions -that were available **on or before** the `--before` time get installed. -If there's no versions available for the current set of direct dependencies, the -command will error. +If passed to `npm install`, will rebuild the npm tree such that only +versions that were available **on or before** the `--before` time get +installed. If there's no versions available for the current set of direct +dependencies, the command will error. If the requested version is a `dist-tag` and the given tag does not pass the -`--before` filter, the most recent version less than or equal to that tag will -be used. For example, `foo@latest` might install `foo@1.2` even though `latest` -is `2.0`. +`--before` filter, the most recent version less than or equal to that tag +will be used. For example, `foo@latest` might install `foo@1.2` even though +`latest` is `2.0`. -#### bin-links +#### `bin-links` -* Default: `true` +* Default: true * Type: Boolean Tells npm to create symlinks (or `.cmd` shims on Windows) for package executables. -Set to false to have it not do this. This can be used to work around -the fact that some file systems don't support symlinks, even on -ostensibly Unix systems. +Set to false to have it not do this. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems. 
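A minimal sketch of disabling this for a single install, e.g. on a file
system that rejects symlinks:

```bash
# Install without creating bin symlinks or .cmd shims
npm install --no-bin-links
```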
-#### browser +#### `browser` * Default: OS X: `"open"`, Windows: `"start"`, Others: `"xdg-open"` -* Type: String or Boolean +* Type: null, Boolean, or String The browser that is called by npm commands to open websites. @@ -214,87 +223,50 @@ terminal. Set to `true` to use default system URL opener. -#### ca +#### `ca` -* Default: The npm CA certificate -* Type: String, Array or null +* Default: null +* Type: null or String (can be set multiple times) The Certificate Authority signing certificate that is trusted for SSL -connections to the registry. Values should be in PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines -replaced by the string "\n". For example: +connections to the registry. Values should be in PEM format (Windows calls +it "Base-64 encoded X.509 (.CER)") with newlines replaced by the string +"\n". For example: -```bash +```ini ca="-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----" ``` -Set to `null` to only allow "known" registrars, or to a specific CA cert -to trust only that specific signing authority. +Set to `null` to only allow "known" registrars, or to a specific CA cert to +trust only that specific signing authority. Multiple CAs can be trusted by specifying an array of certificates: -```bash +```ini ca[]="..." ca[]="..." ``` See also the `strict-ssl` config. -#### cafile - -* Default: `null` -* Type: path - -A path to a file containing one or multiple Certificate Authority signing -certificates. Similar to the `ca` setting, but allows for multiple CA's, as -well as for the CA information to be stored in a file on disk. - -#### cache - -* Default: Windows: `%AppData%\npm-cache`, Posix: `~/.npm` -* Type: path - -The location of npm's cache directory. See [`npm cache`](/commands/npm-cache) - -#### cache-lock-stale - -* Default: 60000 (1 minute) -* Type: Number - -The number of ms before cache folder lockfiles are considered stale. +#### `cache` -#### cache-lock-retries +* Default: Windows: `%LocalAppData%\npm-cache`, Posix: `~/.npm` +* Type: Path -* Default: 10 -* Type: Number - -Number of times to retry to acquire a lock on cache folder lockfiles. - -#### cache-lock-wait - -* Default: 10000 (10 seconds) -* Type: Number - -Number of ms to wait for cache lock files to expire. - -#### cache-max - -* Default: Infinity -* Type: Number - -**DEPRECATED**: This option has been deprecated in favor of `--prefer-online`. - -`--cache-max=0` is an alias for `--prefer-online`. +The location of npm's cache directory. See [`npm +cache`](/commands/npm-cache) -#### cache-min +#### `cafile` -* Default: 10 -* Type: Number - -**DEPRECATED**: This option has been deprecated in favor of `--prefer-offline`. +* Default: null +* Type: Path -`--cache-min=9999 (or bigger)` is an alias for `--prefer-offline`. +A path to a file containing one or multiple Certificate Authority signing +certificates. Similar to the `ca` setting, but allows for multiple CA's, as +well as for the CA information to be stored in a file on disk. -#### call +#### `call` * Default: "" * Type: String @@ -306,154 +278,206 @@ custom command to be run along with the installed packages. npm exec --package yo --package generator-node --call "yo node" ``` -#### cert -* Default: `null` -* Type: String +#### `cert` + +* Default: null +* Type: null or String -A client certificate to pass when accessing the registry. Values should be in -PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines replaced by the string "\n". 
For example: +A client certificate to pass when accessing the registry. Values should be +in PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with +newlines replaced by the string "\n". For example: -```bash +```ini cert="-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----" ``` -It is _not_ the path to a certificate file (and there is no "certfile" option). +It is _not_ the path to a certificate file (and there is no "certfile" +option). -#### cidr +#### `ci-name` -* Default: `null` -* Type: String, Array, null +* Default: The name of the current CI system, or `null` when not on a known CI + platform. +* Type: null or String -This is a list of CIDR address to be used when configuring limited access tokens with the `npm token create` command. +The name of a continuous integration system. If not set explicitly, npm will +detect the current CI environment using the +[`@npmcli/ci-detect`](http://npm.im/@npmcli/ci-detect) module. -#### commit-hooks +#### `cidr` -* Default: `true` -* Type: Boolean +* Default: null +* Type: null or String (can be set multiple times) -Run git commit hooks when using the `npm version` command. +This is a list of CIDR address to be used when configuring limited access +tokens with the `npm token create` command. -#### color +#### `color` -* Default: true -* Type: Boolean or `"always"` +* Default: true unless the NO_COLOR environ is set to something other than '0' +* Type: "always" or Boolean -If false, never shows colors. If `"always"` then always shows colors. -If true, then only prints color codes for tty file descriptors. +If false, never shows colors. If `"always"` then always shows colors. If +true, then only prints color codes for tty file descriptors. -This option can also be changed using the environment: colors are -disabled when the environment variable `NO_COLOR` is set to any value. +#### `commit-hooks` -#### depth +* Default: true +* Type: Boolean -* Default: null +Run git commit hooks when using the `npm version` command. + +#### `depth` + +* Default: `Infinity` if `--all` is set, otherwise `1` * Type: null or Number The depth to go when recursing packages for `npm ls`. -To make this default to `Infinity` instead of `null`, set `--all`. +If not set, `npm ls` will show only the immediate dependencies of the root +project. If `--all` is set, then npm will show all dependencies by default. -#### description +#### `description` * Default: true * Type: Boolean Show the description in `npm search` -#### dev +#### `diff` + +* Default: +* Type: String (can be set multiple times) + +Define arguments to compare in `npm diff`. + +#### `diff-dst-prefix` + +* Default: "b/" +* Type: String + +Destination prefix to be used in `npm diff` output. + +#### `diff-ignore-all-space` * Default: false * Type: Boolean -\[Deprecated\] Install `dev-dependencies` along with packages. +Ignore whitespace when comparing lines in `npm diff`. -#### dry-run +#### `diff-name-only` * Default: false * Type: Boolean -Indicates that you don't want npm to make any changes and that it should -only report what it would have done. This can be passed into any of the -commands that modify your local installation, eg, `install`, `update`, -`dedupe`, `uninstall`. This is NOT currently honored by some network related -commands, eg `dist-tags`, `owner`, etc. +Prints only filenames when using `npm diff`. -#### diff +#### `diff-no-prefix` -* Default: null -* Type: String, Array, null +* Default: false +* Type: Boolean -Define arguments to compare in `npm diff`. 
+Do not show any source or destination prefix in `npm diff` output. -#### diff-name-only +Note: this causes `npm diff` to ignore the `--diff-src-prefix` and +`--diff-dst-prefix` configs. + +#### `diff-src-prefix` + +* Default: "a/" +* Type: String + +Source prefix to be used in `npm diff` output. + +#### `diff-text` * Default: false * Type: Boolean -Prints only filenames when using `npm diff`. +Treat all files as text in `npm diff`. -#### diff-unified +#### `diff-unified` -* Type: number -* Default: `3` +* Default: 3 +* Type: Number The number of lines of context to print in `npm diff`. -#### diff-ignore-all-space +#### `dry-run` -* Type: Boolean * Default: false +* Type: Boolean -Ignore whitespace when comparing lines in `npm diff. +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, `install`, `update`, +`dedupe`, `uninstall`, as well as `pack` and `publish`. -#### diff-no-prefix +Note: This is NOT honored by other network related commands, eg `dist-tags`, +`owner`, etc. + +#### `editor` + +* Default: The EDITOR or VISUAL environment variables, or 'notepad.exe' on + Windows, or 'vim' on Unix systems +* Type: String + +The command to run for `npm edit` and `npm config edit`. + +#### `engine-strict` -* Type: Boolean * Default: false +* Type: Boolean -Do not show any source or destination prefix in `npm diff` output. +If set to true, then npm will stubbornly refuse to install (or even consider +installing) any package that claims to not be compatible with the current +Node.js version. -#### diff-src-prefix +This can be overridden by setting the `--force` flag. -* Type: String -* Default: `"a/"` +#### `fetch-retries` -Source prefix to be used in `npm diff` output. +* Default: 2 +* Type: Number -#### diff-dst-prefix +The "retries" config for the `retry` module to use when fetching packages +from the registry. -* Type: String -* Default: `"b/"` +npm will retry idempotent read requests to the registry in the case of +network failures or 5xx HTTP errors. -Destination prefix to be used in `npm diff` output. +#### `fetch-retry-factor` -#### diff-text +* Default: 10 +* Type: Number -* Alias: `-a` -* Type: Boolean -* Default: false +The "factor" config for the `retry` module to use when fetching packages. -Treat all files as text in `npm diff`. +#### `fetch-retry-maxtimeout` -#### editor +* Default: 60000 (1 minute) +* Type: Number -* Default: `EDITOR` environment variable if set, or `"vi"` on Posix, - or `"notepad"` on Windows. -* Type: path +The "maxTimeout" config for the `retry` module to use when fetching +packages. -The command to run for `npm edit` or `npm config edit`. +#### `fetch-retry-mintimeout` -#### engine-strict +* Default: 10000 (10 seconds) +* Type: Number -* Default: false -* Type: Boolean +The "minTimeout" config for the `retry` module to use when fetching +packages. -If set to true, then npm will stubbornly refuse to install (or even -consider installing) any package that claims to not be compatible with -the current Node.js version. +#### `fetch-timeout` + +* Default: 300000 (5 minutes) +* Type: Number + +The maximum amount of time to wait for HTTP requests to complete. -#### force +#### `force` * Default: false * Type: Boolean @@ -467,17 +491,18 @@ mistakes, unnecessary performance degradation, and malicious input. * Allow installing packages that have an `engines` declaration requiring a different version of npm. 
* Allow installing packages that have an `engines` declaration requiring a - different version of `node`, even if `--engines-strict` is enabled. + different version of `node`, even if `--engine-strict` is enabled. * Allow `npm audit fix` to install modules outside your stated dependency range (including SemVer-major changes). -* Allow a module to be installed as a direct dependency of itself. * Allow unpublishing all versions of a published package. * Allow conflicting peerDependencies to be installed in the root project. +* Implicitly set `--yes` during `npm init`. +* Allow clobbering existing values in `npm pkg` If you don't have a clear idea of what you want to do, it is strongly recommended that you do not use this option! -#### foreground-scripts +#### `foreground-scripts` * Default: false * Type: Boolean @@ -486,249 +511,222 @@ Run all build scripts (ie, `preinstall`, `install`, and `postinstall`) scripts for installed packages in the foreground process, sharing standard input, output, and error with the main npm process. -Note that this will generally make installs run slower, and be much -noisier, but can be useful for debugging. +Note that this will generally make installs run slower, and be much noisier, +but can be useful for debugging. -#### format-package-lock +#### `format-package-lock` * Default: true * Type: Boolean -Format `package-lock.json` or `npm-shrinkwrap.json` as a human readable file. +Format `package-lock.json` or `npm-shrinkwrap.json` as a human readable +file. -#### fund +#### `fund` * Default: true * Type: Boolean When "true" displays the message at the end of each `npm install` -acknowledging the number of dependencies looking for funding. -See [`npm fund`](/commands/npm-fund) for details. - -#### fetch-retries - -* Default: 2 -* Type: Number - -The "retries" config for the `retry` module to use when fetching -packages from the registry. - -#### fetch-retry-factor +acknowledging the number of dependencies looking for funding. See [`npm +fund`](/commands/npm-fund) for details. -* Default: 10 -* Type: Number - -The "factor" config for the `retry` module to use when fetching -packages. - -#### fetch-retry-mintimeout - -* Default: 10000 (10 seconds) -* Type: Number - -The "minTimeout" config for the `retry` module to use when fetching -packages. +#### `git` -#### fetch-retry-maxtimeout - -* Default: 60000 (1 minute) -* Type: Number - -The "maxTimeout" config for the `retry` module to use when fetching -packages. - -#### fetch-timeout - -* Default: 300000 (5 minutes) -* Type: Number - -The maximum amount of time to wait for HTTP requests to complete. - -#### git - -* Default: `"git"` +* Default: "git" * Type: String -The command to use for git commands. If git is installed on the -computer, but is not in the `PATH`, then set this to the full path to -the git binary. +The command to use for git commands. If git is installed on the computer, +but is not in the `PATH`, then set this to the full path to the git binary. -#### git-tag-version +#### `git-tag-version` -* Default: `true` +* Default: true * Type: Boolean Tag the commit when using the `npm version` command. -#### global +#### `global` * Default: false * Type: Boolean -Operates in "global" mode, so that packages are installed into the -`prefix` folder instead of the current working directory. See +Operates in "global" mode, so that packages are installed into the `prefix` +folder instead of the current working directory. See [folders](/configuring-npm/folders) for more on the differences in behavior. 
-* packages are installed into the `{prefix}/lib/node_modules` folder, instead of the - current working directory. +* packages are installed into the `{prefix}/lib/node_modules` folder, instead + of the current working directory. * bin files are linked to `{prefix}/bin` * man pages are linked to `{prefix}/share/man` -#### globalconfig - -* Default: {prefix}/etc/npmrc -* Type: path - -The config file to read for global config options. - -#### global-style +#### `global-style` * Default: false * Type: Boolean Causes npm to install the package into your local `node_modules` folder with -the same layout it uses with the global `node_modules` folder. Only your +the same layout it uses with the global `node_modules` folder. Only your direct dependencies will show in `node_modules` and everything they depend -on will be flattened in their `node_modules` folders. This obviously will -eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling` will be -preferred. +on will be flattened in their `node_modules` folders. This obviously will +eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling` +will be preferred. -#### heading +#### `globalconfig` + +* Default: The global --prefix setting plus 'etc/npmrc'. For example, + '/usr/local/etc/npmrc' +* Type: Path + +The config file to read for global config options. -* Default: `"npm"` +#### `heading` + +* Default: "npm" * Type: String The string that starts all the debugging log output. -#### https-proxy +#### `https-proxy` * Default: null -* Type: url +* Type: null or URL A proxy to use for outgoing https requests. If the `HTTPS_PROXY` or `https_proxy` or `HTTP_PROXY` or `http_proxy` environment variables are set, -proxy settings will be honored by the underlying `request` library. +proxy settings will be honored by the underlying `make-fetch-happen` +library. -#### if-present +#### `if-present` * Default: false * Type: Boolean -If true, npm will not exit with an error code when `run-script` is invoked for -a script that isn't defined in the `scripts` section of `package.json`. This -option can be used when it's desirable to optionally run a script when it's -present and fail if the script fails. This is useful, for example, when running -scripts that may only apply for some builds in an otherwise generic CI setup. +If true, npm will not exit with an error code when `run-script` is invoked +for a script that isn't defined in the `scripts` section of `package.json`. +This option can be used when it's desirable to optionally run a script when +it's present and fail if the script fails. This is useful, for example, when +running scripts that may only apply for some builds in an otherwise generic +CI setup. -#### ignore-prepublish +#### `ignore-scripts` * Default: false * Type: Boolean -If true, npm will not run `prepublish` scripts. +If true, npm does not run scripts specified in package.json files. -#### ignore-scripts +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. -* Default: false -* Type: Boolean +#### `include` -If true, npm does not run scripts specified in package.json files. +* Default: +* Type: "prod", "dev", "optional", or "peer" (can be set multiple times) + +Option that allows for defining which types of dependencies to install. -#### include +This is the inverse of `--omit=<type>`. 
-* Default: `[prod|dev|optional|peer]` -* Type: Array +Dependency types specified in `--include` will not be omitted, regardless of +the order in which omit/include are specified on the command-line. -Option that allows for defining which types of dependencies to install. +#### `include-staged` -#### init-module +* Default: false +* Type: Boolean -* Alias: `init.module` -* Default: ~/.npm-init.js -* Type: path +Allow installing "staged" published packages, as defined by [npm RFC PR +#92](https://github.com/npm/rfcs/pull/92). -A module that will be loaded by the `npm init` command. See the -documentation for the -[init-package-json](https://github.com/npm/init-package-json) module -for more information, or [npm init](/commands/npm-init). +This is experimental, and not implemented by the npm public registry. -#### init-author-name +#### `init-author-email` -* Alias: `init.author.name` * Default: "" * Type: String -The value `npm init` should use by default for the package author's name. +The value `npm init` should use by default for the package author's email. -#### init-author-email +#### `init-author-name` -* Alias: `init.author.email` * Default: "" * Type: String -The value `npm init` should use by default for the package author's email. +The value `npm init` should use by default for the package author's name. -#### init-author-url +#### `init-author-url` -* Alias: `init.author.url` * Default: "" -* Type: String +* Type: "" or URL -The value `npm init` should use by default for the package author's homepage. +The value `npm init` should use by default for the package author's +homepage. -#### init-license +#### `init-license` -* Alias: `init.license` * Default: "ISC" * Type: String The value `npm init` should use by default for the package license. -#### init-version +#### `init-module` + +* Default: "~/.npm-init.js" +* Type: Path + +A module that will be loaded by the `npm init` command. See the +documentation for the +[init-package-json](https://github.com/npm/init-package-json) module for +more information, or [npm init](/commands/npm-init). + +#### `init-version` -* Alias: `init.version` * Default: "1.0.0" -* Type: semver +* Type: SemVer string -The value that `npm init` should use by default for the package -version number, if not already set in package.json. +The value that `npm init` should use by default for the package version +number, if not already set in package.json. -#### json +#### `json` * Default: false * Type: Boolean Whether or not to output JSON data, rather than the normal output. -This feature is currently experimental, and the output data structures for many -commands is either not implemented in JSON yet, or subject to change. Only the -output from `npm ls --json` and `npm search --json` are currently valid. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. -#### key +Not supported by all npm commands. -* Default: `null` -* Type: String +#### `key` + +* Default: null +* Type: null or String -A client key to pass when accessing the registry. Values should be in PEM +A client key to pass when accessing the registry. Values should be in PEM format with newlines replaced by the string "\n". For example: -```json +```ini key="-----BEGIN PRIVATE KEY-----\nXXXX\nXXXX\n-----END PRIVATE KEY-----" ``` It is _not_ the path to a key file (and there is no "keyfile" option). 
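As a sketch of how the `init-*` settings above are typically used (the author
name and license values here are only placeholders):

```bash
# Store personal defaults once...
npm config set init-author-name "Ada Lovelace"
npm config set init-license "MIT"

# ...then scaffold new packages without prompting
npm init -y
```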
-#### legacy-bundling +#### `legacy-bundling` * Default: false * Type: Boolean Causes npm to install the package such that versions of npm prior to 1.4, -such as the one included with node 0.8, can install the package. This +such as the one included with node 0.8, can install the package. This eliminates all automatic deduping. If used with `global-style` this option will be preferred. -#### legacy-peer-deps +#### `legacy-peer-deps` * Default: false * Type: Boolean @@ -736,9 +734,8 @@ will be preferred. Causes npm to completely ignore `peerDependencies` when building a package tree, as in npm versions 3 through 6. -If a package cannot be installed because of overly strict -`peerDependencies` that collide, it provides a way to move forward -resolving the situation. +If a package cannot be installed because of overly strict `peerDependencies` +that collide, it provides a way to move forward resolving the situation. This differs from `--omit=peer`, in that `--omit=peer` will avoid unpacking `peerDependencies` on disk, but will still design a tree such that @@ -747,65 +744,66 @@ This differs from `--omit=peer`, in that `--omit=peer` will avoid unpacking Use of `legacy-peer-deps` is not recommended, as it will not enforce the `peerDependencies` contract that meta-dependencies may rely on. -#### link +#### `link` * Default: false * Type: Boolean -If true, then local installs will link if there is a suitable globally -installed package. +Used with `npm ls`, limiting output to only those packages that are linked. -Note that this means that local installs can cause things to be -installed into the global space at the same time. The link is only done -if one of the two conditions are met: +#### `local-address` -* The package is not already installed globally, or -* the globally installed version is identical to the version that is - being installed locally. +* Default: null +* Type: IP Address -#### local-address +The IP address of the local interface to use when making connections to the +npm registry. Must be IPv4 in versions of Node prior to 0.12. -* Default: undefined -* Type: IP Address +#### `location` -The IP address of the local interface to use when making connections -to the npm registry. Must be IPv4 in versions of Node prior to 0.12. +* Default: "user" unless `--global` is passed, which will also set this value + to "global" +* Type: "global", "user", or "project" -#### loglevel +When passed to `npm config` this refers to which config file to use. + +#### `loglevel` * Default: "notice" -* Type: String -* Values: "silent", "error", "warn", "notice", "http", "timing", "info", - "verbose", "silly" +* Type: "silent", "error", "warn", "notice", "http", "timing", "info", + "verbose", or "silly" -What level of logs to report. On failure, *all* logs are written to +What level of logs to report. On failure, *all* logs are written to `npm-debug.log` in the current working directory. -Any logs of a higher level than the setting are shown. The default is "notice". +Any logs of a higher level than the setting are shown. The default is +"notice". -#### logs-max +See also the `foreground-scripts` config. + +#### `logs-max` * Default: 10 * Type: Number The maximum number of log files to store. -#### long +#### `long` * Default: false * Type: Boolean -Show extended information in `npm ls` and `npm search`. +Show extended information in `ls`, `search`, and `help-search`. 
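For example, using `npm ls`, one of the commands listed above:

```bash
# Show extended information in the dependency listing
npm ls --long

# Shorthand form
npm ls -l
```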
-#### maxsockets +#### `maxsockets` -* Default: 50 +* Default: 15 * Type: Number The maximum number of connections to use per origin (protocol/host/port -combination). Passed to the `http` `Agent` used to make the request. +combination). -#### message +#### `message` * Default: "%s" * Type: String @@ -814,73 +812,90 @@ Commit message which is used by `npm version` when creating version commit. Any "%s" in the message will be replaced with the version number. -#### node-options +#### `node-options` * Default: null -* Type: String +* Type: null or String Options to pass through to Node.js via the `NODE_OPTIONS` environment -variable. This does not impact how npm itself is executed but it does -impact how lifecycle scripts are called. +variable. This does not impact how npm itself is executed but it does impact +how lifecycle scripts are called. -#### node-version +#### `node-version` -* Default: process.version -* Type: semver or false +* Default: Node.js `process.version` value +* Type: SemVer string -The node version to use when checking a package's `engines` map. +The node version to use when checking a package's `engines` setting. -#### noproxy +#### `noproxy` -* Default: null -* Type: String or Array +* Default: The value of the NO_PROXY environment variable +* Type: String (can be set multiple times) -A comma-separated string or an array of domain extensions that a proxy should not be used for. +Domain extensions that should bypass any proxies. -#### offline +Also accepts a comma-delimited string. -* Default: false -* Type: Boolean +#### `npm-version` -Force offline mode: no network requests will be done during install. To allow -the CLI to fill in missing cache data, see `--prefer-offline`. +* Default: Output of `npm --version` +* Type: SemVer string -#### only +The npm version to use when checking a package's `engines` setting. -* Default: null -* Type: String +#### `offline` -When "dev" or "development" and running local `npm install` without any -arguments, only devDependencies (and their dependencies) are installed. +* Default: false +* Type: Boolean -When "dev" or "development" and running local `npm ls`, `npm outdated`, or -`npm update`, is an alias for `--dev`. +Force offline mode: no network requests will be done during install. To +allow the CLI to fill in missing cache data, see `--prefer-offline`. -When "prod" or "production" and running local `npm install` without any -arguments, only non-devDependencies (and their dependencies) are -installed. +#### `omit` -When "prod" or "production" and running local `npm ls`, `npm outdated`, or -`npm update`, is an alias for `--production`. +* Default: 'dev' if the `NODE_ENV` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) -#### optional +Dependency types to omit from the installation tree on disk. -* Default: true -* Type: Boolean +Note that these dependencies _are_ still resolved and added to the +`package-lock.json` or `npm-shrinkwrap.json` file. They are just not +physically installed on disk. + +If a package type appears in both the `--include` and `--omit` lists, then +it will be included. -Attempt to install packages in the `optionalDependencies` object. Note -that if these packages fail to install, the overall installation -process is not aborted. +If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment +variable will be set to `'production'` for all lifecycle scripts. 
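A brief sketch of how `omit` interacts with `include` on the command line:

```bash
# Skip devDependencies on disk (they are still recorded in the lockfile)
npm install --omit=dev

# If a type appears in both lists, include wins and devDependencies are installed
npm install --omit=dev --include=dev
```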
-#### otp +#### `otp` * Default: null -* Type: Number +* Type: null or String -This is a one-time password from a two-factor authenticator. It's needed +This is a one-time password from a two-factor authenticator. It's needed when publishing or changing package permissions with `npm access`. -#### package-lock +If not set, and a registry response fails with a challenge for a one-time +password, npm will prompt on the command line for one. + +#### `pack-destination` + +* Default: "." +* Type: String + +Directory in which `npm pack` will save tarballs. + +#### `package` + +* Default: +* Type: String (can be set multiple times) + +The package to install for [`npm exec`](/commands/npm-exec) + +#### `package-lock` * Default: true * Type: Boolean @@ -889,75 +904,69 @@ If set to false, then ignore `package-lock.json` files when installing. This will also prevent _writing_ `package-lock.json` if `save` is true. When package package-locks are disabled, automatic pruning of extraneous -modules will also be disabled. To remove extraneous modules with +modules will also be disabled. To remove extraneous modules with package-locks disabled use `npm prune`. -This option is an alias for `--shrinkwrap`. - -#### package-lock-only +#### `package-lock-only` * Default: false * Type: Boolean -If set to true, it will update only the `package-lock.json`, +If set to true, the current operation will only use the `package-lock.json`, +ignoring `node_modules`. + +For `update` this means only the `package-lock.json` will be updated, instead of checking `node_modules` and downloading dependencies. -#### parseable +For `list` this means the output will be based on the tree described by the +`package-lock.json`, rather than the contents of `node_modules`. + +#### `parseable` * Default: false * Type: Boolean -Output parseable results from commands that write to -standard output. For `npm search`, this will be tab-separated table format. +Output parseable results from commands that write to standard output. For +`npm search`, this will be tab-separated table format. -#### prefer-offline +#### `prefer-offline` * Default: false * Type: Boolean If true, staleness checks for cached data will be bypassed, but missing data -will be requested from the server. To force full offline mode, use `--offline`. - -This option is effectively equivalent to `--cache-min=9999999`. +will be requested from the server. To force full offline mode, use +`--offline`. -#### prefer-online +#### `prefer-online` * Default: false * Type: Boolean -If true, staleness checks for cached data will be forced, making the CLI look -for updates immediately even for fresh package data. +If true, staleness checks for cached data will be forced, making the CLI +look for updates immediately even for fresh package data. -#### prefix +#### `prefix` -* Default: see [folders](/configuring-npm/folders) -* Type: path +* Default: In global mode, the folder where the node executable is installed. + In local mode, the nearest parent folder containing either a package.json + file or a node_modules folder. +* Type: Path -The location to install global items. If set on the command line, then -it forces non-global commands to run in the specified folder. +The location to install global items. If set on the command line, then it +forces non-global commands to run in the specified folder. -#### preid +#### `preid` * Default: "" * Type: String -The "prerelease identifier" to use as a prefix for the "prerelease" part of a -semver. Like the `rc` in `1.2.0-rc.8`. 
- -#### production - -* Default: false -* Type: Boolean - -Set to true to run in "production" mode. - -1. devDependencies are not installed at the topmost level when running - local `npm install` without any arguments. -2. Set the NODE_ENV="production" for lifecycle scripts. +The "prerelease identifier" to use as a prefix for the "prerelease" part of +a semver. Like the `rc` in `1.2.0-rc.8`. -#### progress +#### `progress` -* Default: true, unless TRAVIS or CI env vars set. +* Default: `true` unless running in a known CI system * Type: Boolean When set to `true`, npm will display a progress bar during time intensive @@ -965,56 +974,48 @@ operations, if `process.stderr` is a TTY. Set to `false` to suppress the progress bar. -#### proxy +#### `proxy` * Default: null -* Type: url +* Type: null, false, or URL A proxy to use for outgoing http requests. If the `HTTP_PROXY` or -`http_proxy` environment variables are set, proxy settings will be -honored by the underlying `request` library. +`http_proxy` environment variables are set, proxy settings will be honored +by the underlying `request` library. -#### read-only +#### `read-only` * Default: false * Type: Boolean -This is used to mark a token as unable to publish when configuring limited access tokens with the `npm token create` command. +This is used to mark a token as unable to publish when configuring limited +access tokens with the `npm token create` command. -#### rebuild-bundle +#### `rebuild-bundle` * Default: true * Type: Boolean Rebuild bundled dependencies after installation. -#### registry +#### `registry` -* Default: https://registry.npmjs.org/ -* Type: url +* Default: "https://registry.npmjs.org/" +* Type: URL -The base URL of the npm package registry. +The base URL of the npm registry. -#### rollback - -* Default: true -* Type: Boolean - -Remove failed installs. - -#### save +#### `save` * Default: true * Type: Boolean Save installed packages to a package.json file as dependencies. -When used with the `npm rm` command, it removes it from the `dependencies` -object. - -Only works if there is already a package.json file present. +When used with the `npm rm` command, removes the dependency from +package.json. -#### save-bundle +#### `save-bundle` * Default: false * Type: Boolean @@ -1023,120 +1024,108 @@ If a package would be saved at install time by the use of `--save`, `--save-dev`, or `--save-optional`, then also put it in the `bundleDependencies` list. -When used with the `npm rm` command, it removes it from the -bundledDependencies list. +Ignore if `--save-peer` is set, since peerDependencies cannot be bundled. -#### save-prod +#### `save-dev` * Default: false * Type: Boolean -Makes sure that a package will be saved into `dependencies` specifically. This -is useful if a package already exists in `devDependencies` or -`optionalDependencies`, but you want to move it to be a production dep. This is -also the default behavior if `--save` is true, and neither `--save-dev` or -`--save-optional` are true. +Save installed packages to a package.json file as `devDependencies`. -#### save-dev +#### `save-exact` * Default: false * Type: Boolean -Save installed packages to a package.json file as `devDependencies`. - -When used with the `npm rm` command, it removes it from the -`devDependencies` object. - -Only works if there is already a package.json file present. +Dependencies saved to package.json will be configured with an exact version +rather than using npm's default semver range operator. 
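For example, assuming a hypothetical package `foo` whose latest published version is `1.2.3`, the flags above combine roughly as follows:

```
# saves "foo": "^1.2.3" under devDependencies (npm's default range prefix)
npm install foo --save-dev

# saves "foo": "1.2.3", pinned to the exact version
npm install foo --save-dev --save-exact
```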
-#### save-exact
+#### `save-optional`

* Default: false
* Type: Boolean

-Dependencies saved to package.json using `--save`, `--save-dev` or
-`--save-optional` will be configured with an exact version rather than
-using npm's default semver range operator.
+Save installed packages to a package.json file as `optionalDependencies`.

-#### save-optional
+#### `save-peer`

* Default: false
* Type: Boolean

-Save installed packages to a package.json file as
-optionalDependencies.
+Save installed packages to a package.json file as `peerDependencies`.

-When used with the `npm rm` command, it removes it from the
-`devDependencies` object.
+#### `save-prefix`

-Only works if there is already a package.json file present.
-
-#### save-prefix
-
-* Default: '^'
+* Default: "^"
* Type: String

Configure how versions of packages installed to a package.json file via
`--save` or `--save-dev` get prefixed.

-For example if a package has version `1.2.3`, by default its version is
-set to `^1.2.3` which allows minor upgrades for that package, but after
-`npm config set save-prefix='~'` it would be set to `~1.2.3` which only allows
+For example if a package has version `1.2.3`, by default its version is set
+to `^1.2.3` which allows minor upgrades for that package, but after `npm
+config set save-prefix='~'` it would be set to `~1.2.3` which only allows
patch upgrades.

-#### scope
+#### `save-prod`

-* Default: the scope of the current project, if any, or ""
-* Type: String
+* Default: false
+* Type: Boolean
+
+Save installed packages into `dependencies` specifically. This is useful if
+a package already exists in `devDependencies` or `optionalDependencies`, but
+you want to move it to be a non-optional production dependency.

-Associate an operation with a scope for a scoped registry. Useful when logging
-in to a private registry for the first time:
-`npm login --scope=@organization --registry=registry.organization.com`, which
-will cause `@organization` to be mapped to the registry for future installation
-of packages specified according to the pattern `@organization/package`.
+This is the default behavior if `--save` is true, and neither `--save-dev`
+nor `--save-optional` is true.

-#### script-shell
+#### `scope`
+
+* Default: the scope of the current project, if any, or ""
+* Type: String

-* Default: `null`
-* Type: path
+Associate an operation with a scope for a scoped registry.

-The shell to use for scripts run with the `npm run` command.
+Useful when logging in to or out of a private registry:

-#### scripts-prepend-node-path
+```
+# log in, linking the scope to the custom registry
+npm login --scope=@mycorp --registry=https://registry.mycorp.com

-* Default: "warn-only"
-* Type: Boolean, `"auto"` or `"warn-only"`
+# log out, removing the link and the auth token
+npm logout --scope=@mycorp
+```

-If set to `true`, add the directory in which the current `node` executable
-resides to the `PATH` environment variable when running scripts,
-even if that means that `npm` will invoke a different `node` executable than
-the one which it is running.
+This will cause `@mycorp` to be mapped to the registry for future
+installation of packages specified according to the pattern
+`@mycorp/package`.

-If set to `false`, never modify `PATH` with that.
+This will also cause `npm init` to create a scoped package.

-If set to `"warn-only"`, never modify `PATH` but print a warning if `npm` thinks
-that you may want to run it with `true`, e.g. because the `node` executable
-in the `PATH` is not the one `npm` was invoked with.
+``` +# accept all defaults, and create a package named "@foo/whatever", +# instead of just named "whatever" +npm init --scope=@foo --yes +``` -If set to `auto`, only add that directory to the `PATH` environment variable -if the `node` executable with which `npm` was invoked and the one that is found -first on the `PATH` are different. -#### searchexclude +#### `script-shell` -* Default: "" -* Type: String +* Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows +* Type: null or String -Space-separated options that limit the results from search. +The shell to use for scripts run with the `npm exec`, `npm run` and `npm +init <pkg>` commands. -#### searchopts +#### `searchexclude` * Default: "" * Type: String -Space-separated options that are always passed to search. +Space-separated options that limit the results from search. -#### searchlimit +#### `searchlimit` * Default: 20 * Type: Number @@ -1144,33 +1133,30 @@ Space-separated options that are always passed to search. Number of items to limit search results to. Will not apply at all to legacy searches. -#### searchstaleness +#### `searchopts` + +* Default: "" +* Type: String + +Space-separated options that are always passed to search. -* Default: 900 (15 minutes) +#### `searchstaleness` + +* Default: 900 * Type: Number The age of the cache, in seconds, before another registry request is made if using legacy search endpoint. -#### shell +#### `shell` -* Default: SHELL environment variable, or "bash" on Posix, or "cmd" on +* Default: SHELL environment variable, or "bash" on Posix, or "cmd.exe" on Windows -* Type: path +* Type: String The shell to run for the `npm explore` command. -#### shrinkwrap - -* Default: true -* Type: Boolean - -If set to false, then ignore `npm-shrinkwrap.json` files when installing. This -will also prevent _writing_ `npm-shrinkwrap.json` if `save` is true. - -This option is an alias for `--package-lock`. - -#### sign-git-commit +#### `sign-git-commit` * Default: false * Type: Boolean @@ -1178,36 +1164,21 @@ This option is an alias for `--package-lock`. If set to true, then the `npm version` command will commit the new package version using `-S` to add a signature. -Note that git requires you to have set up GPG keys in your git configs -for this to work properly. +Note that git requires you to have set up GPG keys in your git configs for +this to work properly. -#### sign-git-tag +#### `sign-git-tag` * Default: false * Type: Boolean -If set to true, then the `npm version` command will tag the version -using `-s` to add a signature. - -Note that git requires you to have set up GPG keys in your git configs -for this to work properly. - -#### sso-poll-frequency - -* Default: 500 -* Type: Number +If set to true, then the `npm version` command will tag the version using +`-s` to add a signature. -When used with SSO-enabled `auth-type`s, configures how regularly the registry -should be polled while the user is completing authentication. +Note that git requires you to have set up GPG keys in your git configs for +this to work properly. -#### sso-type - -* Default: 'oauth' -* Type: 'oauth', 'saml', or null - -If `--auth-type=sso`, the type of SSO type to use. - -#### strict-peer-deps +#### `strict-peer-deps` * Default: false * Type: Boolean @@ -1217,145 +1188,370 @@ conflicting `peerDependencies` will be treated as an install failure, even if npm could reasonably guess the appropriate resolution based on non-peer dependency relationships. 
-By default, conflicting `peerDependencies` in the dependency graph will be
-resolved using the nearest non-peer dependency specification, even if doing
-so will result in some packages receiving a peer dependency outside the
-range set in their package's `peerDependencies` object. When such and
-override is performed, a warning is printed, explaining the conflict and
-the packages involved. If `--strict-peer-deps` is set, then the warning is
-treated as a failure.
+By default, conflicting `peerDependencies` deep in the dependency graph will
+be resolved using the nearest non-peer dependency specification, even if
+doing so will result in some packages receiving a peer dependency outside
+the range set in their package's `peerDependencies` object.
+
+When such an override is performed, a warning is printed, explaining the
+conflict and the packages involved. If `--strict-peer-deps` is set, then
+this warning is treated as a failure.

-#### strict-ssl
+#### `strict-ssl`

* Default: true
* Type: Boolean

-Whether or not to do SSL key validation when making requests to the
-registry via https.
+Whether or not to do SSL key validation when making requests to the registry
+via https.

See also the `ca` config.

-#### tag
+#### `tag`

-* Default: latest
+* Default: "latest"
* Type: String

-If you ask npm to install a package and don't tell it a specific version, then
-it will install the specified tag.
+If you ask npm to install a package and don't tell it a specific version,
+then it will install the specified tag.
+
+Also the tag that is added to the package@version specified by the `npm tag`
+command, if no explicit tag is given.

-Also the tag that is added to the package@version specified by the `npm
-tag` command, if no explicit tag is given.
+When used by the `npm diff` command, this is the tag used to fetch the
+tarball that will be compared with the local files by default.

-#### tag-version-prefix
+#### `tag-version-prefix`

-* Default: `"v"`
+* Default: "v"
* Type: String

If set, alters the prefix used when tagging a new version when performing a
-version increment using `npm-version`. To remove the prefix altogether, set it
-to the empty string: `""`.
+version increment using `npm-version`. To remove the prefix altogether, set
+it to the empty string: `""`.

-Because other tools may rely on the convention that npm version tags look like
-`v1.0.0`, _only use this property if it is absolutely necessary_. In
+Because other tools may rely on the convention that npm version tags look
+like `v1.0.0`, _only use this property if it is absolutely necessary_. In
particular, use care when overriding this setting for public packages.

-#### timing
+#### `timing`

-* Default: `false`
+* Default: false
* Type: Boolean

If true, writes an `npm-debug` log to `_logs` and timing information to
-`_timing.json`, both in your cache. `_timing.json` is a newline delimited
-list of JSON objects. You can quickly view it with this
-[json](https://www.npmjs.com/package/json) command line:
-`json -g < ~/.npm/_timing.json`.
+`_timing.json`, both in your cache, even if the command completes
+successfully. `_timing.json` is a newline delimited list of JSON objects.
+
+You can quickly view it with this [json](https://npm.im/json) command line:
+`npm exec -- json -g < ~/.npm/_timing.json`.
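A small illustration of the `tag-version-prefix` behavior described above, assuming a package already at version `1.2.3` inside a git checkout (the version numbers are made up):

```
# with the default prefix, bumping 1.2.3 creates the git tag "v1.2.4"
npm version patch

# remove the prefix so later bumps are tagged "1.2.5", "1.2.6", ...
npm config set tag-version-prefix ""
```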
-#### tmp +#### `umask` + +* Default: 0 +* Type: Octal numeric string in range 0000..0777 (0..511) -* Default: TMPDIR environment variable, or "/tmp" -* Type: path +The "umask" value to use when setting the file creation mode on files and +folders. -Where to store temporary files and folders. All temp files are deleted -on success, but left behind on failure for forensic purposes. +Folders and executables are given a mode which is `0o777` masked against +this value. Other files are given a mode which is `0o666` masked against +this value. -#### unicode +Note that the underlying system will _also_ apply its own umask value to +files and folders that are created, and npm does not circumvent this, but +rather adds the `--umask` config to it. -* Default: false on windows, true on mac/unix systems with a unicode locale +Thus, the effective default umask value on most POSIX systems is 0o22, +meaning that folders and executables are created with a mode of 0o755 and +other files are created with a mode of 0o644. + +#### `unicode` + +* Default: false on windows, true on mac/unix systems with a unicode locale, + as defined by the `LC_ALL`, `LC_CTYPE`, or `LANG` environment variables. * Type: Boolean -When set to true, npm uses unicode characters in the tree output. When -false, it uses ascii characters to draw trees. +When set to true, npm uses unicode characters in the tree output. When +false, it uses ascii characters instead of unicode glyphs. -#### update-notifier +#### `update-notifier` * Default: true * Type: Boolean -Set to false to suppress the update notification when using an older -version of npm than the latest. +Set to false to suppress the update notification when using an older version +of npm than the latest. -#### usage +#### `usage` * Default: false * Type: Boolean -Set to show short usage output (like the -H output) -instead of complete help when doing [`npm help`](/commands/npm-help). - -#### userconfig +Show short usage output about the command specified. -* Default: ~/.npmrc -* Type: path +#### `user-agent` -The location of user-level configuration settings. - -#### umask +* Default: "npm/{npm-version} node/{node-version} {platform} {arch} + workspaces/{workspaces} {ci}" +* Type: String -* Default: 022 -* Type: Octal numeric string in range 0000..0777 (0..511) +Sets the User-Agent request header. The following fields are replaced with +their actual counterparts: -The "umask" value to use when setting the file creation mode on files -and folders. +* `{npm-version}` - The npm version in use +* `{node-version}` - The Node.js version in use +* `{platform}` - The value of `process.platform` +* `{arch}` - The value of `process.arch` +* `{workspaces}` - Set to `true` if the `workspaces` or `workspace` options + are set. +* `{ci}` - The value of the `ci-name` config, if set, prefixed with `ci/`, or + an empty string if `ci-name` is empty. -Folders and executables are given a mode which is `0777` masked against -this value. Other files are given a mode which is `0666` masked against -this value. Thus, the defaults are `0755` and `0644` respectively. +#### `userconfig` -#### user-agent +* Default: "~/.npmrc" +* Type: Path -* Default: node/{process.version} {process.platform} {process.arch} -* Type: String +The location of user-level configuration settings. -Sets a User-Agent to the request header +This may be overridden by the `npm_config_userconfig` environment variable +or the `--userconfig` command line option, but may _not_ be overridden by +settings in the `globalconfig` file. 
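To make the `umask` arithmetic described above concrete, here is a worked example using the documented default of `0022` (the octal results follow directly from the masking rule; the `npm config` line simply sets that same value explicitly):

```
# with a umask of 0022:
#   directories and executables: 0777 & ~0022 = 0755
#   other files:                 0666 & ~0022 = 0644
umask                      # print the shell's current umask
npm config set umask 0022  # tell npm to use the same value
```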
-#### version +#### `version` * Default: false -* Type: boolean +* Type: Boolean If true, output the npm version and exit successfully. Only relevant when specified explicitly on the command line. -#### versions +#### `versions` * Default: false -* Type: boolean +* Type: Boolean -If true, output the npm version as well as node's `process.versions` map, and -exit successfully. +If true, output the npm version as well as node's `process.versions` map and +the version in the current working directory's `package.json` file if one +exists, and exit successfully. Only relevant when specified explicitly on the command line. -#### viewer +#### `viewer` * Default: "man" on Posix, "browser" on Windows -* Type: path +* Type: String The program to use to view help content. Set to `"browser"` to view html help content in the default web browser. +#### `which` + +* Default: null +* Type: null or Number + +If there are multiple funding sources, which 1-indexed source URL to open. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +#### `yes` + +* Default: null +* Type: null or Boolean + +Automatically answer "yes" to any prompts that npm might print on the +command line. + +#### `also` + +* Default: null +* Type: null, "dev", or "development" +* DEPRECATED: Please use --include=dev instead. + +When set to `dev` or `development`, this is an alias for `--include=dev`. + +#### `auth-type` + +* Default: "legacy" +* Type: "legacy", "sso", "saml", or "oauth" +* DEPRECATED: This method of SSO/SAML/OAuth is deprecated and will be removed + in a future version of npm in favor of web-based login. + +What authentication strategy to use with `adduser`/`login`. + +#### `cache-max` + +* Default: Infinity +* Type: Number +* DEPRECATED: This option has been deprecated in favor of `--prefer-online` + +`--cache-max=0` is an alias for `--prefer-online` + +#### `cache-min` + +* Default: 0 +* Type: Number +* DEPRECATED: This option has been deprecated in favor of `--prefer-offline`. + +`--cache-min=9999 (or bigger)` is an alias for `--prefer-offline`. + +#### `dev` + +* Default: false +* Type: Boolean +* DEPRECATED: Please use --include=dev instead. + +Alias for `--include=dev`. + +#### `init.author.email` + +* Default: "" +* Type: String +* DEPRECATED: Use `--init-author-email` instead. + +Alias for `--init-author-email` + +#### `init.author.name` + +* Default: "" +* Type: String +* DEPRECATED: Use `--init-author-name` instead. + +Alias for `--init-author-name` + +#### `init.author.url` + +* Default: "" +* Type: "" or URL +* DEPRECATED: Use `--init-author-url` instead. 
+ +Alias for `--init-author-url` + +#### `init.license` + +* Default: "ISC" +* Type: String +* DEPRECATED: Use `--init-license` instead. + +Alias for `--init-license` + +#### `init.module` + +* Default: "~/.npm-init.js" +* Type: Path +* DEPRECATED: Use `--init-module` instead. + +Alias for `--init-module` + +#### `init.version` + +* Default: "1.0.0" +* Type: SemVer string +* DEPRECATED: Use `--init-version` instead. + +Alias for `--init-version` + +#### `only` + +* Default: null +* Type: null, "prod", or "production" +* DEPRECATED: Use `--omit=dev` to omit dev dependencies from the install. + +When set to `prod` or `production`, this is an alias for `--omit=dev`. + +#### `optional` + +* Default: null +* Type: null or Boolean +* DEPRECATED: Use `--omit=optional` to exclude optional dependencies, or + `--include=optional` to include them. + +Default value does install optional deps unless otherwise omitted. + +Alias for --include=optional or --omit=optional + +#### `production` + +* Default: null +* Type: null or Boolean +* DEPRECATED: Use `--omit=dev` instead. + +Alias for `--omit=dev` + +#### `shrinkwrap` + +* Default: true +* Type: Boolean +* DEPRECATED: Use the --package-lock setting instead. + +Alias for --package-lock + +#### `sso-poll-frequency` + +* Default: 500 +* Type: Number +* DEPRECATED: The --auth-type method of SSO/SAML/OAuth will be removed in a + future version of npm in favor of web-based login. + +When used with SSO-enabled `auth-type`s, configures how regularly the +registry should be polled while the user is completing authentication. + +#### `sso-type` + +* Default: "oauth" +* Type: null, "oauth", or "saml" +* DEPRECATED: The --auth-type method of SSO/SAML/OAuth will be removed in a + future version of npm in favor of web-based login. + +If `--auth-type=sso`, the type of SSO type to use. + +#### `tmp` + +* Default: The value returned by the Node.js `os.tmpdir()` method + <https://nodejs.org/api/os.html#os_os_tmpdir> +* Type: Path +* DEPRECATED: This setting is no longer used. npm stores temporary files in a + special location in the cache, and they are managed by + [`cacache`](http://npm.im/cacache). + +Historically, the location where temporary files were stored. No longer +relevant. + +<!-- AUTOGENERATED CONFIG DESCRIPTIONS END --> + ### See also * [npm config](/commands/npm-config) diff --git a/docs/content/using-npm/registry.md b/docs/content/using-npm/registry.md index c07fa7a48e888..3b07ab11c5bdc 100644 --- a/docs/content/using-npm/registry.md +++ b/docs/content/using-npm/registry.md @@ -12,7 +12,7 @@ package info. npm is configured to use the **npm public registry** at <https://registry.npmjs.org> by default. Use of the npm public registry is -subject to terms of use available at <https://www.npmjs.com/policies/terms>. +subject to terms of use available at <https://docs.npmjs.com/policies/terms>. You can configure npm to use any compatible registry you like, and even run your own registry. Use of someone else's registry may be governed by their diff --git a/docs/content/using-npm/scripts.md b/docs/content/using-npm/scripts.md index 00ef822bf9001..8fd5c5c0dbc9d 100644 --- a/docs/content/using-npm/scripts.md +++ b/docs/content/using-npm/scripts.md @@ -8,9 +8,9 @@ description: How npm handles the "scripts" field The `"scripts"` property of your `package.json` file supports a number of built-in scripts and their preset life cycle events as well as -arbitrary scripts. These all can be executed by running `npm run-script -<stage>` or `npm run <stage>` for short. 
*Pre* and *post* commands with -matching names will be run for those as well (e.g. `premyscript`, +arbitrary scripts. These all can be executed by running +`npm run-script <stage>` or `npm run <stage>` for short. *Pre* and *post* +commands with matching names will be run for those as well (e.g. `premyscript`, `myscript`, `postmyscript`). Scripts from dependencies can be run with `npm explore <pkg> -- npm run <stage>`. @@ -54,7 +54,8 @@ situations. These scripts happen in addition to the `pre<event>`, `post<event>`, the prepare script will be run, before the package is packaged and installed. -* As of `npm@7` these scripts run in the background +* As of `npm@7` these scripts run in the background. + To see the output, run with: `--foreground-scripts`. **prepublish** (DEPRECATED) * Does not run during `npm publish`, but does run during `npm ci` @@ -118,11 +119,6 @@ The advantage of doing these things at `prepublish` time is that they can be don * `prepare` -#### [`npm env`](/commands/npm-env) - -* `env` (You can override the default behavior of `npm env` by defining - a custom `env` entry in your `scripts` object) - #### [`npm install`](/commands/npm-install) These also run when you run `npm install -g <pkg-name>` @@ -179,7 +175,7 @@ If there is a `restart` script defined, these events are run, otherwise * `restart` * `postrestart` -#### [`npm run <user defined>`](/commands/npm-run) +#### [`npm run <user defined>`](/commands/npm-run-script) * `pre<user-defined>` * `<user-defined>` @@ -250,41 +246,7 @@ package.json file, then your package scripts would have the in your code with `process.env.npm_package_name` and `process.env.npm_package_version`, and so on for other fields. -#### configuration - -Configuration parameters are put in the environment with the -`npm_config_` prefix. For instance, you can view the effective `root` -config by checking the `npm_config_root` environment variable. - -#### Special: package.json "config" object - -The package.json "config" keys are overwritten in the environment if -there is a config param of `<name>[@<version>]:<key>`. For example, -if the package.json has this: - -```json -{ - "name" : "foo", - "config" : { - "port" : "8080" - }, - "scripts" : { - "start" : "node server.js" - } -} -``` - -and the server.js is this: - -```javascript -http.createServer(...).listen(process.env.npm_package_config_port) -``` - -then the user could change the behavior by doing: - -```bash - npm config set foo:port 80 - ``` +See [`package-json.md`](/using-npm/package-json) for more on package configs. #### current lifecycle event @@ -309,7 +271,7 @@ For example, if your package.json contains this: { "scripts" : { "install" : "scripts/install.js", - "postinstall" : "scripts/postinstall.js", + "postinstall" : "scripts/install.js", "uninstall" : "scripts/uninstall.js" } } @@ -342,23 +304,10 @@ Scripts are run by passing the line as a script argument to `sh`. If the script exits with a code other than 0, then this will abort the process. -Note that these script files don't have to be nodejs or even -javascript programs. They just have to be some kind of executable +Note that these script files don't have to be Node.js or even +JavaScript programs. They just have to be some kind of executable file. -### Hook Scripts - -If you want to run a specific script at a specific lifecycle event for -ALL packages, then you can use a hook script. 
- -Place an executable file at `node_modules/.hooks/{eventname}`, and -it'll get run for all packages when they are going through that point -in the package lifecycle for any packages installed in that root. - -Hook scripts are run exactly the same way as package.json scripts. -That is, they are in a separate child process, with the env described -above. - ### Best Practices * Don't exit with a non-zero error code unless you *really* mean it. diff --git a/docs/content/using-npm/workspaces.md b/docs/content/using-npm/workspaces.md index 2024627c75867..7cc125b3c7a7c 100644 --- a/docs/content/using-npm/workspaces.md +++ b/docs/content/using-npm/workspaces.md @@ -21,7 +21,7 @@ single **workspace**, meaning it's a nested package within the current local file system that is explicitly defined in the [`package.json`](/configuring-npm/package-json#workspaces) `workspaces` configuration. -### Installing workspaces +### Defining workspaces Workspaces are usually defined via the `workspaces` property of the [`package.json`](/configuring-npm/package-json#workspaces) file, e.g: @@ -36,8 +36,8 @@ Workspaces are usually defined via the `workspaces` property of the ``` Given the above `package.json` example living at a current working -directory `.` that contains a folder named `workspace-a` that disposes -of a `package.json` inside it, defining a nodejs package, e.g: +directory `.` that contains a folder named `workspace-a` that itself contains +a `package.json` inside it, defining a Node.js package, e.g: ``` . @@ -63,6 +63,49 @@ structure of files and folders: `-- package.json ``` +### Getting started with workspaces + +You may automate the required steps to define a new workspace using +[npm init](/commands/npm-init). For example in a project that already has a +`package.json` defined you can run: + +``` +npm init -w ./packages/a +``` + +This command will create the missing folders and a new `package.json` +file (if needed) while also making sure to properly configure the +`"workspaces"` property of your root project `package.json`. + +### Adding dependencies to a workspace + +It's possible to directly add/remove/update dependencies of your workspaces +using the [`workspace` config](/using-npm/config#workspace). + +For example, assuming the following structure: + +``` +. ++-- package.json +`-- packages + +-- a + | `-- package.json + `-- b + `-- package.json +``` + +If you want to add a dependency named `abbrev` from the registry as a +dependency of your workspace **a**, you may use the workspace config to tell +the npm installer that package should be added as a dependency of the provided +workspace: + +``` +npm install abbrev -w a +``` + +Note: other installing commands such as `uninstall`, `ci`, etc will also +respect the provided `workspace` configuration. + ### Using workspaces Given the [specifities of how Node.js handles module resolution](https://nodejs.org/dist/latest-v14.x/docs/api/modules.html#modules_all_together) it's possible to consume any defined workspace @@ -88,8 +131,55 @@ This demonstrates how the nature of `node_modules` resolution allows for in such a way that is also easy to [publish](/commands/npm-publish) these nested workspaces to be consumed elsewhere. +### Running commands in the context of workspaces + +You can use the `workspace` configuration option to run commands in the context +of a configured workspace. + +Following is a quick example on how to use the `npm run` command in the context +of nested workspaces. For a project containing multiple workspaces, e.g: + +``` +. 
++-- package.json +`-- packages + +-- a + | `-- package.json + `-- b + `-- package.json +``` + +By running a command using the `workspace` option, it's possible to run the +given command in the context of that specific workspace. e.g: + +``` +npm run test --workspace=a +``` + +This will run the `test` script defined within the +`./packages/a/package.json` file. + +Please note that you can also specify this argument multiple times in the +command-line in order to target multiple workspaces, e.g: + +``` +npm run test --workspace=a --workspace=b +``` + +It's also possible to use the `workspaces` (plural) configuration option to +enable the same behavior but running that command in the context of **all** +configured workspaces. e.g: + +``` +npm run test --workspaces +``` + +Will run the `test` script in both `./packages/a` and `./packages/b`. + ### See also * [npm install](/commands/npm-install) * [npm publish](/commands/npm-publish) +* [npm run-script](/commands/npm-run-script) +* [config](/using-npm/config) diff --git a/docs/dockhand.js b/docs/dockhand.js index 7f2c90dae9b09..77a20f7de1357 100644 --- a/docs/dockhand.js +++ b/docs/dockhand.js @@ -1,341 +1,328 @@ #!/usr/bin/env node -const path = require('path'); -const fs = require('fs'); -const yaml = require('yaml'); -const cmark = require('cmark-gfm'); -const mdx = require('@mdx-js/mdx'); -const mkdirp = require('mkdirp'); -const jsdom = require('jsdom'); +const path = require('path') +const fs = require('fs') +const yaml = require('yaml') +const cmark = require('cmark-gfm') +const mdx = require('@mdx-js/mdx') +const mkdirp = require('mkdirp') +const jsdom = require('jsdom') const npm = require('../lib/npm.js') -const config = require('./config.json'); +const config = require('./config.json') -const docsRoot = __dirname; -const inputRoot = path.join(docsRoot, 'content'); -const outputRoot = path.join(docsRoot, 'output'); +const docsRoot = __dirname +const inputRoot = path.join(docsRoot, 'content') +const outputRoot = path.join(docsRoot, 'output') -const template = fs.readFileSync('template.html').toString(); +const template = fs.readFileSync('template.html').toString() -const run = async function() { - try { - const navPaths = await getNavigationPaths(); - const fsPaths = await renderFilesystemPaths(); +const run = async function () { + try { + const navPaths = await getNavigationPaths() + const fsPaths = await renderFilesystemPaths() - if (!ensureNavigationComplete(navPaths, fsPaths)) { - process.exit(1); - } - } - catch (error) { - console.error(error); - } + if (!ensureNavigationComplete(navPaths, fsPaths)) + process.exit(1) + } catch (error) { + console.error(error) + } } -run(); +run() -function ensureNavigationComplete(navPaths, fsPaths) { - const unmatchedNav = { }, unmatchedFs = { }; +function ensureNavigationComplete (navPaths, fsPaths) { + const unmatchedNav = { }; const unmatchedFs = { } - for (const navPath of navPaths) { - unmatchedNav[navPath] = true; - } + for (const navPath of navPaths) + unmatchedNav[navPath] = true - for (let fsPath of fsPaths) { - fsPath = '/' + fsPath.replace(/\.md$/, ""); + for (let fsPath of fsPaths) { + fsPath = '/' + fsPath.replace(/\.md$/, '') - if (unmatchedNav[fsPath]) { - delete unmatchedNav[fsPath]; - } - else { - unmatchedFs[fsPath] = true; - } - } + if (unmatchedNav[fsPath]) + delete unmatchedNav[fsPath] - const missingNav = Object.keys(unmatchedNav).sort(); - const missingFs = Object.keys(unmatchedFs).sort() + else + unmatchedFs[fsPath] = true + } - if (missingNav.length > 0 || 
missingFs.length > 0) { - let message = "Error: documentation navigation (nav.yml) does not match filesystem.\n"; + const missingNav = Object.keys(unmatchedNav).sort() + const missingFs = Object.keys(unmatchedFs).sort() - if (missingNav.length > 0) { - message += "\nThe following path(s) exist on disk but are not present in nav.yml:\n\n"; + if (missingNav.length > 0 || missingFs.length > 0) { + let message = 'Error: documentation navigation (nav.yml) does not match filesystem.\n' - for (const nav of missingNav) { - message += ` ${nav}\n`; - } - } + if (missingNav.length > 0) { + message += '\nThe following path(s) exist on disk but are not present in nav.yml:\n\n' - if (missingNav.length > 0 && missingFs.length > 0) { - message += "\nThe following path(s) exist in nav.yml but are not present on disk:\n\n"; + for (const nav of missingNav) + message += ` ${nav}\n` + } - for (const fs of missingFs) { - message += ` ${fs}\n`; - } - } + if (missingNav.length > 0 && missingFs.length > 0) { + message += '\nThe following path(s) exist in nav.yml but are not present on disk:\n\n' - message += "\nUpdate nav.yml to ensure that all files are listed in the appropriate place."; + for (const fs of missingFs) + message += ` ${fs}\n` + } - console.error(message); + message += '\nUpdate nav.yml to ensure that all files are listed in the appropriate place.' - return false; - } + console.error(message) - return true; + return false + } + + return true } -function getNavigationPaths() { - const navFilename = path.join(docsRoot, 'nav.yml'); - const nav = yaml.parse(fs.readFileSync(navFilename).toString(), 'utf8'); +function getNavigationPaths () { + const navFilename = path.join(docsRoot, 'nav.yml') + const nav = yaml.parse(fs.readFileSync(navFilename).toString(), 'utf8') - return walkNavigation(nav); + return walkNavigation(nav) } -function walkNavigation(entries) { - const paths = [ ] +function walkNavigation (entries) { + const paths = [] - for (const entry of entries) { - if (entry.children) { - paths.push(... walkNavigation(entry.children)); - } - else { - paths.push(entry.url); - } - } + for (const entry of entries) { + if (entry.children) + paths.push(...walkNavigation(entry.children)) - return paths; + else + paths.push(entry.url) + } + + return paths } -async function renderFilesystemPaths() { - return await walkFilesystem(inputRoot); +async function renderFilesystemPaths () { + return await walkFilesystem(inputRoot) } -async function walkFilesystem(root, dirRelative) { - const paths = [ ] +async function walkFilesystem (root, dirRelative) { + const paths = [] + + const dirPath = dirRelative ? path.join(root, dirRelative) : root + const children = fs.readdirSync(dirPath) - const dirPath = dirRelative ? path.join(root, dirRelative) : root; - const children = fs.readdirSync(dirPath); + for (const childFilename of children) { + const childRelative = dirRelative ? + path.join(dirRelative, childFilename) : + childFilename + const childPath = path.join(root, childRelative) - for (const childFilename of children) { - const childRelative = dirRelative ? path.join(dirRelative, childFilename) : childFilename; - const childPath = path.join(root, childRelative); + if (fs.lstatSync(childPath).isDirectory()) + paths.push(...await walkFilesystem(root, childRelative)) - if (fs.lstatSync(childPath).isDirectory()) { - paths.push(... 
await walkFilesystem(root, childRelative)); - } - else { - await renderFile(childRelative); - paths.push(childRelative); - } + else { + await renderFile(childRelative) + paths.push(childRelative) } + } - return paths; + return paths } -async function renderFile(childPath) { - const inputPath = path.join(inputRoot, childPath); - - if (!inputPath.match(/\.md$/)) { - console.log(`warning: unknown file type ${inputPath}, ignored`); - return; +async function renderFile (childPath) { + const inputPath = path.join(inputRoot, childPath) + + if (!inputPath.match(/\.md$/)) { + console.log(`warning: unknown file type ${inputPath}, ignored`) + return + } + + const outputPath = path.join(outputRoot, childPath.replace(/\.md$/, '.html')) + + let md = fs.readFileSync(inputPath).toString() + let frontmatter = { } + + // Take the leading frontmatter out of the markdown + md = md.replace(/^---\n([\s\S]+)\n---\n/, (header, fm) => { + frontmatter = yaml.parse(fm, 'utf8') + return '' + }) + + // Replace any tokens in the source + md = md.replace(/@VERSION@/, npm.version) + + // Render the markdown into an HTML snippet using a GFM renderer. + const content = cmark.renderHtmlSync(md, { + smart: true, + githubPreLang: true, + strikethroughDoubleTilde: true, + unsafe: false, + extensions: { + table: true, + strikethrough: true, + tagfilter: true, + autolink: true, + }, + }) + + // Test that mdx can parse this markdown file. We don't actually + // use the output, it's just to ensure that the upstream docs + // site (docs.npmjs.com) can parse it when this file gets there. + try { + await mdx(md, { skipExport: true }) + } catch (error) { + throw new MarkdownError(childPath, error) + } + + // Inject this data into the template, using a mustache-like + // replacement scheme. + const html = template.replace(/{{\s*([\w.]+)\s*}}/g, (token, key) => { + switch (key) { + case 'content': + return `<div id="_content">${content}</div>` + case 'path': + return childPath + case 'url_path': + return encodeURI(childPath) + + case 'toc': + return '<div id="_table_of_contents"></div>' + + case 'title': + case 'section': + case 'description': + return frontmatter[key] + + case 'config.github_repo': + case 'config.github_branch': + case 'config.github_path': + return config[key.replace(/^config\./, '')] + + default: + console.log(`warning: unknown token '${token}' in ${inputPath}`) + return '' } + }) - const outputPath = path.join(outputRoot, childPath.replace(/\.md$/, '.html')); - - let md = fs.readFileSync(inputPath).toString(); - let frontmatter = { }; - - // Take the leading frontmatter out of the markdown - md = md.replace(/^---\n([\s\S]+)\n---\n/, (header, fm) => { - frontmatter = yaml.parse(fm, 'utf8'); - return ''; - }); - - // Replace any tokens in the source - md = md.replace(/@VERSION@/, npm.version); - - // Render the markdown into an HTML snippet using a GFM renderer. - const content = cmark.renderHtmlSync(md, { - 'smart': true, - 'githubPreLang': true, - 'strikethroughDoubleTilde': true, - 'unsafe': false, - extensions: { - 'table': true, - 'strikethrough': true, - 'tagfilter': true, - 'autolink': true - } - }); - - // Test that mdx can parse this markdown file. We don't actually - // use the output, it's just to ensure that the upstream docs - // site (docs.npmjs.com) can parse it when this file gets there. 
- try { - await mdx(md, { skipExport: true }); - } - catch (error) { - throw new MarkdownError(childPath, error); - } + const dom = new jsdom.JSDOM(html) + const document = dom.window.document - // Inject this data into the template, using a mustache-like - // replacement scheme. - const html = template.replace(/\{\{\s*([\w\.]+)\s*\}\}/g, (token, key) => { - switch (key) { - case 'content': - return `<div id="_content">${content}</div>`; - case 'path': - return childPath; - case 'url_path': - return encodeURI(childPath); - - case 'toc': - return '<div id="_table_of_contents"></div>'; - - case 'title': - case 'section': - case 'description': - return frontmatter[key]; - - case 'config.github_repo': - case 'config.github_branch': - case 'config.github_path': - return config[key.replace(/^config\./, '')]; - - default: - console.log(`warning: unknown token '${token}' in ${inputPath}`); - return ''; - } - return key; - }); - - const dom = new jsdom.JSDOM(html); - const document = dom.window.document; - - // Rewrite relative URLs in links and image sources to be relative to - // this file; this is for supporting `file://` links. HTML pages need - // suffix appended. - const links = [ - { tag: 'a', attr: 'href', suffix: '.html' }, - { tag: 'img', attr: 'src' } - ]; - - for (let linktype of links) { - for (let tag of document.querySelectorAll(linktype.tag)) { - let url = tag.getAttribute(linktype.attr); - - if (url.startsWith('/')) { - const childDepth = childPath.split('/').length - 1; - const prefix = childDepth > 0 ? '../'.repeat(childDepth) : './'; - - url = url.replace(/^\//, prefix); - - if (linktype.suffix) { - url += linktype.suffix; - } - - tag.setAttribute(linktype.attr, url); - } - } - } - - // Give headers a unique id so that they can be linked within the doc - const headerIds = [ ]; - for (let header of document.querySelectorAll('h1, h2, h3, h4, h5, h6')) { - if (header.getAttribute('id')) { - headerIds.push(header.getAttribute('id')); - continue; - } + // Rewrite relative URLs in links and image sources to be relative to + // this file; this is for supporting `file://` links. HTML pages need + // suffix appended. + const links = [ + { tag: 'a', attr: 'href', suffix: '.html' }, + { tag: 'img', attr: 'src' }, + ] - const headerText = header.textContent.replace(/[A-Z]/g, x => x.toLowerCase()).replace(/ /g, '-').replace(/[^a-z0-9\-]/g, ''); - let headerId = headerText; - let headerIncrement = 1; + for (const linktype of links) { + for (const tag of document.querySelectorAll(linktype.tag)) { + let url = tag.getAttribute(linktype.attr) - while (document.getElementById(headerId) !== null) { - headerId = headerText + (++headerIncrement); - } + if (url.startsWith('/')) { + const childDepth = childPath.split('/').length - 1 + const prefix = childDepth > 0 ? 
'../'.repeat(childDepth) : './' - headerIds.push(headerId); - header.setAttribute('id', headerId); - } + url = url.replace(/^\//, prefix) - // Walk the dom and build a table of contents - const toc = document.getElementById('_table_of_contents'); + if (linktype.suffix) + url += linktype.suffix - if (toc) { - toc.appendChild(generateTableOfContents(document)); + tag.setAttribute(linktype.attr, url) + } + } + } + + // Give headers a unique id so that they can be linked within the doc + const headerIds = [] + for (const header of document.querySelectorAll('h1, h2, h3, h4, h5, h6')) { + if (header.getAttribute('id')) { + headerIds.push(header.getAttribute('id')) + continue } - // Write the final output - const output = dom.serialize(); - - mkdirp.sync(path.dirname(outputPath)); - fs.writeFileSync(outputPath, output); -} + const headerText = header.textContent.replace(/[A-Z]/g, x => x.toLowerCase()).replace(/ /g, '-').replace(/[^a-z0-9-]/g, '') + let headerId = headerText + let headerIncrement = 1 -function generateTableOfContents(document) { - const headers = [ ]; - walkHeaders(document.getElementById('_content'), headers); + while (document.getElementById(headerId) !== null) + headerId = headerText + (++headerIncrement) - let parent = null; + headerIds.push(headerId) + header.setAttribute('id', headerId) + } - // The nesting depth of headers are not necessarily the header level. - // (eg, h1 > h3 > h5 is a depth of three even though there's an h5.) - const hierarchy = [ ]; - for (let header of headers) { - const level = headerLevel(header); + // Walk the dom and build a table of contents + const toc = document.getElementById('_table_of_contents') - while (hierarchy.length && hierarchy[hierarchy.length - 1].headerLevel > level) { - hierarchy.pop(); - } + if (toc) + toc.appendChild(generateTableOfContents(document)) - if (!hierarchy.length || hierarchy[hierarchy.length - 1].headerLevel < level) { - const newList = document.createElement('ul'); - newList.headerLevel = level; + // Write the final output + const output = dom.serialize() - if (hierarchy.length) { - hierarchy[hierarchy.length - 1].appendChild(newList); - } + mkdirp.sync(path.dirname(outputPath)) + fs.writeFileSync(outputPath, output) +} - hierarchy.push(newList); - } +function generateTableOfContents (document) { + const headers = [] + walkHeaders(document.getElementById('_content'), headers) + + // The nesting depth of headers are not necessarily the header level. + // (eg, h1 > h3 > h5 is a depth of three even though there's an h5.) 
+ const hierarchy = [] + for (const header of headers) { + const level = headerLevel(header) + + while ( + hierarchy.length && + hierarchy[hierarchy.length - 1].headerLevel > level + ) + hierarchy.pop() + + if ( + !hierarchy.length || + hierarchy[hierarchy.length - 1].headerLevel < level + ) { + const newList = document.createElement('ul') + newList.headerLevel = level + + if (hierarchy.length) + hierarchy[hierarchy.length - 1].appendChild(newList) + + hierarchy.push(newList) + } - const element = document.createElement('li'); + const element = document.createElement('li') - const link = document.createElement('a'); - link.setAttribute('href', `#${header.getAttribute('id')}`); - link.innerHTML = header.innerHTML; - element.appendChild(link); + const link = document.createElement('a') + link.setAttribute('href', `#${header.getAttribute('id')}`) + link.innerHTML = header.innerHTML + element.appendChild(link) - const list = hierarchy[hierarchy.length - 1]; - list.appendChild(element); - } + const list = hierarchy[hierarchy.length - 1] + list.appendChild(element) + } - return hierarchy[0]; + return hierarchy[0] } -function walkHeaders(element, headers) { - for (let child of element.childNodes) { - if (headerLevel(child)) { - headers.push(child); - } - - walkHeaders(child, headers); - } -} +function walkHeaders (element, headers) { + for (const child of element.childNodes) { + if (headerLevel(child)) + headers.push(child) -function headerLevel(node) { - const level = node.tagName ? node.tagName.match(/^[Hh]([123456])$/) : null; - return level ? level[1] : 0; + walkHeaders(child, headers) + } } -function debug(str) { - console.log(str); +function headerLevel (node) { + const level = node.tagName ? node.tagName.match(/^[Hh]([123456])$/) : null + return level ? 
level[1] : 0 } class MarkdownError extends Error { - constructor(file, inner) { - super(`failed to parse ${file}`); - this.file = file; - this.inner = inner; - } + constructor (file, inner) { + super(`failed to parse ${file}`) + this.file = file + this.inner = inner + } } diff --git a/docs/nav.yml b/docs/nav.yml index afceaba570d6d..a45aefbb03d28 100644 --- a/docs/nav.yml +++ b/docs/nav.yml @@ -66,6 +66,9 @@ - title: npm explore url: /commands/npm-explore description: Browse an installed package + - title: npm find-dupes + url: /commands/npm-find-dupes + description: Find duplication in the package tree - title: npm fund url: /commands/npm-fund description: Retrieve funding information @@ -114,6 +117,9 @@ - title: npm ping url: /commands/npm-ping description: Ping npm registry + - title: npm pkg + url: /commands/npm-pkg + description: Manages your package.json - title: npm prefix url: /commands/npm-prefix description: Display prefix diff --git a/docs/package.json b/docs/package.json new file mode 100644 index 0000000000000..e48785bdba923 --- /dev/null +++ b/docs/package.json @@ -0,0 +1,19 @@ +{ + "name": "docs", + "description": "The npm cli documentation", + "version": "1.0.0", + "scripts": { + "build": "node dockhand" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/cli" + }, + "devDependencies": { + "@mdx-js/mdx": "^1.6.22", + "cmark-gfm": "^0.8.3", + "jsdom": "^16.4.0", + "marked-man": "^0.7.0", + "yaml": "^1.10.0" + } +} diff --git a/lib/access.js b/lib/access.js index e11934af43ebc..2f0a979ff384e 100644 --- a/lib/access.js +++ b/lib/access.js @@ -3,10 +3,9 @@ const path = require('path') const libaccess = require('libnpmaccess') const readPackageJson = require('read-package-json-fast') -const output = require('./utils/output.js') const otplease = require('./utils/otplease.js') -const usageUtil = require('./utils/usage.js') const getIdentity = require('./utils/get-identity.js') +const BaseCommand = require('./base-command.js') const subcommands = [ 'public', @@ -20,24 +19,35 @@ const subcommands = [ '2fa-not-required', ] -class Access { - constructor (npm) { - this.npm = npm +class Access extends BaseCommand { + static get description () { + return 'Set access level on published packages' } - get usage () { - return usageUtil( - 'access', - 'npm access public [<package>]\n' + - 'npm access restricted [<package>]\n' + - 'npm access grant <read-only|read-write> <scope:team> [<package>]\n' + - 'npm access revoke <scope:team> [<package>]\n' + - 'npm access 2fa-required [<package>]\n' + - 'npm access 2fa-not-required [<package>]\n' + - 'npm access ls-packages [<user>|<scope>|<scope:team>]\n' + - 'npm access ls-collaborators [<package> [<user>]]\n' + - 'npm access edit [<package>]' - ) + static get name () { + return 'access' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'registry', + 'otp', + ] + } + + static get usage () { + return [ + 'public [<package>]', + 'restricted [<package>]', + 'grant <read-only|read-write> <scope:team> [<package>]', + 'revoke <scope:team> [<package>]', + '2fa-required [<package>]', + '2fa-not-required [<package>]', + 'ls-packages [<user>|<scope>|<scope:team>]', + 'ls-collaborators [<package> [<user>]]', + 'edit [<package>]', + ] } async completion (opts) { @@ -67,12 +77,7 @@ class Access { } exec (args, cb) { - this.access(args) - .then(x => cb(null, x)) - .catch(err => err.code === 'EUSAGE' - ? 
cb(err.message) - : cb(err) - ) + this.access(args).then(() => cb()).catch(cb) } async access ([cmd, ...args]) { @@ -82,7 +87,7 @@ class Access { if (!subcommands.includes(cmd) || !this[cmd]) throw this.usageError(`${cmd} is not a recognized subcommand.`) - return this[cmd](args, { ...this.npm.flatOptions }) + return this[cmd](args, this.npm.flatOptions) } public ([pkg], opts) { @@ -157,7 +162,7 @@ class Access { const pkgs = await libaccess.lsPackages(owner, opts) // TODO - print these out nicely (breaking change) - output(JSON.stringify(pkgs, null, 2)) + this.npm.output(JSON.stringify(pkgs, null, 2)) } get ['ls-collaborators'] () { @@ -169,7 +174,7 @@ class Access { const collabs = await libaccess.lsCollaborators(pkgName, usr, opts) // TODO - print these out nicely (breaking change) - output(JSON.stringify(collabs, null, 2)) + this.npm.output(JSON.stringify(collabs, null, 2)) } async edit () { @@ -203,12 +208,6 @@ class Access { return name } } - - usageError (msg) { - return Object.assign(new Error(`\nUsage: ${msg}\n\n` + this.usage), { - code: 'EUSAGE', - }) - } } module.exports = Access diff --git a/lib/adduser.js b/lib/adduser.js index dac0f5a46840d..e502276a1743c 100644 --- a/lib/adduser.js +++ b/lib/adduser.js @@ -1,7 +1,6 @@ const log = require('npmlog') -const output = require('./utils/output.js') -const usageUtil = require('./utils/usage.js') const replaceInfo = require('./utils/replace-info.js') +const BaseCommand = require('./base-command.js') const authTypes = { legacy: require('./auth/legacy.js'), oauth: require('./auth/oauth.js'), @@ -9,17 +8,20 @@ const authTypes = { sso: require('./auth/sso.js'), } -class AddUser { - constructor (npm) { - this.npm = npm +class AddUser extends BaseCommand { + static get description () { + return 'Add a registry user account' } - /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'adduser', - 'npm adduser [--registry=url] [--scope=@orgname] [--always-auth]' - ) + static get name () { + return 'adduser' + } + + static get params () { + return [ + 'registry', + 'scope', + ] } exec (args, cb) { @@ -49,7 +51,7 @@ class AddUser { scope, }) - output(message) + this.npm.output(message) } getRegistry ({ scope, registry }) { @@ -73,11 +75,9 @@ class AddUser { async updateConfig ({ newCreds, registry, scope }) { this.npm.config.delete('_token', 'user') // prevent legacy pollution - + this.npm.config.setCredentialsByURI(registry, newCreds) if (scope) this.npm.config.set(scope + ':registry', registry, 'user') - - this.npm.config.setCredentialsByURI(registry, newCreds) await this.npm.config.save('user') } } diff --git a/lib/audit.js b/lib/audit.js index dfa01cb2709fa..54480d1f0cbf9 100644 --- a/lib/audit.js +++ b/lib/audit.js @@ -1,23 +1,36 @@ const Arborist = require('@npmcli/arborist') const auditReport = require('npm-audit-report') -const output = require('./utils/output.js') const reifyFinish = require('./utils/reify-finish.js') const auditError = require('./utils/audit-error.js') -const usageUtil = require('./utils/usage.js') +const ArboristWorkspaceCmd = require('./workspaces/arborist-cmd.js') -class Audit { - constructor (npm) { - this.npm = npm +class Audit extends ArboristWorkspaceCmd { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Run a security audit' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'audit', - 'npm audit [--json] [--production]' + - '\nnpm audit fix ' + - 
'[--force|--package-lock-only|--dry-run|--production|--only=(dev|prod)]' - ) + static get name () { + return 'audit' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'audit-level', + 'dry-run', + 'force', + 'json', + 'package-lock-only', + 'omit', + ...super.params, + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[fix]'] } async completion (opts) { @@ -39,11 +52,16 @@ class Audit { } async audit (args) { - const arb = new Arborist({ + const reporter = this.npm.config.get('json') ? 'json' : 'detail' + const opts = { ...this.npm.flatOptions, audit: true, path: this.npm.prefix, - }) + reporter, + workspaces: this.workspaceNames, + } + + const arb = new Arborist(opts) const fix = args[0] === 'fix' await arb.audit({ fix }) if (fix) @@ -51,13 +69,9 @@ class Audit { else { // will throw if there's an error, because this is an audit command auditError(this.npm, arb.auditReport) - const reporter = this.npm.flatOptions.json ? 'json' : 'detail' - const result = auditReport(arb.auditReport, { - ...this.npm.flatOptions, - reporter, - }) + const result = auditReport(arb.auditReport, opts) process.exitCode = process.exitCode || result.exitCode - output(result.report) + this.npm.output(result.report) } } } diff --git a/lib/auth/sso.js b/lib/auth/sso.js index ca8c501684c29..56cff3c06e292 100644 --- a/lib/auth/sso.js +++ b/lib/auth/sso.js @@ -7,13 +7,11 @@ // CLI, we can remove this, and fold the lib/auth/legacy.js back into // lib/adduser.js -const { promisify } = require('util') - const log = require('npmlog') const profile = require('npm-profile') const npmFetch = require('npm-registry-fetch') -const openUrl = promisify(require('../utils/open-url.js')) +const openUrl = require('../utils/open-url.js') const otplease = require('../utils/otplease.js') const pollForSession = ({ registry, token, opts }) => { diff --git a/lib/base-command.js b/lib/base-command.js new file mode 100644 index 0000000000000..870c69acc492d --- /dev/null +++ b/lib/base-command.js @@ -0,0 +1,84 @@ +// Base class for npm.commands[cmd] +const usageUtil = require('./utils/usage.js') +const ConfigDefinitions = require('./utils/config/definitions.js') +const getWorkspaces = require('./workspaces/get-workspaces.js') + +class BaseCommand { + constructor (npm) { + this.wrapWidth = 80 + this.npm = npm + this.workspaces = null + this.workspacePaths = null + } + + get name () { + return this.constructor.name + } + + get description () { + return this.constructor.description + } + + get usage () { + let usage = `npm ${this.constructor.name}\n\n` + if (this.constructor.description) + usage = `${usage}${this.constructor.description}\n\n` + + usage = `${usage}Usage:\n` + if (!this.constructor.usage) + usage = `${usage}npm ${this.constructor.name}` + else + usage = `${usage}${this.constructor.usage.map(u => `npm ${this.constructor.name} ${u}`).join('\n')}` + + if (this.constructor.params) + usage = `${usage}\n\nOptions:\n${this.wrappedParams}` + + // Mostly this just appends aliases, this could be more clear + usage = usageUtil(this.constructor.name, usage) + usage = `${usage}\n\nRun "npm help ${this.constructor.name}" for more info` + return usage + } + + get wrappedParams () { + let results = '' + let line = '' + + for (const param of this.constructor.params) { + const usage = `[${ConfigDefinitions[param].usage}]` + if (line.length && (line.length + usage.length) > this.wrapWidth) { + results = [results, 
line].filter(Boolean).join('\n') + line = '' + } + line = [line, usage].filter(Boolean).join(' ') + } + results = [results, line].filter(Boolean).join('\n') + return results + } + + usageError (msg) { + if (!msg) { + return Object.assign(new Error(`\nUsage: ${this.usage}`), { + code: 'EUSAGE', + }) + } + + return Object.assign(new Error(`\nUsage: ${msg}\n\n${this.usage}`), { + code: 'EUSAGE', + }) + } + + execWorkspaces (args, filters, cb) { + throw Object.assign( + new Error('This command does not support workspaces.'), + { code: 'ENOWORKSPACES' } + ) + } + + async setWorkspaces (filters) { + const ws = await getWorkspaces(filters, { path: this.npm.localPrefix }) + this.workspaces = ws + this.workspaceNames = [...ws.keys()] + this.workspacePaths = [...ws.values()] + } +} +module.exports = BaseCommand diff --git a/lib/bin.js b/lib/bin.js index 11490c41cbcc5..20e13f160f276 100644 --- a/lib/bin.js +++ b/lib/bin.js @@ -1,15 +1,17 @@ -const output = require('./utils/output.js') const envPath = require('./utils/path.js') -const usageUtil = require('./utils/usage.js') +const BaseCommand = require('./base-command.js') -class Bin { - constructor (npm) { - this.npm = npm +class Bin extends BaseCommand { + static get description () { + return 'Display npm bin folder' } - /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('bin', 'npm bin [-g]') + static get name () { + return 'bin' + } + + static get params () { + return ['global'] } exec (args, cb) { @@ -18,8 +20,8 @@ class Bin { async bin (args) { const b = this.npm.bin - output(b) - if (this.npm.flatOptions.global && !envPath.includes(b)) + this.npm.output(b) + if (this.npm.config.get('global') && !envPath.includes(b)) console.error('(not in PATH env variable)') } } diff --git a/lib/birthday.js b/lib/birthday.js index 5ea855512f9f6..92b1dd1c2e5fe 100644 --- a/lib/birthday.js +++ b/lib/birthday.js @@ -1,16 +1,9 @@ -class Birthday { - constructor (npm) { - this.npm = npm - Object.defineProperty(this.npm, 'flatOptions', { - value: { - ...npm.flatOptions, - package: ['@npmcli/npm-birthday'], - yes: true, - }, - }) - } +const BaseCommand = require('./base-command.js') +class Birthday extends BaseCommand { exec (args, cb) { + this.npm.config.set('package', ['@npmcli/npm-birthday']) + this.npm.config.set('yes', true) return this.npm.commands.exec(['npm-birthday'], cb) } } diff --git a/lib/bugs.js b/lib/bugs.js index fb0d7c92770c7..05897176104b5 100644 --- a/lib/bugs.js +++ b/lib/bugs.js @@ -1,17 +1,25 @@ const log = require('npmlog') const pacote = require('pacote') const openUrl = require('./utils/open-url.js') -const usageUtil = require('./utils/usage.js') const hostedFromMani = require('./utils/hosted-git-info-from-manifest.js') +const BaseCommand = require('./base-command.js') -class Bugs { - constructor (npm) { - this.npm = npm +class Bugs extends BaseCommand { + static get description () { + return 'Report bugs for a package in a web browser' + } + + static get name () { + return 'bugs' + } + + static get usage () { + return ['[<pkgname>]'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('bugs', 'npm bugs [<pkgname>]') + static get params () { + return ['browser', 'registry'] } exec (args, cb) { @@ -40,6 +48,9 @@ class Bugs { if (typeof mani.bugs === 'object' && mani.bugs.url) return mani.bugs.url + + if (typeof mani.bugs === 'object' && mani.bugs.email) + return `mailto:${mani.bugs.email}` } // try to get it from the repo, if possible diff --git 
a/lib/cache.js b/lib/cache.js index 8469559764fb3..55fb3e863631c 100644 --- a/lib/cache.js +++ b/lib/cache.js @@ -1,27 +1,37 @@ const cacache = require('cacache') const { promisify } = require('util') const log = require('npmlog') -const output = require('./utils/output.js') const pacote = require('pacote') const path = require('path') const rimraf = promisify(require('rimraf')) +const BaseCommand = require('./base-command.js') -const usageUtil = require('./utils/usage.js') -class Cache { - constructor (npm) { - this.npm = npm +class Cache extends BaseCommand { + static get description () { + return 'Manipulates packages cache' } - get usage () { - return usageUtil('cache', - 'npm cache add <tarball file>' + - '\nnpm cache add <folder>' + - '\nnpm cache add <tarball url>' + - '\nnpm cache add <git url>' + - '\nnpm cache add <name>@<version>' + - '\nnpm cache clean' + - '\nnpm cache verify' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'cache' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return ['cache'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return [ + 'add <tarball file>', + 'add <folder>', + 'add <tarball url>', + 'add <git url>', + 'add <name>@<version>', + 'clean', + 'verify', + ] } async completion (opts) { @@ -62,7 +72,7 @@ class Cache { throw new Error('npm cache clear does not accept arguments') const cachePath = path.join(this.npm.cache, '_cacache') - if (!this.npm.flatOptions.force) { + if (!this.npm.config.get('force')) { throw new Error(`As of npm@5, the npm cache self-heals from corruption issues by treating integrity mismatches as cache misses. As a result, data extracted from the cache is guaranteed to be valid. If you @@ -81,33 +91,30 @@ with --force.`) return rimraf(cachePath) } - // npm cache add <tarball-url> - // npm cache add <pkg> <ver> - // npm cache add <tarball> - // npm cache add <folder> + // npm cache add <tarball-url>... + // npm cache add <pkg> <ver>... + // npm cache add <tarball>... + // npm cache add <folder>... async add (args) { const usage = 'Usage:\n' + - ' npm cache add <tarball-url>\n' + - ' npm cache add <pkg>@<ver>\n' + - ' npm cache add <tarball>\n' + - ' npm cache add <folder>\n' + ' npm cache add <tarball-url>...\n' + + ' npm cache add <pkg>@<ver>...\n' + + ' npm cache add <tarball>...\n' + + ' npm cache add <folder>...\n' log.silly('cache add', 'args', args) - const spec = args[0] && args[0] + - (args[1] === undefined || args[1] === null ? '' : `@${args[1]}`) - - if (!spec) + if (args.length === 0) throw Object.assign(new Error(usage), { code: 'EUSAGE' }) - log.silly('cache add', 'spec', spec) - const opts = { ...this.npm.flatOptions } - - // we ask pacote for the thing, and then just throw the data - // away so that it tee-pipes it into the cache like it does - // for a normal request. - await pacote.tarball.stream(spec, stream => { - stream.resume() - return stream.promise() - }, opts) + return Promise.all(args.map(spec => { + log.silly('cache add', 'spec', spec) + // we ask pacote for the thing, and then just throw the data + // away so that it tee-pipes it into the cache like it does + // for a normal request. + return pacote.tarball.stream(spec, stream => { + stream.resume() + return stream.promise() + }, this.npm.flatOptions) + })) } async verify () { @@ -116,13 +123,13 @@ with --force.`) ? 
`~${cache.substr(process.env.HOME.length)}` : cache const stats = await cacache.verify(cache) - output(`Cache verified and compressed (${prefix})`) - output(`Content verified: ${stats.verifiedContent} (${stats.keptSize} bytes)`) - stats.badContentCount && output(`Corrupted content removed: ${stats.badContentCount}`) - stats.reclaimedCount && output(`Content garbage-collected: ${stats.reclaimedCount} (${stats.reclaimedSize} bytes)`) - stats.missingContent && output(`Missing content: ${stats.missingContent}`) - output(`Index entries: ${stats.totalEntries}`) - output(`Finished in ${stats.runTime.total / 1000}s`) + this.npm.output(`Cache verified and compressed (${prefix})`) + this.npm.output(`Content verified: ${stats.verifiedContent} (${stats.keptSize} bytes)`) + stats.badContentCount && this.npm.output(`Corrupted content removed: ${stats.badContentCount}`) + stats.reclaimedCount && this.npm.output(`Content garbage-collected: ${stats.reclaimedCount} (${stats.reclaimedSize} bytes)`) + stats.missingContent && this.npm.output(`Missing content: ${stats.missingContent}`) + this.npm.output(`Index entries: ${stats.totalEntries}`) + this.npm.output(`Finished in ${stats.runTime.total / 1000}s`) } } diff --git a/lib/ci.js b/lib/ci.js index 03a91a60463f2..6634ffcdc19bc 100644 --- a/lib/ci.js +++ b/lib/ci.js @@ -7,7 +7,6 @@ const fs = require('fs') const readdir = util.promisify(fs.readdir) const log = require('npmlog') -const usageUtil = require('./utils/usage.js') const removeNodeModules = async where => { const rimrafOpts = { glob: false } @@ -18,15 +17,26 @@ const removeNodeModules = async where => { await Promise.all(entries.map(f => rimraf(`${path}/${f}`, rimrafOpts))) process.emit('timeEnd', 'npm-ci:rm') } +const ArboristWorkspaceCmd = require('./workspaces/arborist-cmd.js') -class CI { - constructor (npm) { - this.npm = npm +class CI extends ArboristWorkspaceCmd { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Install a project with a clean slate' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('ci', 'npm ci') + static get name () { + return 'ci' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'audit', + 'ignore-scripts', + 'script-shell', + ] } exec (args, cb) { @@ -34,16 +44,22 @@ class CI { } async ci () { - if (this.npm.flatOptions.global) { + if (this.npm.config.get('global')) { const err = new Error('`npm ci` does not work for global packages') err.code = 'ECIGLOBAL' throw err } const where = this.npm.prefix - const { scriptShell, ignoreScripts } = this.npm.flatOptions - const arb = new Arborist({ ...this.npm.flatOptions, path: where }) + const opts = { + ...this.npm.flatOptions, + path: where, + log: this.npm.log, + save: false, // npm ci should never modify the lockfile or package.json + workspaces: this.workspaceNames, + } + const arb = new Arborist(opts) await Promise.all([ arb.loadVirtual().catch(er => { log.verbose('loadVirtual', er.stack) @@ -55,9 +71,9 @@ class CI { }), removeNodeModules(where), ]) - // npm ci should never modify the lockfile or package.json - await arb.reify({ ...this.npm.flatOptions, save: false }) + await arb.reify(opts) + const ignoreScripts = this.npm.config.get('ignore-scripts') // run the same set of scripts that `npm install` runs. 
if (!ignoreScripts) { const scripts = [ @@ -69,6 +85,7 @@ class CI { 'prepare', 'postprepare', ] + const scriptShell = this.npm.config.get('script-shell') || undefined for (const event of scripts) { await runScript({ path: where, diff --git a/lib/cli.js b/lib/cli.js index 910b674eaa790..9544f8451f8ae 100644 --- a/lib/cli.js +++ b/lib/cli.js @@ -1,5 +1,5 @@ // Separated out for easier unit testing -module.exports = (process) => { +module.exports = async (process) => { // set it here so that regardless of what happens later, we don't // leak any private CLI configs to other programs process.title = 'npm' @@ -19,7 +19,8 @@ module.exports = (process) => { checkForUnsupportedNode() const npm = require('../lib/npm.js') - const errorHandler = require('../lib/utils/error-handler.js') + const exitHandler = require('../lib/utils/exit-handler.js') + exitHandler.setNpm(npm) // if npm is called as "npmg" or "npm_g", then // run in global mode. @@ -31,35 +32,46 @@ module.exports = (process) => { log.info('using', 'npm@%s', npm.version) log.info('using', 'node@%s', process.version) - process.on('uncaughtException', errorHandler) - process.on('unhandledRejection', errorHandler) + process.on('uncaughtException', exitHandler) + process.on('unhandledRejection', exitHandler) + + const updateNotifier = require('../lib/utils/update-notifier.js') // now actually fire up npm and run the command. // this is how to use npm programmatically: - const updateNotifier = require('../lib/utils/update-notifier.js') - npm.load(async er => { - if (er) - return errorHandler(er) + try { + await npm.load() if (npm.config.get('version', 'cli')) { - console.log(npm.version) - return errorHandler.exit(0) + npm.output(npm.version) + return exitHandler() } + // npm --versions=cli if (npm.config.get('versions', 'cli')) { npm.argv = ['version'] npm.config.set('usage', false, 'cli') } - npm.updateNotification = await updateNotifier(npm) + updateNotifier(npm) const cmd = npm.argv.shift() + if (!cmd) { + npm.output(npm.usage) + process.exitCode = 1 + return exitHandler() + } + const impl = npm.commands[cmd] - if (impl) - impl(npm.argv, errorHandler) - else { - npm.config.set('usage', false) - npm.argv.unshift(cmd) - npm.commands.help(npm.argv, errorHandler) + if (!impl) { + const didYouMean = require('./utils/did-you-mean.js') + const suggestions = await didYouMean(npm, npm.localPrefix, cmd) + npm.output(`Unknown command: "${cmd}"${suggestions}\n\nTo see a list of supported npm commands, run:\n npm help`) + process.exitCode = 1 + return exitHandler() } - }) + + impl(npm.argv, exitHandler) + } catch (err) { + return exitHandler(err) + } } diff --git a/lib/completion.js b/lib/completion.js index 4c37e6ef354ef..fa3b5f2dd36cc 100644 --- a/lib/completion.js +++ b/lib/completion.js @@ -29,30 +29,30 @@ // as an array. 
// -const { types, shorthands } = require('./utils/config.js') +const { definitions, shorthands } = require('./utils/config/index.js') const deref = require('./utils/deref-command.js') const { aliases, cmdList, plumbing } = require('./utils/cmd-list.js') const aliasNames = Object.keys(aliases) const fullList = cmdList.concat(aliasNames).filter(c => !plumbing.includes(c)) const nopt = require('nopt') -const configNames = Object.keys(types) +const configNames = Object.keys(definitions) const shorthandNames = Object.keys(shorthands) const allConfs = configNames.concat(shorthandNames) const isWindowsShell = require('./utils/is-windows-shell.js') -const output = require('./utils/output.js') const fileExists = require('./utils/file-exists.js') -const usageUtil = require('./utils/usage.js') const { promisify } = require('util') +const BaseCommand = require('./base-command.js') -class Completion { - constructor (npm) { - this.npm = npm +class Completion extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Tab Completion for npm' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('completion', 'source <(npm completion)') + static get name () { + return 'completion' } // completion for the completion command @@ -131,14 +131,14 @@ class Completion { if (partialWords.slice(0, -1).indexOf('--') === -1) { if (word.charAt(0) === '-') - return wrap(opts, configCompl(opts)) + return this.wrap(opts, configCompl(opts)) if (words[w - 1] && words[w - 1].charAt(0) === '-' && !isFlag(words[w - 1])) { // awaiting a value for a non-bool config. // don't even try to do this for now - return wrap(opts, configValueCompl(opts)) + return this.wrap(opts, configValueCompl(opts)) } } @@ -147,12 +147,16 @@ class Completion { // take a little shortcut and use npm's arg parsing logic. // don't have to worry about the last arg being implicitly // boolean'ed, since the last block will catch that. + const types = Object.entries(definitions).reduce((types, [key, def]) => { + types[key] = def.type + return types + }, {}) const parsed = opts.conf = nopt(types, shorthands, partialWords.slice(0, -1), 0) // check if there's a command already. const cmd = parsed.argv.remain[1] if (!cmd) - return wrap(opts, cmdCompl(opts)) + return this.wrap(opts, cmdCompl(opts)) Object.keys(parsed).forEach(k => this.npm.config.set(k, parsed[k])) @@ -162,9 +166,29 @@ class Completion { const impl = this.npm.commands[cmd] if (impl && impl.completion) { const comps = await impl.completion(opts) - return wrap(opts, comps) + return this.wrap(opts, comps) } } + + // The command should respond with an array. Loop over that, + // wrapping quotes around any that have spaces, and writing + // them to stdout. + // If any of the items are arrays, then join them with a space. + // Ie, returning ['a', 'b c', ['d', 'e']] would allow it to expand + // to: 'a', 'b c', or 'd' 'e' + wrap (opts, compls) { + if (!Array.isArray(compls)) + compls = compls ? [compls] : [] + + compls = compls.map(c => + Array.isArray(c) ? c.map(escape).join(' ') : escape(c)) + + if (opts.partialWord) + compls = compls.filter(c => c.startsWith(opts.partialWord)) + + if (compls.length > 0) + this.npm.output(compls.join('\n')) + } } const dumpScript = async () => { @@ -214,26 +238,6 @@ const unescape = w => w.charAt(0) === '\'' ? w.replace(/^'|'$/g, '') const escape = w => !/\s+/.test(w) ? w : '\'' + w + '\'' -// The command should respond with an array. 
Loop over that, -// wrapping quotes around any that have spaces, and writing -// them to stdout. -// If any of the items are arrays, then join them with a space. -// Ie, returning ['a', 'b c', ['d', 'e']] would allow it to expand -// to: 'a', 'b c', or 'd' 'e' -const wrap = (opts, compls) => { - if (!Array.isArray(compls)) - compls = compls ? [compls] : [] - - compls = compls.map(c => - Array.isArray(c) ? c.map(escape).join(' ') : escape(c)) - - if (opts.partialWord) - compls = compls.filter(c => c.startsWith(opts.partialWord)) - - if (compls.length > 0) - output(compls.join('\n')) -} - // the current word has a dash. Return the config names, // with the same number of dashes as the current word has. const configCompl = opts => { @@ -256,7 +260,7 @@ const isFlag = word => { const split = word.match(/^(-*)((?:no-)+)?(.*)$/) const no = split[2] const conf = split[3] - const type = types[conf] + const {type} = definitions[conf] return no || type === Boolean || (Array.isArray(type) && type.includes(Boolean)) || diff --git a/lib/config.js b/lib/config.js index 2805db9b80ec7..a56dd92ffbde6 100644 --- a/lib/config.js +++ b/lib/config.js @@ -1,6 +1,5 @@ -const { defaults, types } = require('./utils/config.js') -const usageUtil = require('./utils/usage.js') -const output = require('./utils/output.js') +// don't expand so that we only assemble the set of defaults when needed +const configDefs = require('./utils/config/index.js') const mkdirp = require('mkdirp-infer-owner') const { dirname } = require('path') @@ -29,22 +28,37 @@ const keyValues = args => { const publicVar = k => !/^(\/\/[^:]+:)?_/.test(k) -class Config { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Config extends BaseCommand { + static get description () { + return 'Manage the npm configuration files' } - get usage () { - return usageUtil( - 'config', - 'npm config set <key>=<value> [<key>=<value> ...]' + - '\nnpm config get [<key> [<key> ...]]' + - '\nnpm config delete <key> [<key> ...]' + - '\nnpm config list [--json]' + - '\nnpm config edit' + - '\nnpm set <key>=<value> [<key>=<value> ...]' + - '\nnpm get [<key> [<key> ...]]' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'config' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return [ + 'set <key>=<value> [<key>=<value> ...]', + 'get [<key> [<key> ...]]', + 'delete <key> [<key> ...]', + 'list [--json]', + 'edit', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'json', + 'global', + 'editor', + 'location', + 'long', + ] } async completion (opts) { @@ -72,7 +86,7 @@ class Config { case 'get': case 'delete': case 'rm': - return Object.keys(types) + return Object.keys(configDefs.definitions) case 'edit': case 'list': case 'ls': @@ -85,6 +99,11 @@ class Config { this.config(args).then(() => cb()).catch(cb) } + execWorkspaces (args, filters, cb) { + this.npm.log.warn('config', 'This command does not support workspaces.') + this.exec(args, cb) + } + async config ([action, ...args]) { this.npm.log.disableProgress() try { @@ -102,7 +121,7 @@ class Config { break case 'list': case 'ls': - await (this.npm.flatOptions.json ? this.listJson() : this.list()) + await (this.npm.config.get('json') ? 
this.listJson() : this.list()) break case 'edit': await this.edit() @@ -119,7 +138,7 @@ class Config { if (!args.length) throw this.usageError() - const where = this.npm.flatOptions.global ? 'global' : 'user' + const where = this.npm.config.get('location') for (const [key, val] of Object.entries(keyValues(args))) { this.npm.log.info('config', 'set %j %j', key, val) this.npm.config.set(key, val || '', where) @@ -142,22 +161,22 @@ class Config { const pref = keys.length > 1 ? `${key}=` : '' out.push(pref + this.npm.config.get(key)) } - output(out.join('\n')) + this.npm.output(out.join('\n')) } async del (keys) { if (!keys.length) throw this.usageError() - const where = this.npm.flatOptions.global ? 'global' : 'user' + const where = this.npm.config.get('location') for (const key of keys) this.npm.config.delete(key, where) await this.npm.config.save(where) } async edit () { - const { editor: e, global } = this.npm.flatOptions - const where = global ? 'global' : 'user' + const e = this.npm.config.get('editor') + const where = this.npm.config.get('location') const file = this.npm.config.data.get(where).source // save first, just to make sure it's synced up @@ -167,7 +186,8 @@ class Config { const data = ( await readFile(file, 'utf8').catch(() => '') ).replace(/\r\n/g, '\n') - const defData = Object.entries(defaults).reduce((str, [key, val]) => { + const entries = Object.entries(configDefs.defaults) + const defData = entries.reduce((str, [key, val]) => { const obj = { [key]: val } const i = ini.stringify(obj) .replace(/\r\n/g, '\n') // normalizes output from ini.stringify @@ -189,7 +209,7 @@ class Config { ; Configs like \`//<hostname>/:_authToken\` are auth that is restricted ; to the registry host specified. -${data.split('\n').sort((a, b) => a.localeCompare(b)).join('\n').trim()} +${data.split('\n').sort((a, b) => a.localeCompare(b, 'en')).join('\n').trim()} ;;;; ; all available options shown below with default values @@ -212,12 +232,12 @@ ${defData} async list () { const msg = [] - const { long } = this.npm.flatOptions + const long = this.npm.config.get('long') for (const [where, { data, source }] of this.npm.config.data.entries()) { if (where === 'default' && !long) continue - const keys = Object.keys(data).sort((a, b) => a.localeCompare(b)) + const keys = Object.keys(data).sort((a, b) => a.localeCompare(b, 'en')) if (!keys.length) continue @@ -241,7 +261,7 @@ ${defData} ) } - output(msg.join('\n').trim()) + this.npm.output(msg.join('\n').trim()) } async listJson () { @@ -252,11 +272,7 @@ ${defData} publicConf[key] = this.npm.config.get(key) } - output(JSON.stringify(publicConf, null, 2)) - } - - usageError () { - return Object.assign(new Error(this.usage), { code: 'EUSAGE' }) + this.npm.output(JSON.stringify(publicConf, null, 2)) } } diff --git a/lib/dedupe.js b/lib/dedupe.js index 59978895effb2..aaa7a30d10416 100644 --- a/lib/dedupe.js +++ b/lib/dedupe.js @@ -1,16 +1,35 @@ // dedupe duplicated packages, or find them in the tree const Arborist = require('@npmcli/arborist') -const usageUtil = require('./utils/usage.js') const reifyFinish = require('./utils/reify-finish.js') -class Dedupe { - constructor (npm) { - this.npm = npm +const ArboristWorkspaceCmd = require('./workspaces/arborist-cmd.js') + +class Dedupe extends ArboristWorkspaceCmd { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Reduce duplication in the package tree' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 
'dedupe' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('dedupe', 'npm dedupe') + static get params () { + return [ + 'global-style', + 'legacy-bundling', + 'strict-peer-deps', + 'package-lock', + 'omit', + 'ignore-scripts', + 'audit', + 'bin-links', + 'fund', + 'dry-run', + ...super.params, + ] } exec (args, cb) { @@ -26,12 +45,15 @@ class Dedupe { const dryRun = this.npm.config.get('dry-run') const where = this.npm.prefix - const arb = new Arborist({ + const opts = { ...this.npm.flatOptions, + log: this.npm.log, path: where, dryRun, - }) - await arb.dedupe(this.npm.flatOptions) + workspaces: this.workspaceNames, + } + const arb = new Arborist(opts) + await arb.dedupe(opts) await reifyFinish(this.npm, arb) } } diff --git a/lib/deprecate.js b/lib/deprecate.js index 48f27ab6c35e8..156bbf875ea42 100644 --- a/lib/deprecate.js +++ b/lib/deprecate.js @@ -4,18 +4,29 @@ const npa = require('npm-package-arg') const semver = require('semver') const getIdentity = require('./utils/get-identity.js') const libaccess = require('libnpmaccess') -const usageUtil = require('./utils/usage.js') +const BaseCommand = require('./base-command.js') -class Deprecate { - constructor (npm) { - this.npm = npm +class Deprecate extends BaseCommand { + static get description () { + return 'Deprecate a version of a package' } - get usage () { - return usageUtil( - 'deprecate', - 'npm deprecate <pkg>[@<version>] <message>' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'deprecate' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['<pkg>[@<version>] <message>'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'registry', + 'otp', + ] + } async completion (opts) { @@ -38,7 +49,8 @@ class Deprecate { } async deprecate ([pkg, msg]) { - if (!pkg || !msg) + // msg == null because '' is a valid value, it indicates undeprecate + if (!pkg || msg == null) throw this.usageError() // fetch the data and make sure it exists. 
@@ -71,12 +83,6 @@ class Deprecate { ignoreBody: true, })) } - - usageError () { - return Object.assign(new Error(`\nUsage: ${this.usage}`), { - code: 'EUSAGE', - }) - } } module.exports = Deprecate diff --git a/lib/diff.js b/lib/diff.js index ea0340a4909d2..01658c4664d05 100644 --- a/lib/diff.js +++ b/lib/diff.js @@ -1,45 +1,61 @@ const { resolve } = require('path') const semver = require('semver') -const libdiff = require('libnpmdiff') +const libnpmdiff = require('libnpmdiff') const npa = require('npm-package-arg') const Arborist = require('@npmcli/arborist') const npmlog = require('npmlog') const pacote = require('pacote') const pickManifest = require('npm-pick-manifest') -const usageUtil = require('./utils/usage.js') -const output = require('./utils/output.js') -const readLocalPkg = require('./utils/read-local-package.js') +const readPackageName = require('./utils/read-package-name.js') +const BaseCommand = require('./base-command.js') -class Diff { - constructor (npm) { - this.npm = npm +class Diff extends BaseCommand { + static get description () { + return 'The registry diff command' } - get usage () { - return usageUtil( - 'diff', - 'npm diff [...<paths>]' + - '\nnpm diff --diff=<pkg-name> [...<paths>]' + - '\nnpm diff --diff=<version-a> [--diff=<version-b>] [...<paths>]' + - '\nnpm diff --diff=<spec-a> [--diff=<spec-b>] [...<paths>]' + - '\nnpm diff [--diff-ignore-all-space] [--diff-name-only] [...<paths>] [...<paths>]' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'diff' } - get where () { - const globalTop = resolve(this.npm.globalDir, '..') - const { global } = this.npm.flatOptions - return global ? globalTop : this.npm.prefix + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return [ + '[...<paths>]', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'diff', + 'diff-name-only', + 'diff-unified', + 'diff-ignore-all-space', + 'diff-no-prefix', + 'diff-src-prefix', + 'diff-dst-prefix', + 'diff-text', + 'global', + 'tag', + 'workspace', + 'workspaces', + ] } exec (args, cb) { this.diff(args).then(() => cb()).catch(cb) } + execWorkspaces (args, filters, cb) { + this.diffWorkspaces(args, filters).then(() => cb()).catch(cb) + } + async diff (args) { - const specs = this.npm.flatOptions.diff.filter(d => d) + const specs = this.npm.config.get('diff').filter(d => d) if (specs.length > 2) { throw new TypeError( 'Can\'t use more than two --diff arguments.\n\n' + @@ -47,74 +63,84 @@ class Diff { ) } + // diffWorkspaces may have set this already + if (!this.prefix) + this.prefix = this.npm.prefix + + // this is the "top" directory, one up from node_modules + // in global mode we have to walk one up from globalDir because our + // node_modules is sometimes under ./lib, and in global mode we're only ever + // walking through node_modules (because we will have been given a package + // name already) + if (this.npm.config.get('global')) + this.top = resolve(this.npm.globalDir, '..') + else + this.top = this.prefix + const [a, b] = await this.retrieveSpecs(specs) npmlog.info('diff', { src: a, dst: b }) - const res = await libdiff( - [a, b], - { ...this.npm.flatOptions, diffFiles: args } - ) - return output(res) + const res = await libnpmdiff([a, b], { + ...this.npm.flatOptions, + diffFiles: args, + where: this.top, + }) + return this.npm.output(res) } - async retrieveSpecs ([a, b]) { - // no arguments, defaults to comparing cwd - // to its 
latest published registry version - if (!a) - return this.defaultSpec() - - // single argument, used to compare wanted versions of an - // installed dependency or to compare the cwd to a published version - if (!b) - return this.transformSingleSpec(a) - - const specs = await this.convertVersionsToSpecs([a, b]) - return this.findVersionsByPackageName(specs) + async diffWorkspaces (args, filters) { + await this.setWorkspaces(filters) + for (const workspacePath of this.workspacePaths) { + this.top = workspacePath + this.prefix = workspacePath + await this.diff(args) + } } - async defaultSpec () { - let noPackageJson - let pkgName + // get the package name from the packument at `path` + // throws if no packument is present OR if it does not have `name` attribute + async packageName (path) { + let name try { - pkgName = await readLocalPkg(this.npm) + name = await readPackageName(this.prefix) } catch (e) { npmlog.verbose('diff', 'could not read project dir package.json') - noPackageJson = true } - if (!pkgName || noPackageJson) { - throw new Error( - 'Needs multiple arguments to compare or run from a project dir.\n\n' + - `Usage:\n${this.usage}` - ) - } + if (!name) + throw this.usageError('Needs multiple arguments to compare or run from a project dir.\n') - return [ - `${pkgName}@${this.npm.flatOptions.defaultTag}`, - `file:${this.npm.prefix}`, - ] + return name } - async transformSingleSpec (a) { + async retrieveSpecs ([a, b]) { + if (a && b) { + const specs = await this.convertVersionsToSpecs([a, b]) + return this.findVersionsByPackageName(specs) + } + + // no arguments, defaults to comparing cwd + // to its latest published registry version + if (!a) { + const pkgName = await this.packageName(this.prefix) + return [ + `${pkgName}@${this.npm.config.get('tag')}`, + `file:${this.prefix}`, + ] + } + + // single argument, used to compare wanted versions of an + // installed dependency or to compare the cwd to a published version let noPackageJson let pkgName try { - pkgName = await readLocalPkg(this.npm) + pkgName = await readPackageName(this.prefix) } catch (e) { npmlog.verbose('diff', 'could not read project dir package.json') noPackageJson = true } - const missingPackageJson = new Error( - 'Needs multiple arguments to compare or run from a project dir.\n\n' + - `Usage:\n${this.usage}` - ) - - const specSelf = () => { - if (noPackageJson) - throw missingPackageJson - return `file:${this.npm.prefix}` - } + const missingPackageJson = this.usageError('Needs multiple arguments to compare or run from a project dir.\n') // using a valid semver range, that means it should just diff // the cwd against a published version to the registry using the @@ -122,10 +148,9 @@ class Diff { if (semver.validRange(a)) { if (!pkgName) throw missingPackageJson - return [ `${pkgName}@${a}`, - specSelf(), + `file:${this.prefix}`, ] } @@ -139,7 +164,7 @@ class Diff { try { const opts = { ...this.npm.flatOptions, - path: this.where, + path: this.top, } const arb = new Arborist(opts) actualTree = await arb.loadActual(opts) @@ -151,9 +176,11 @@ class Diff { } if (!node || !node.name || !node.package || !node.package.version) { + if (noPackageJson) + throw missingPackageJson return [ `${spec.name}@${spec.fetchSpec}`, - specSelf(), + `file:${this.prefix}`, ] } @@ -199,14 +226,10 @@ class Diff { } else if (spec.type === 'directory') { return [ `file:${spec.fetchSpec}`, - specSelf(), + `file:${this.prefix}`, ] - } else { - throw new Error( - 'Spec type not supported.\n\n' + - `Usage:\n${this.usage}` - ) - } + } else + throw 
this.usageError(`Spec type ${spec.type} not supported.\n`) } async convertVersionsToSpecs ([a, b]) { @@ -217,17 +240,14 @@ class Diff { if (semverA && semverB) { let pkgName try { - pkgName = await readLocalPkg(this.npm) + pkgName = await readPackageName(this.prefix) } catch (e) { npmlog.verbose('diff', 'could not read project dir package.json') } - if (!pkgName) { - throw new Error( - 'Needs to be run from a project dir in order to diff two versions.\n\n' + - `Usage:\n${this.usage}` - ) - } + if (!pkgName) + throw this.usageError('Needs to be run from a project dir in order to diff two versions.\n') + return [`${pkgName}@${a}`, `${pkgName}@${b}`] } @@ -248,7 +268,7 @@ class Diff { try { const opts = { ...this.npm.flatOptions, - path: this.where, + path: this.top, } const arb = new Arborist(opts) actualTree = await arb.loadActual(opts) diff --git a/lib/dist-tag.js b/lib/dist-tag.js index 171a88c527e5d..e32dcf61fff80 100644 --- a/lib/dist-tag.js +++ b/lib/dist-tag.js @@ -3,23 +3,32 @@ const npa = require('npm-package-arg') const regFetch = require('npm-registry-fetch') const semver = require('semver') -const output = require('./utils/output.js') const otplease = require('./utils/otplease.js') -const readLocalPkgName = require('./utils/read-local-package.js') -const usageUtil = require('./utils/usage.js') +const readPackageName = require('./utils/read-package-name.js') +const BaseCommand = require('./base-command.js') -class DistTag { - constructor (npm) { - this.npm = npm +class DistTag extends BaseCommand { + static get description () { + return 'Modify package distribution tags' } - get usage () { - return usageUtil( - 'dist-tag', - 'npm dist-tag add <pkg>@<version> [<tag>]' + - '\nnpm dist-tag rm <pkg> <tag>' + - '\nnpm dist-tag ls [<pkg>]' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return ['workspace', 'workspaces'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'dist-tag' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return [ + 'add <pkg>@<version> [<tag>]', + 'rm <pkg> <tag>', + 'ls [<pkg>]', + ] } async completion (opts) { @@ -39,15 +48,14 @@ class DistTag { async distTag ([cmdName, pkg, tag]) { const opts = this.npm.flatOptions - const has = (items) => new Set(items).has(cmdName) - if (has(['add', 'a', 'set', 's'])) + if (['add', 'a', 'set', 's'].includes(cmdName)) return this.add(pkg, tag, opts) - if (has(['rm', 'r', 'del', 'd', 'remove'])) + if (['rm', 'r', 'del', 'd', 'remove'].includes(cmdName)) return this.remove(pkg, tag, opts) - if (has(['ls', 'l', 'sl', 'list'])) + if (['ls', 'l', 'sl', 'list'].includes(cmdName)) return this.list(pkg, opts) if (!pkg) { @@ -55,18 +63,45 @@ class DistTag { // should be listing the existing tags return this.list(cmdName, opts) } else - throw this.usage + throw this.usageError() + } + + execWorkspaces (args, filters, cb) { + this.distTagWorkspaces(args, filters).then(() => cb()).catch(cb) + } + + async distTagWorkspaces ([cmdName, pkg, tag], filters) { + // cmdName is some form of list + // pkg is one of: + // - unset + // - . + // - .@version + if (['ls', 'l', 'sl', 'list'].includes(cmdName) && (!pkg || pkg === '.' || /^\.@/.test(pkg))) + return this.listWorkspaces(filters) + + // pkg is unset + // cmdName is one of: + // - unset + // - . + // - .@version + if (!pkg && (!cmdName || cmdName === '.' 
|| /^\.@/.test(cmdName))) + return this.listWorkspaces(filters) + + // anything else is just a regular dist-tag command + // so we fallback to the non-workspaces implementation + log.warn('Ignoring workspaces for specified package') + return this.distTag([cmdName, pkg, tag]) } async add (spec, tag, opts) { spec = npa(spec || '') const version = spec.rawSpec - const defaultTag = tag || opts.defaultTag + const defaultTag = tag || this.npm.config.get('tag') log.verbose('dist-tag add', defaultTag, 'to', spec.name + '@' + version) if (!spec.name || !version || !defaultTag) - throw this.usage + throw this.usageError() const t = defaultTag.trim() @@ -91,7 +126,7 @@ class DistTag { spec, } await otplease(reqOpts, reqOpts => regFetch(url, reqOpts)) - output(`+${t}: ${spec.name}@${version}`) + this.npm.output(`+${t}: ${spec.name}@${version}`) } async remove (spec, tag, opts) { @@ -99,7 +134,7 @@ class DistTag { log.verbose('dist-tag del', tag, 'from', spec.name) if (!spec.name) - throw this.usage + throw this.usageError() const tags = await this.fetchTags(spec, opts) if (!tags[tag]) { @@ -116,14 +151,16 @@ class DistTag { spec, } await otplease(reqOpts, reqOpts => regFetch(url, reqOpts)) - output(`-${tag}: ${spec.name}@${version}`) + this.npm.output(`-${tag}: ${spec.name}@${version}`) } async list (spec, opts) { if (!spec) { - const pkg = await readLocalPkgName(this.npm) + if (this.npm.config.get('global')) + throw this.usageError() + const pkg = await readPackageName(this.npm.prefix) if (!pkg) - throw this.usage + throw this.usageError() return this.list(pkg, opts) } @@ -133,7 +170,7 @@ class DistTag { const tags = await this.fetchTags(spec, opts) const msg = Object.keys(tags).map(k => `${k}: ${tags[k]}`).sort().join('\n') - output(msg) + this.npm.output(msg) return tags } catch (err) { log.error('dist-tag ls', "Couldn't get dist-tag data for", spec) @@ -141,6 +178,21 @@ class DistTag { } } + async listWorkspaces (filters) { + await this.setWorkspaces(filters) + + for (const name of this.workspaceNames) { + try { + this.npm.output(`${name}:`) + await this.list(npa(name), this.npm.flatOptions) + } catch (err) { + // set the exitCode directly, but ignore the error + // since it will have already been logged by this.list() + process.exitCode = 1 + } + } + } + async fetchTags (spec, opts) { const data = await regFetch.json( `/-/package/${spec.escapedName}/dist-tags`, diff --git a/lib/docs.js b/lib/docs.js index 2dad7a26db4e7..69a19c35c3a13 100644 --- a/lib/docs.js +++ b/lib/docs.js @@ -1,23 +1,38 @@ const log = require('npmlog') const pacote = require('pacote') const openUrl = require('./utils/open-url.js') -const usageUtil = require('./utils/usage.js') const hostedFromMani = require('./utils/hosted-git-info-from-manifest.js') -class Docs { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Docs extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Open documentation for a package in a web browser' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'docs' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('docs', 'npm docs [<pkgname> [<pkgname> ...]]') + static get params () { + return ['browser', 'registry', 'workspace', 'workspaces'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[<pkgname> [<pkgname> ...]]'] } exec (args, cb) 
{ this.docs(args).then(() => cb()).catch(cb) } + execWorkspaces (args, filters, cb) { + this.docsWorkspaces(args, filters).then(() => cb()).catch(cb) + } + async docs (args) { if (!args || !args.length) args = ['.'] @@ -25,6 +40,11 @@ class Docs { await Promise.all(args.map(pkg => this.getDocs(pkg))) } + async docsWorkspaces (args, filters) { + await this.setWorkspaces(filters) + return this.docs(this.workspacePaths) + } + async getDocs (pkg) { const opts = { ...this.npm.flatOptions, fullMetadata: true } const mani = await pacote.manifest(pkg, opts) diff --git a/lib/doctor.js b/lib/doctor.js index 81860004e344e..57488fd698856 100644 --- a/lib/doctor.js +++ b/lib/doctor.js @@ -10,10 +10,8 @@ const semver = require('semver') const { promisify } = require('util') const ansiTrim = require('./utils/ansi-trim.js') const isWindows = require('./utils/is-windows.js') -const output = require('./utils/output.js') const ping = require('./utils/ping.js') -const usageUtil = require('./utils/usage.js') -const { defaults: { registry: defaultRegistry } } = require('./utils/config.js') +const { registry: { default: defaultRegistry } } = require('./utils/config/definitions.js') const lstat = promisify(fs.lstat) const readdir = promisify(fs.readdir) const access = promisify(fs.access) @@ -32,14 +30,21 @@ const maskLabel = mask => { return label.join(', ') } -class Doctor { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Doctor extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Check your npm environment' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'doctor' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('doctor', 'npm doctor') + static get params () { + return ['registry'] } exec (args, cb) { @@ -111,7 +116,7 @@ class Doctor { const silent = this.npm.log.levels[this.npm.log.level] > this.npm.log.levels.error if (!silent) { - output(table(outTable, tableOpts)) + this.npm.output(table(outTable, tableOpts)) if (!allOk) console.error('') } diff --git a/lib/edit.js b/lib/edit.js index a7dbb38205b02..1cf7ca5c22381 100644 --- a/lib/edit.js +++ b/lib/edit.js @@ -4,18 +4,28 @@ const { resolve } = require('path') const fs = require('graceful-fs') const { spawn } = require('child_process') -const usageUtil = require('./utils/usage.js') const splitPackageNames = require('./utils/split-package-names.js') const completion = require('./utils/completion/installed-shallow.js') +const BaseCommand = require('./base-command.js') -class Edit { - constructor (npm) { - this.npm = npm +class Edit extends BaseCommand { + static get description () { + return 'Edit an installed package' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('edit', 'npm edit <pkg>[/<subpkg>...]') + static get name () { + return 'edit' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['<pkg>[/<subpkg>...]'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return ['editor'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ diff --git a/lib/exec.js b/lib/exec.js index d1db49128587e..8c64c2f240581 100644 --- a/lib/exec.js +++ b/lib/exec.js @@ -1,18 +1,6 @@ -const output = require('./utils/output.js') -const usageUtil = require('./utils/usage.js') -const { 
promisify } = require('util') -const read = promisify(require('read')) -const mkdirp = require('mkdirp-infer-owner') -const readPackageJson = require('read-package-json-fast') -const Arborist = require('@npmcli/arborist') -const runScript = require('@npmcli/run-script') -const { resolve, delimiter } = require('path') -const ciDetect = require('@npmcli/ci-detect') -const crypto = require('crypto') -const pacote = require('pacote') -const npa = require('npm-package-arg') -const fileExists = require('./utils/file-exists.js') -const PATH = require('./utils/path.js') +const libexec = require('libnpmexec') +const BaseCommand = require('./base-command.js') +const getLocationMsg = require('./exec/get-workspace-location-msg.js') // it's like this: // @@ -25,7 +13,7 @@ const PATH = require('./utils/path.js') // // npm x -p pkg@version -- foo --registry=/dev/null // -// const pkg = npm.flatOptions.package || getPackageFrom(args[0]) +// const pkg = npm.config.get('package') || getPackageFrom(args[0]) // const cmd = getCommand(pkg, args[0]) // --> npm x -c 'cmd ...args.slice(1)' // @@ -39,258 +27,91 @@ const PATH = require('./utils/path.js') // runScript({ pkg, event: 'npx', ... }) // process.env.npm_lifecycle_event = 'npx' -class Exec { - constructor (npm) { - this.npm = npm +class Exec extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Run a command from a local or remote npm package' } - get usage () { - return usageUtil('exec', - 'Run a command from a local or remote npm package.\n\n' + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return ['package', 'call', 'workspace', 'workspaces'] + } - 'npm exec -- <pkg>[@<version>] [args...]\n' + - 'npm exec --package=<pkg>[@<version>] -- <cmd> [args...]\n' + - 'npm exec -c \'<cmd> [args...]\'\n' + - 'npm exec --package=foo -c \'<cmd> [args...]\'\n' + - '\n' + - 'npx <pkg>[@<specifier>] [args...]\n' + - 'npx -p <pkg>[@<specifier>] <cmd> [args...]\n' + - 'npx -c \'<cmd> [args...]\'\n' + - 'npx -p <pkg>[@<specifier>] -c \'<cmd> [args...]\'' + - '\n' + - 'Run without --call or positional args to open interactive subshell\n', + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'exec' + } - '\n--package=<pkg> (may be specified multiple times)\n' + - '-p is a shorthand for --package only when using npx executable\n' + - '-c <cmd> --call=<cmd> (may not be mixed with positional arguments)' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return [ + '-- <pkg>[@<version>] [args...]', + '--package=<pkg>[@<version>] -- <cmd> [args...]', + '-c \'<cmd> [args...]\'', + '--package=foo -c \'<cmd> [args...]\'', + ] } exec (args, cb) { - this._exec(args).then(() => cb()).catch(cb) + const path = this.npm.localPrefix + const runPath = process.cwd() + this._exec(args, { path, runPath }).then(() => cb()).catch(cb) + } + + execWorkspaces (args, filters, cb) { + this._execWorkspaces(args, filters).then(() => cb()).catch(cb) } // When commands go async and we can dump the boilerplate exec methods this // can be named correctly - async _exec (args) { - const { package: packages, call, shell } = this.npm.flatOptions - - if (call && args.length) + async _exec (_args, { locationMsg, path, runPath }) { + const args = [..._args] + const call = this.npm.config.get('call') + const { + flatOptions, + localBin, + log, + globalBin, + } = this.npm + const output = (...outputArgs) => 
this.npm.output(...outputArgs) + const scriptShell = this.npm.config.get('script-shell') || undefined + const packages = this.npm.config.get('package') + const yes = this.npm.config.get('yes') + + if (call && _args.length) throw this.usage - const pathArr = [...PATH] - - // nothing to maybe install, skip the arborist dance - if (!call && !args.length && !packages.length) { - return await this.run({ - args, - call, - shell, - pathArr, - }) - } - - const needPackageCommandSwap = args.length && !packages.length - // if there's an argument and no package has been explicitly asked for - // check the local and global bin paths for a binary named the same as - // the argument and run it if it exists, otherwise fall through to - // the behavior of treating the single argument as a package name - if (needPackageCommandSwap) { - let binExists = false - if (await fileExists(`${this.npm.localBin}/${args[0]}`)) { - pathArr.unshift(this.npm.localBin) - binExists = true - } else if (await fileExists(`${this.npm.globalBin}/${args[0]}`)) { - pathArr.unshift(this.npm.globalBin) - binExists = true - } - - if (binExists) { - return await this.run({ - args, - call, - pathArr, - shell, - }) - } - - packages.push(args[0]) - } - - // If we do `npm exec foo`, and have a `foo` locally, then we'll - // always use that, so we don't really need to fetch the manifest. - // So: run npa on each packages entry, and if it is a name with a - // rawSpec==='', then try to readPackageJson at - // node_modules/${name}/package.json, and only pacote fetch if - // that fails. - const manis = await Promise.all(packages.map(async p => { - const spec = npa(p, this.npm.localPrefix) - if (spec.type === 'tag' && spec.rawSpec === '') { - // fall through to the pacote.manifest() approach - try { - const pj = resolve(this.npm.localPrefix, 'node_modules', spec.name) - return await readPackageJson(pj) - } catch (er) {} - } - // Force preferOnline to true so we are making sure to pull in the latest - // This is especially useful if the user didn't give us a version, and - // they expect to be running @latest - return await pacote.manifest(p, { - ...this.npm.flatOptions, - preferOnline: true, - }) - })) - - if (needPackageCommandSwap) - args[0] = this.getBinFromManifest(manis[0]) - - // figure out whether we need to install stuff, or if local is fine - const localArb = new Arborist({ - ...this.npm.flatOptions, - path: this.npm.localPrefix, + return libexec({ + ...flatOptions, + args, + call, + localBin, + locationMsg, + log, + globalBin, + output, + packages, + path, + runPath, + scriptShell, + yes, }) - const tree = await localArb.loadActual() - - // do we have all the packages in manifest list? 
- const needInstall = manis.some(mani => this.manifestMissing(tree, mani)) - - if (needInstall) { - const installDir = this.cacheInstallDir(packages) - await mkdirp(installDir) - const arb = new Arborist({ ...this.npm.flatOptions, path: installDir }) - const tree = await arb.loadActual() - - // at this point, we have to ensure that we get the exact same - // version, because it's something that has only ever been installed - // by npm exec in the cache install directory - const add = manis.filter(mani => this.manifestMissing(tree, { - ...mani, - _from: `${mani.name}@${mani.version}`, - })) - .map(mani => mani._from) - .sort((a, b) => a.localeCompare(b)) - - // no need to install if already present - if (add.length) { - if (!this.npm.flatOptions.yes) { - // set -n to always say no - if (this.npm.flatOptions.yes === false) - throw 'canceled' - - if (!process.stdin.isTTY || ciDetect()) { - this.npm.log.warn('exec', `The following package${ - add.length === 1 ? ' was' : 's were' - } not found and will be installed: ${ - add.map((pkg) => pkg.replace(/@$/, '')).join(', ') - }`) - } else { - const addList = add.map(a => ` ${a.replace(/@$/, '')}`) - .join('\n') + '\n' - const prompt = `Need to install the following packages:\n${ - addList - }Ok to proceed? ` - const confirm = await read({ prompt, default: 'y' }) - if (confirm.trim().toLowerCase().charAt(0) !== 'y') - throw 'canceled' - } - } - await arb.reify({ ...this.npm.flatOptions, add }) - } - pathArr.unshift(resolve(installDir, 'node_modules/.bin')) - } - - return await this.run({ args, call, pathArr, shell }) } - async run ({ args, call, pathArr, shell }) { - // turn list of args into command string - const script = call || args.shift() || shell - - // do the fakey runScript dance - // still should work if no package.json in cwd - const realPkg = await readPackageJson(`${this.npm.localPrefix}/package.json`) - .catch(() => ({})) - const pkg = { - ...realPkg, - scripts: { - ...(realPkg.scripts || {}), - npx: script, - }, - } + async _execWorkspaces (args, filters) { + await this.setWorkspaces(filters) + const color = this.npm.color - this.npm.log.disableProgress() - try { - if (script === shell) { - if (process.stdin.isTTY) { - if (ciDetect()) - return this.npm.log.warn('exec', 'Interactive mode disabled in CI environment') - output(`\nEntering npm script environment\nType 'exit' or ^D when finished\n`) - } - } - return await runScript({ - ...this.npm.flatOptions, - pkg, - banner: false, - // we always run in cwd, not --prefix - path: process.cwd(), - stdioString: true, - event: 'npx', - args, - env: { - PATH: pathArr.join(delimiter), - }, - stdio: 'inherit', + for (const path of this.workspacePaths) { + const locationMsg = await getLocationMsg({ color, path }) + await this._exec(args, { + locationMsg, + path, + runPath: path, }) - } finally { - this.npm.log.enableProgress() } } - - manifestMissing (tree, mani) { - // if the tree doesn't have a child by that name/version, return true - // true means we need to install it - const child = tree.children.get(mani.name) - // if no child, we have to load it - if (!child) - return true - - // if no version/tag specified, allow whatever's there - if (mani._from === `${mani.name}@`) - return false - - // otherwise the version has to match what we WOULD get - return child.version !== mani.version - } - - getBinFromManifest (mani) { - // if we have a bin matching (unscoped portion of) packagename, use that - // otherwise if there's 1 bin or all bin value is the same (alias), use - // that, otherwise 
fail - const bin = mani.bin || {} - if (new Set(Object.values(bin)).size === 1) - return Object.keys(bin)[0] - - // XXX probably a util to parse this better? - const name = mani.name.replace(/^@[^/]+\//, '') - if (bin[name]) - return name - - // XXX need better error message - throw Object.assign(new Error('could not determine executable to run'), { - pkgid: mani._id, - }) - } - - cacheInstallDir (packages) { - // only packages not found in ${prefix}/node_modules - return resolve(this.npm.config.get('cache'), '_npx', this.getHash(packages)) - } - - getHash (packages) { - return crypto.createHash('sha512') - .update(packages.sort((a, b) => a.localeCompare(b)).join('\n')) - .digest('hex') - .slice(0, 16) - } } + module.exports = Exec diff --git a/lib/exec/get-workspace-location-msg.js b/lib/exec/get-workspace-location-msg.js new file mode 100644 index 0000000000000..813b11e789222 --- /dev/null +++ b/lib/exec/get-workspace-location-msg.js @@ -0,0 +1,25 @@ +const chalk = require('chalk') +const readPackageJson = require('read-package-json-fast') + +const nocolor = { + dim: s => s, + green: s => s, +} + +const getLocationMsg = async ({ color, path }) => { + const colorize = color ? chalk : nocolor + const { _id } = + await readPackageJson(`${path}/package.json`) + .catch(() => ({})) + + const workspaceMsg = _id + ? ` in workspace ${colorize.green(_id)}` + : ` in a ${colorize.green('new')} workspace` + const locationMsg = ` at location:\n${ + colorize.dim(path) + }` + + return `${workspaceMsg}${locationMsg}` +} + +module.exports = getLocationMsg diff --git a/lib/explain.js b/lib/explain.js index 01541040ef649..7d785d7bfcf44 100644 --- a/lib/explain.js +++ b/lib/explain.js @@ -1,21 +1,33 @@ -const usageUtil = require('./utils/usage.js') const { explainNode } = require('./utils/explain-dep.js') const completion = require('./utils/completion/installed-deep.js') -const output = require('./utils/output.js') const Arborist = require('@npmcli/arborist') const npa = require('npm-package-arg') const semver = require('semver') const { relative, resolve } = require('path') const validName = require('validate-npm-package-name') +const ArboristWorkspaceCmd = require('./workspaces/arborist-cmd.js') -class Explain { - constructor (npm) { - this.npm = npm +class Explain extends ArboristWorkspaceCmd { + static get description () { + return 'Explain installed packages' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('explain', 'npm explain <folder | specifier>') + static get name () { + return 'explain' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['<folder | specifier>'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'json', + 'workspace', + ] } /* istanbul ignore next - see test/lib/load-all-commands.js */ @@ -34,10 +46,18 @@ class Explain { const arb = new Arborist({ path: this.npm.prefix, ...this.npm.flatOptions }) const tree = await arb.loadActual() + if (this.workspaceNames && this.workspaceNames.length) + this.filterSet = arb.workspaceDependencySet(tree, this.workspaceNames) + const nodes = new Set() for (const arg of args) { - for (const node of this.getNodes(tree, arg)) - nodes.add(node) + for (const node of this.getNodes(tree, arg)) { + const filteredOut = this.filterSet + && this.filterSet.size > 0 + && !this.filterSet.has(node) + if (!filteredOut) + nodes.add(node) + } } if (nodes.size === 0) throw `No dependencies found matching 
${args.join(', ')}` @@ -59,9 +79,9 @@ class Explain { } if (this.npm.flatOptions.json) - output(JSON.stringify(expls, null, 2)) + this.npm.output(JSON.stringify(expls, null, 2)) else { - output(expls.map(expl => { + this.npm.output(expls.map(expl => { return explainNode(expl, Infinity, this.npm.color) }).join('\n\n')) } @@ -71,7 +91,7 @@ class Explain { // if it's just a name, return packages by that name const { validForOldPackages: valid } = validName(arg) if (valid) - return tree.inventory.query('name', arg) + return tree.inventory.query('packageName', arg) // if it's a location, get that node const maybeLoc = arg.replace(/\\/g, '/').replace(/\/+$/, '') diff --git a/lib/explore.js b/lib/explore.js index fdfe6e1bcf7c8..4417fba7d1fc7 100644 --- a/lib/explore.js +++ b/lib/explore.js @@ -5,17 +5,26 @@ const rpj = require('read-package-json-fast') const runScript = require('@npmcli/run-script') const { join, resolve, relative } = require('path') const completion = require('./utils/completion/installed-shallow.js') -const output = require('./utils/output.js') -const usageUtil = require('./utils/usage.js') +const BaseCommand = require('./base-command.js') -class Explore { - constructor (npm) { - this.npm = npm +class Explore extends BaseCommand { + static get description () { + return 'Browse an installed package' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('explore', 'npm explore <pkg> [ -- <command>]') + static get name () { + return 'explore' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['<pkg> [ -- <command>]'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return ['shell'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ @@ -54,7 +63,7 @@ class Explore { } if (!args.length) - output(`\nExploring ${path}\nType 'exit' or ^D when finished\n`) + this.npm.output(`\nExploring ${path}\nType 'exit' or ^D when finished\n`) this.npm.log.disableProgress() try { return await runScript({ diff --git a/lib/find-dupes.js b/lib/find-dupes.js index 5061be9cc381a..69b30e8aa3dbb 100644 --- a/lib/find-dupes.js +++ b/lib/find-dupes.js @@ -1,14 +1,31 @@ // dedupe duplicated packages, or find them in the tree -const usageUtil = require('./utils/usage.js') +const ArboristWorkspaceCmd = require('./workspaces/arborist-cmd.js') -class FindDupes { - constructor (npm) { - this.npm = npm +class FindDupes extends ArboristWorkspaceCmd { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Find duplication in the package tree' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'find-dupes' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('find-dupes', 'npm find-dupes') + static get params () { + return [ + 'global-style', + 'legacy-bundling', + 'strict-peer-deps', + 'package-lock', + 'omit', + 'ignore-scripts', + 'audit', + 'bin-links', + 'fund', + ...super.params, + ] } exec (args, cb) { diff --git a/lib/fund.js b/lib/fund.js index 1e9724266401f..1e0fa1ecb9d73 100644 --- a/lib/fund.js +++ b/lib/fund.js @@ -12,27 +12,39 @@ const { } = require('libnpmfund') const completion = require('./utils/completion/installed-deep.js') -const output = require('./utils/output.js') const openUrl = require('./utils/open-url.js') -const usageUtil = require('./utils/usage.js') +const ArboristWorkspaceCmd = 
require('./workspaces/arborist-cmd.js') const getPrintableName = ({ name, version }) => { const printableVersion = version ? `@${version}` : '' return `${name}${printableVersion}` } -class Fund { - constructor (npm) { - this.npm = npm +class Fund extends ArboristWorkspaceCmd { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Retrieve funding information' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'fund' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'fund', - 'npm fund', - 'npm fund [--json] [--browser] [--unicode] [[<@scope>/]<pkg> [--which=<fundingSourceNumber>]' - ) + static get params () { + return [ + 'json', + 'browser', + 'unicode', + 'workspace', + 'which', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[[<@scope>/]<pkg>]'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ @@ -45,14 +57,13 @@ class Fund { } async fund (args) { - const opts = this.npm.flatOptions const spec = args[0] - const numberArg = opts.which + const numberArg = this.npm.config.get('which') const fundingSourceNumber = numberArg && parseInt(numberArg, 10) const badFundingSourceNumber = - numberArg !== undefined && + numberArg !== null && (String(fundingSourceNumber) !== numberArg || fundingSourceNumber < 1) if (badFundingSourceNumber) { @@ -61,14 +72,14 @@ class Fund { throw err } - if (opts.global) { + if (this.npm.config.get('global')) { const err = new Error('`npm fund` does not support global packages') err.code = 'EFUNDGLOBAL' throw err } const where = this.npm.prefix - const arb = new Arborist({ ...opts, path: where }) + const arb = new Arborist({ ...this.npm.flatOptions, path: where }) const tree = await arb.loadActual() if (spec) { @@ -81,23 +92,25 @@ class Fund { return } - const print = opts.json - ? this.printJSON - : this.printHuman + const fundingInfo = getFundingInfo(tree, { + ...this.flatOptions, + log: this.npm.log, + workspaces: this.workspaceNames, + }) - output( - print( - getFundingInfo(tree), - opts - ) - ) + if (this.npm.config.get('json')) + this.npm.output(this.printJSON(fundingInfo)) + else + this.npm.output(this.printHuman(fundingInfo)) } printJSON (fundingInfo) { return JSON.stringify(fundingInfo, null, 2) } - printHuman (fundingInfo, { color, unicode }) { + printHuman (fundingInfo) { + const color = this.npm.color + const unicode = this.npm.config.get('unicode') const seenUrls = new Map() const tree = obj => @@ -206,9 +219,9 @@ class Fund { validSources.forEach(({ type, url }, i) => { const typePrefix = type ? 
`${type} funding` : 'Funding' const msg = `${typePrefix} available at the following URL` - output(`${i + 1}: ${msg}: ${url}`) + this.npm.output(`${i + 1}: ${msg}: ${url}`) }) - output('Run `npm fund [<@scope>/]<pkg> --which=1`, for example, to open the first funding URL listed in that package') + this.npm.output('Run `npm fund [<@scope>/]<pkg> --which=1`, for example, to open the first funding URL listed in that package') } else { const noFundingError = new Error(`No valid funding method available for: ${spec}`) noFundingError.code = 'ENOFUND' diff --git a/lib/get.js b/lib/get.js index a5b2f5514473d..8cfb259a81323 100644 --- a/lib/get.js +++ b/lib/get.js @@ -1,16 +1,19 @@ -const usageUtil = require('./utils/usage.js') +const BaseCommand = require('./base-command.js') -class Get { - constructor (npm) { - this.npm = npm +class Get extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Get a value from the npm configuration' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'get' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'get', - 'npm get [<key> ...] (See `npm config`)' - ) + static get usage () { + return ['[<key> ...] (See `npm config`)'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ diff --git a/lib/help-search.js b/lib/help-search.js index ed2bc23b9109d..877989fd0148e 100644 --- a/lib/help-search.js +++ b/lib/help-search.js @@ -1,22 +1,29 @@ const fs = require('fs') const path = require('path') const color = require('ansicolors') -const output = require('./utils/output.js') -const usageUtil = require('./utils/usage.js') -const npmUsage = require('./utils/npm-usage.js') const { promisify } = require('util') const glob = promisify(require('glob')) const readFile = promisify(fs.readFile) -const didYouMean = require('./utils/did-you-mean.js') -const { cmdList } = require('./utils/cmd-list.js') +const BaseCommand = require('./base-command.js') -class HelpSearch { - constructor (npm) { - this.npm = npm +class HelpSearch extends BaseCommand { + static get description () { + return 'Search npm help documentation' } - get usage () { - return usageUtil('help-search', 'npm help-search <text>') + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'help-search' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['<text>'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return ['long'] } exec (args, cb) { @@ -25,28 +32,17 @@ class HelpSearch { async helpSearch (args) { if (!args.length) - throw this.usage + return this.npm.output(this.usage) const docPath = path.resolve(__dirname, '..', 'docs/content') - const files = await glob(`${docPath}/*/*.md`) const data = await this.readFiles(files) const results = await this.searchFiles(args, data, files) - // if only one result, then just show that help section. 
- if (results.length === 1) { - return this.npm.commands.help([path.basename(results[0].file, '.md')], er => { - if (er) - throw er - }) - } - const formatted = this.formatResults(args, results) if (!formatted.trim()) - npmUsage(this.npm, false) - else { - output(formatted) - output(didYouMean(args[0], cmdList)) - } + this.npm.output(`No matches in help for: ${args.join(' ')}\n`) + else + this.npm.output(formatted) } async readFiles (files) { @@ -165,7 +161,7 @@ class HelpSearch { out.push(' '.repeat((Math.max(1, cols - out.join(' ').length - r.length - 1)))) out.push(r) - if (!this.npm.flatOptions.long) + if (!this.npm.config.get('long')) return out.join('') out.unshift('\n\n') @@ -197,7 +193,7 @@ class HelpSearch { return out.join('') }).join('\n') - const finalOut = results.length && !this.npm.flatOptions.long + const finalOut = results.length && !this.npm.config.get('long') ? 'Top hits for ' + (args.map(JSON.stringify).join(' ')) + '\n' + '—'.repeat(cols - 1) + '\n' + out + '\n' + diff --git a/lib/help.js b/lib/help.js index d7897326f3118..8e4ff67bc284c 100644 --- a/lib/help.js +++ b/lib/help.js @@ -1,34 +1,42 @@ -const npmUsage = require('./utils/npm-usage.js') const { spawn } = require('child_process') const path = require('path') -const log = require('npmlog') const openUrl = require('./utils/open-url.js') -const glob = require('glob') -const output = require('./utils/output.js') +const { promisify } = require('util') +const glob = promisify(require('glob')) -const usage = require('./utils/usage.js') +const BaseCommand = require('./base-command.js') -class Help { - constructor (npm) { - this.npm = npm +// Strips out the number from foo.7 or foo.7. or foo.7.tgz +// We don't currently compress our man pages but if we ever did this would +// seemlessly continue supporting it +const manNumberRegex = /\.(\d+)(\.[^/\\]*)?$/ + +class Help extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Get help on npm' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'help' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['<term> [<terms..>]'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usage('help', 'npm help <term> [<terms..>]') + static get params () { + return ['viewer'] } async completion (opts) { if (opts.conf.argv.remain.length > 2) return [] const g = path.resolve(__dirname, '../man/man[0-9]/*.[0-9]') - const files = await new Promise((resolve, reject) => { - glob(g, function (er, files) { - if (er) - return reject(er) - resolve(files) - }) - }) + const files = await glob(g) return Object.keys(files.reduce(function (acc, file) { file = path.basename(file).replace(/\.[0-9]+$/, '') @@ -43,81 +51,45 @@ class Help { } async help (args) { - const argv = this.npm.config.parsedArgv.cooked + // By default we search all of our man subdirectories, but if the user has + // asked for a specific one we limit the search to just there + let manSearch = 'man*' + if (/^\d+$/.test(args[0])) + manSearch = `man${args.shift()}` - let argnum = 0 - if (args.length === 2 && ~~args[0]) - argnum = ~~args.shift() + if (!args.length) + return this.npm.output(this.npm.usage) // npm help foo bar baz: search topics - if (args.length > 1 && args[0]) + if (args.length > 1) return this.helpSearch(args) - const affordances = { - 'find-dupes': 'dedupe', - } - let section = affordances[args[0]] || 
this.npm.deref(args[0]) || args[0] - - // npm help <noargs>: show basic usage - if (!section) { - npmUsage(this.npm, argv[0] === 'help') - return - } - - // npm <command> -h: show command usage - if (this.npm.config.get('usage') && - this.npm.commands[section] && - this.npm.commands[section].usage) { - this.npm.config.set('loglevel', 'silent') - log.level = 'silent' - output(this.npm.commands[section].usage) - return - } + let section = this.npm.deref(args[0]) || args[0] - let pref = [1, 5, 7] - if (argnum) - pref = [argnum].concat(pref.filter(n => n !== argnum)) + // support `npm help package.json` + section = section.replace('.json', '-json') - // npm help <section>: Try to find the path const manroot = path.resolve(__dirname, '..', 'man') + // find either section.n or npm-section.n + const f = `${manroot}/${manSearch}/?(npm-)${section}.[0-9]*` + let mans = await glob(f) + mans = mans.sort((a, b) => { + // Because of the glob we know the manNumberRegex will pass + const aManNumber = a.match(manNumberRegex)[1] + const bManNumber = b.match(manNumberRegex)[1] - // legacy - if (section === 'global') - section = 'folders' - else if (section.match(/.*json/)) - section = section.replace('.json', '-json') - - // find either /section.n or /npm-section.n - // The glob is used in the glob. The regexp is used much - // further down. Globs and regexps are different - const compextglob = '.+(gz|bz2|lzma|[FYzZ]|xz)' - const compextre = '\\.(gz|bz2|lzma|[FYzZ]|xz)$' - const f = '+(npm-' + section + '|' + section + ').[0-9]?(' + compextglob + ')' - return new Promise((resolve, reject) => { - glob(manroot + '/*/' + f, async (er, mans) => { - if (er) - return reject(er) - - if (!mans.length) { - this.helpSearch(args).then(resolve).catch(reject) - return - } - - mans = mans.map((man) => { - const ext = path.extname(man) - if (man.match(new RegExp(compextre))) - man = path.basename(man, ext) - - return man - }) - - this.viewMan(this.pickMan(mans, pref), (err) => { - if (err) - return reject(err) - return resolve() - }) - }) + // man number sort first so that 1 aka commands are preferred + if (aManNumber !== bManNumber) + return aManNumber - bManNumber + + return a.localeCompare(b, 'en') }) + const man = mans[0] + + if (man) + await this.viewMan(man) + else + return this.helpSearch(args) } helpSearch (args) { @@ -133,32 +105,11 @@ class Help { }) } - pickMan (mans, pref_) { - const nre = /([0-9]+)$/ - const pref = {} - pref_.forEach((sect, i) => pref[sect] = i) - mans = mans.sort((a, b) => { - const an = a.match(nre)[1] - const bn = b.match(nre)[1] - return an === bn ? (a > b ? -1 : 1) - : pref[an] < pref[bn] ? -1 - : 1 - }) - return mans[0] - } - - viewMan (man, cb) { - const nre = /([0-9]+)$/ - const num = man.match(nre)[1] - const section = path.basename(man, '.' 
+ num) - - // at this point, we know that the specified man page exists - const manpath = path.join(__dirname, '..', 'man') + async viewMan (man) { const env = {} Object.keys(process.env).forEach(function (i) { env[i] = process.env[i] }) - env.MANPATH = manpath const viewer = this.npm.config.get('viewer') const opts = { @@ -175,48 +126,39 @@ class Help { break case 'browser': - bin = false - try { - const url = this.htmlMan(man) - openUrl(this.npm, url, 'help available at the following URL').then( - () => cb() - ).catch(cb) - } catch (err) { - cb(err) - } - break + await openUrl(this.npm, this.htmlMan(man), 'help available at the following URL') + return default: - args.push(num, section) + args.push(man) break } - if (bin) { - const proc = spawn(bin, args, opts) + const proc = spawn(bin, args, opts) + return new Promise((resolve, reject) => { proc.on('exit', (code) => { if (code) - return cb(new Error(`help process exited with code: ${code}`)) + return reject(new Error(`help process exited with code: ${code}`)) - return cb() + return resolve() }) - } + }) } + // Returns the path to the html version of the man page htmlMan (man) { - let sect = +man.match(/([0-9]+)$/)[1] - const f = path.basename(man).replace(/[.]([0-9]+)$/, '') + let sect = man.match(manNumberRegex)[1] + const f = path.basename(man).replace(manNumberRegex, '') switch (sect) { - case 1: + case '1': sect = 'commands' break - case 5: + case '5': sect = 'configuring-npm' break - case 7: + case '7': sect = 'using-npm' break - default: - throw new Error('invalid man section: ' + sect) } return 'file://' + path.resolve(__dirname, '..', 'docs', 'output', sect, f + '.html') } diff --git a/lib/hook.js b/lib/hook.js index 312f542d7cff6..2ee81bea648b4 100644 --- a/lib/hook.js +++ b/lib/hook.js @@ -1,22 +1,33 @@ const hookApi = require('libnpmhook') -const output = require('./utils/output.js') const otplease = require('./utils/otplease.js') const relativeDate = require('tiny-relative-date') const Table = require('cli-table3') -const usageUtil = require('./utils/usage.js') -class Hook { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Hook extends BaseCommand { + static get description () { + return 'Manage registry hooks' } - get usage () { - return usageUtil('hook', [ - 'npm hook add <pkg> <url> <secret> [--type=<type>]', - 'npm hook ls [pkg]', - 'npm hook rm <id>', - 'npm hook update <id> <url> <secret>', - ].join('\n')) + static get name () { + return 'hook' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'registry', + 'otp', + ] + } + + static get usage () { + return [ + 'add <pkg> <url> <secret> [--type=<type>]', + 'ls [pkg]', + 'rm <id>', + 'update <id> <url> <secret>', + ] } exec (args, cb) { @@ -44,12 +55,12 @@ class Hook { async add (pkg, uri, secret, opts) { const hook = await hookApi.add(pkg, uri, secret, opts) if (opts.json) - output(JSON.stringify(hook, null, 2)) + this.npm.output(JSON.stringify(hook, null, 2)) else if (opts.parseable) { - output(Object.keys(hook).join('\t')) - output(Object.keys(hook).map(k => hook[k]).join('\t')) + this.npm.output(Object.keys(hook).join('\t')) + this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t')) } else if (!opts.silent && opts.loglevel !== 'silent') { - output(`+ ${this.hookName(hook)} ${ + this.npm.output(`+ ${this.hookName(hook)} ${ opts.unicode ? 
' ➜ ' : ' -> ' } ${hook.endpoint}`) } @@ -58,19 +69,19 @@ class Hook { async ls (pkg, opts) { const hooks = await hookApi.ls({ ...opts, package: pkg }) if (opts.json) - output(JSON.stringify(hooks, null, 2)) + this.npm.output(JSON.stringify(hooks, null, 2)) else if (opts.parseable) { - output(Object.keys(hooks[0]).join('\t')) + this.npm.output(Object.keys(hooks[0]).join('\t')) hooks.forEach(hook => { - output(Object.keys(hook).map(k => hook[k]).join('\t')) + this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t')) }) } else if (!hooks.length) - output("You don't have any hooks configured yet.") + this.npm.output("You don't have any hooks configured yet.") else if (!opts.silent && opts.loglevel !== 'silent') { if (hooks.length === 1) - output('You have one hook configured.') + this.npm.output('You have one hook configured.') else - output(`You have ${hooks.length} hooks configured.`) + this.npm.output(`You have ${hooks.length} hooks configured.`) const table = new Table({ head: ['id', 'target', 'endpoint'] }) hooks.forEach((hook) => { @@ -90,19 +101,19 @@ class Hook { } else table.push([{ colSpan: 2, content: 'never triggered' }]) }) - output(table.toString()) + this.npm.output(table.toString()) } } async rm (id, opts) { const hook = await hookApi.rm(id, opts) if (opts.json) - output(JSON.stringify(hook, null, 2)) + this.npm.output(JSON.stringify(hook, null, 2)) else if (opts.parseable) { - output(Object.keys(hook).join('\t')) - output(Object.keys(hook).map(k => hook[k]).join('\t')) + this.npm.output(Object.keys(hook).join('\t')) + this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t')) } else if (!opts.silent && opts.loglevel !== 'silent') { - output(`- ${this.hookName(hook)} ${ + this.npm.output(`- ${this.hookName(hook)} ${ opts.unicode ? ' ✘ ' : ' X ' } ${hook.endpoint}`) } @@ -111,12 +122,12 @@ class Hook { async update (id, uri, secret, opts) { const hook = await hookApi.update(id, uri, secret, opts) if (opts.json) - output(JSON.stringify(hook, null, 2)) + this.npm.output(JSON.stringify(hook, null, 2)) else if (opts.parseable) { - output(Object.keys(hook).join('\t')) - output(Object.keys(hook).map(k => hook[k]).join('\t')) + this.npm.output(Object.keys(hook).join('\t')) + this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t')) } else if (!opts.silent && opts.loglevel !== 'silent') { - output(`+ ${this.hookName(hook)} ${ + this.npm.output(`+ ${this.hookName(hook)} ${ opts.unicode ? 
' ➜ ' : ' -> ' } ${hook.endpoint}`) } diff --git a/lib/init.js b/lib/init.js index af97a9614e368..e4bd20b7210e8 100644 --- a/lib/init.js +++ b/lib/init.js @@ -1,67 +1,152 @@ +const fs = require('fs') +const { relative, resolve } = require('path') +const mkdirp = require('mkdirp-infer-owner') const initJson = require('init-package-json') const npa = require('npm-package-arg') +const rpj = require('read-package-json-fast') +const libexec = require('libnpmexec') +const mapWorkspaces = require('@npmcli/map-workspaces') +const PackageJson = require('@npmcli/package-json') -const usageUtil = require('./utils/usage.js') -const output = require('./utils/output.js') +const getLocationMsg = require('./exec/get-workspace-location-msg.js') +const BaseCommand = require('./base-command.js') -class Init { - constructor (npm) { - this.npm = npm +class Init extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Create a package.json file' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return ['yes', 'force', 'workspace', 'workspaces'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'init' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'init', - '\nnpm init [--force|-f|--yes|-y|--scope]' + - '\nnpm init <@scope> (same as `npx <@scope>/create`)' + - '\nnpm init [<@scope>/]<name> (same as `npx [<@scope>/]create-<name>`)' - ) + static get usage () { + return [ + '[--force|-f|--yes|-y|--scope]', + '<@scope> (same as `npx <@scope>/create`)', + '[<@scope>/]<name> (same as `npx [<@scope>/]create-<name>`)', + ] } exec (args, cb) { this.init(args).then(() => cb()).catch(cb) } + execWorkspaces (args, filters, cb) { + this.initWorkspaces(args, filters).then(() => cb()).catch(cb) + } + async init (args) { - // the new npx style way + // npm exec style + if (args.length) + return (await this.execCreate({ args, path: process.cwd() })) + + // no args, uses classic init-package-json boilerplate + await this.template() + } + + async initWorkspaces (args, filters) { + // reads package.json for the top-level folder first, by doing this we + // ensure the command throw if no package.json is found before trying + // to create a workspace package.json file or its folders + const pkg = await rpj(resolve(this.npm.localPrefix, 'package.json')) + const wPath = filterArg => resolve(this.npm.localPrefix, filterArg) + + // npm-exec style, runs in the context of each workspace filter if (args.length) { - const initerName = args[0] - let packageName = initerName - if (/^@[^/]+$/.test(initerName)) - packageName = initerName + '/create' - else { - const req = npa(initerName) - if (req.type === 'git' && req.hosted) { - const { user, project } = req.hosted - packageName = initerName - .replace(user + '/' + project, user + '/create-' + project) - } else if (req.registry) { - packageName = req.name.replace(/^(@[^/]+\/)?/, '$1create-') - if (req.rawSpec) - packageName += '@' + req.rawSpec - } else { - throw Object.assign(new Error( - 'Unrecognized initializer: ' + initerName + - '\nFor more package binary executing power check out `npx`:' + - '\nhttps://www.npmjs.com/package/npx' - ), { code: 'EUNSUPPORTED' }) - } + for (const filterArg of filters) { + const path = wPath(filterArg) + await mkdirp(path) + await this.execCreate({ args, path }) + await this.setWorkspace({ pkg, workspacePath: path }) } - 
this.npm.config.set('package', []) - const newArgs = [packageName, ...args.slice(1)] - return new Promise((res, rej) => { - this.npm.commands.exec(newArgs, er => er ? rej(er) : res()) - }) + return } - // the old way - const dir = process.cwd() + // no args, uses classic init-package-json boilerplate + for (const filterArg of filters) { + const path = wPath(filterArg) + await mkdirp(path) + await this.template(path) + await this.setWorkspace({ pkg, workspacePath: path }) + } + } + + async execCreate ({ args, path }) { + const [initerName, ...otherArgs] = args + let packageName = initerName + + if (/^@[^/]+$/.test(initerName)) + packageName = initerName + '/create' + else { + const req = npa(initerName) + if (req.type === 'git' && req.hosted) { + const { user, project } = req.hosted + packageName = initerName + .replace(user + '/' + project, user + '/create-' + project) + } else if (req.registry) { + packageName = req.name.replace(/^(@[^/]+\/)?/, '$1create-') + if (req.rawSpec) + packageName += '@' + req.rawSpec + } else { + throw Object.assign(new Error( + 'Unrecognized initializer: ' + initerName + + '\nFor more package binary executing power check out `npx`:' + + '\nhttps://www.npmjs.com/package/npx' + ), { code: 'EUNSUPPORTED' }) + } + } + + const newArgs = [packageName, ...otherArgs] + const { color } = this.npm.flatOptions + const { + flatOptions, + localBin, + log, + globalBin, + } = this.npm + // this function is definitely called. But because of coverage map stuff + // it ends up both uncovered, and the coverage report doesn't even mention. + // the tests do assert that some output happens, so we know this line is + // being hit. + /* istanbul ignore next */ + const output = (...outputArgs) => this.npm.output(...outputArgs) + const locationMsg = await getLocationMsg({ color, path }) + const runPath = path + const scriptShell = this.npm.config.get('script-shell') || undefined + const yes = this.npm.config.get('yes') + + await libexec({ + ...flatOptions, + args: newArgs, + color, + localBin, + locationMsg, + log, + globalBin, + output, + path, + runPath, + scriptShell, + yes, + }) + } + + async template (path = process.cwd()) { this.npm.log.pause() this.npm.log.disableProgress() + const initFile = this.npm.config.get('init-module') - if (!this.npm.flatOptions.yes && !this.npm.flatOptions.force) { - output([ + if (!this.npm.config.get('yes') && !this.npm.config.get('force')) { + this.npm.output([ 'This utility will walk you through creating a package.json file.', 'It only covers the most common items, and tries to guess sensible defaults.', '', @@ -74,9 +159,10 @@ class Init { 'Press ^C at any time to quit.', ].join('\n')) } + // XXX promisify init-package-json await new Promise((res, rej) => { - initJson(dir, initFile, this.npm.config, (er, data) => { + initJson(path, initFile, this.npm.config, (er, data) => { this.npm.log.resume() this.npm.log.enableProgress() this.npm.log.silly('package data', data) @@ -93,5 +179,37 @@ class Init { }) }) } + + async setWorkspace ({ pkg, workspacePath }) { + const workspaces = await mapWorkspaces({ cwd: this.npm.localPrefix, pkg }) + + // skip setting workspace if current package.json glob already satisfies it + for (const wPath of workspaces.values()) { + if (wPath === workspacePath) + return + } + + // if a create-pkg didn't generate a package.json at the workspace + // folder level, it might not be recognized as a workspace by + // mapWorkspaces, so we're just going to avoid touching the + // top-level package.json + try { + 
fs.statSync(resolve(workspacePath, 'package.json')) + } catch (err) { + return + } + + const pkgJson = await PackageJson.load(this.npm.localPrefix) + + pkgJson.update({ + workspaces: [ + ...(pkgJson.content.workspaces || []), + relative(this.npm.localPrefix, workspacePath), + ], + }) + + await pkgJson.save() + } } + module.exports = Init diff --git a/lib/install-ci-test.js b/lib/install-ci-test.js index d1740999d4b67..871f24b2f32d6 100644 --- a/lib/install-ci-test.js +++ b/lib/install-ci-test.js @@ -1,19 +1,16 @@ // npm install-ci-test // Runs `npm ci` and then runs `npm test` -const usageUtil = require('./utils/usage.js') +const CI = require('./ci.js') -class InstallCITest { - constructor (npm) { - this.npm = npm +class InstallCITest extends CI { + static get description () { + return 'Install a project with a clean slate and run tests' } - get usage () { - return usageUtil( - 'install-ci-test', - 'npm install-ci-test [args]' + - '\nSame args as `npm ci`' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'install-ci-test' } exec (args, cb) { diff --git a/lib/install-test.js b/lib/install-test.js index 487f8da00b6d3..d5664119df5ce 100644 --- a/lib/install-test.js +++ b/lib/install-test.js @@ -1,23 +1,16 @@ // npm install-test // Runs `npm install` and then runs `npm test` -const usageUtil = require('./utils/usage.js') +const Install = require('./install.js') -class InstallTest { - constructor (npm) { - this.npm = npm +class InstallTest extends Install { + static get description () { + return 'Install package(s) and run tests' } - get usage () { - return usageUtil( - 'install-test', - 'npm install-test [args]' + - '\nSame args as `npm install`' - ) - } - - async completion (opts) { - return this.npm.commands.install.completion(opts) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'install-test' } exec (args, cb) { diff --git a/lib/install.js b/lib/install.js index d7fd384d5bd6f..6611763978e61 100644 --- a/lib/install.js +++ b/lib/install.js @@ -3,35 +3,58 @@ const fs = require('fs') const util = require('util') const readdir = util.promisify(fs.readdir) -const usageUtil = require('./utils/usage.js') const reifyFinish = require('./utils/reify-finish.js') const log = require('npmlog') const { resolve, join } = require('path') const Arborist = require('@npmcli/arborist') const runScript = require('@npmcli/run-script') -class Install { - constructor (npm) { - this.npm = npm +const ArboristWorkspaceCmd = require('./workspaces/arborist-cmd.js') +class Install extends ArboristWorkspaceCmd { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Install a package' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'install' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'install', - 'npm install (with no args, in package dir)' + - '\nnpm install [<@scope>/]<pkg>' + - '\nnpm install [<@scope>/]<pkg>@<tag>' + - '\nnpm install [<@scope>/]<pkg>@<version>' + - '\nnpm install [<@scope>/]<pkg>@<version range>' + - '\nnpm install <alias>@npm:<name>' + - '\nnpm install <folder>' + - '\nnpm install <tarball file>' + - '\nnpm install <tarball url>' + - '\nnpm install <git:// url>' + - '\nnpm install <github username>/<github project>', - '[--save-prod|--save-dev|--save-optional|--save-peer] [--save-exact] [--no-save]' - ) + static get params () { + return 
[ + 'save', + 'save-exact', + 'global', + 'global-style', + 'legacy-bundling', + 'strict-peer-deps', + 'package-lock', + 'omit', + 'ignore-scripts', + 'audit', + 'bin-links', + 'fund', + 'dry-run', + ...super.params, + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return [ + '[<@scope>/]<pkg>', + '[<@scope>/]<pkg>@<tag>', + '[<@scope>/]<pkg>@<version>', + '[<@scope>/]<pkg>@<version range>', + '<alias>@npm:<name>', + '<folder>', + '<tarball file>', + '<tarball url>', + '<git:// url>', + '<github username>/<github project>', + ] } async completion (opts) { @@ -100,7 +123,8 @@ class Install { async install (args) { // the /path/to/node_modules/.. const globalTop = resolve(this.npm.globalDir, '..') - const { ignoreScripts, global: isGlobalInstall } = this.npm.flatOptions + const ignoreScripts = this.npm.config.get('ignore-scripts') + const isGlobalInstall = this.npm.config.get('global') const where = isGlobalInstall ? globalTop : this.npm.prefix // don't try to install the prefix into itself @@ -114,17 +138,19 @@ class Install { if (this.npm.config.get('dev')) log.warn('install', 'Usage of the `--dev` option is deprecated. Use `--include=dev` instead.') - const arb = new Arborist({ + const opts = { ...this.npm.flatOptions, + log: this.npm.log, + auditLevel: null, path: where, - }) - - await arb.reify({ - ...this.npm.flatOptions, add: args, - }) + workspaces: this.workspaceNames, + } + const arb = new Arborist(opts) + await arb.reify(opts) + if (!args.length && !isGlobalInstall && !ignoreScripts) { - const { scriptShell } = this.npm.flatOptions + const scriptShell = this.npm.config.get('script-shell') || undefined const scripts = [ 'preinstall', 'install', diff --git a/lib/link.js b/lib/link.js index 6d5e207105825..febd908718be3 100644 --- a/lib/link.js +++ b/lib/link.js @@ -8,21 +8,46 @@ const npa = require('npm-package-arg') const rpj = require('read-package-json-fast') const semver = require('semver') -const usageUtil = require('./utils/usage.js') const reifyFinish = require('./utils/reify-finish.js') -class Link { - constructor (npm) { - this.npm = npm +const ArboristWorkspaceCmd = require('./workspaces/arborist-cmd.js') +class Link extends ArboristWorkspaceCmd { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Symlink a package folder' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'link' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'link', - 'npm link (in package dir)' + - '\nnpm link [<@scope>/]<pkg>[@<version>]' - ) + static get usage () { + return [ + '(in package dir)', + '[<@scope>/]<pkg>[@<version>]', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'save', + 'save-exact', + 'global', + 'global-style', + 'legacy-bundling', + 'strict-peer-deps', + 'package-lock', + 'omit', + 'ignore-scripts', + 'audit', + 'bin-links', + 'fund', + 'dry-run', + ...super.params, + ] } async completion (opts) { @@ -61,6 +86,7 @@ class Link { const globalOpts = { ...this.npm.flatOptions, path: globalTop, + log: this.npm.log, global: true, prune: false, } @@ -96,33 +122,51 @@ class Link { // npm link should not save=true by default unless you're // using any of --save-dev or other types const save = - Boolean(this.npm.config.find('save') !== 'default' || this.npm.flatOptions.saveType) + Boolean( + 
this.npm.config.find('save') !== 'default' || + this.npm.config.get('save-optional') || + this.npm.config.get('save-peer') || + this.npm.config.get('save-dev') || + this.npm.config.get('save-prod') + ) // create a new arborist instance for the local prefix and // reify all the pending names as symlinks there const localArb = new Arborist({ ...this.npm.flatOptions, + prune: false, + log: this.npm.log, path: this.npm.prefix, save, }) await localArb.reify({ ...this.npm.flatOptions, + prune: false, path: this.npm.prefix, + log: this.npm.log, add: names.map(l => `file:${resolve(globalTop, 'node_modules', l)}`), save, + workspaces: this.workspaceNames, }) await reifyFinish(this.npm, localArb) } async linkPkg () { + const wsp = this.workspacePaths + const paths = wsp && wsp.length ? wsp : [this.npm.prefix] + const add = paths.map(path => `file:${path}`) const globalTop = resolve(this.npm.globalDir, '..') const arb = new Arborist({ ...this.npm.flatOptions, path: globalTop, + log: this.npm.log, global: true, }) - await arb.reify({ add: [`file:${this.npm.prefix}`] }) + await arb.reify({ + add, + log: this.npm.log, + }) await reifyFinish(this.npm, arb) } diff --git a/lib/ll.js b/lib/ll.js index 7915f5d27c011..3e3428a7ff5eb 100644 --- a/lib/ll.js +++ b/lib/ll.js @@ -1,13 +1,14 @@ const LS = require('./ls.js') -const usageUtil = require('./utils/usage.js') class LL extends LS { /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'll', - 'npm ll [[<@scope>/]<pkg> ...]' - ) + static get name () { + return 'll' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[[<@scope>/]<pkg> ...]'] } exec (args, cb) { diff --git a/lib/logout.js b/lib/logout.js index 9fb1eab21a152..0887ec397bf1a 100644 --- a/lib/logout.js +++ b/lib/logout.js @@ -1,19 +1,25 @@ const log = require('npmlog') const getAuth = require('npm-registry-fetch/auth.js') const npmFetch = require('npm-registry-fetch') -const usageUtil = require('./utils/usage.js') +const BaseCommand = require('./base-command.js') -class Logout { - constructor (npm) { - this.npm = npm +class Logout extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Log out of the registry' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'logout' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'logout', - 'npm logout [--registry=<url>] [--scope=<@scope>]' - ) + static get params () { + return [ + 'registry', + 'scope', + ] } exec (args, cb) { @@ -21,9 +27,10 @@ class Logout { } async logout (args) { - const { registry, scope } = this.npm.flatOptions + const registry = this.npm.config.get('registry') + const scope = this.npm.config.get('scope') const regRef = scope ? 
`${scope}:registry` : 'registry' - const reg = this.npm.flatOptions[regRef] || registry + const reg = this.npm.config.get(regRef) || registry const auth = getAuth(reg, this.npm.flatOptions) @@ -34,7 +41,7 @@ class Logout { method: 'DELETE', ignoreBody: true, }) - } else if (auth.username || auth.password) + } else if (auth.isBasicAuth) log.verbose('logout', `clearing user credentials for ${reg}`) else { const msg = `not logged in to ${reg}, so can't log out!` diff --git a/lib/ls.js b/lib/ls.js index 359fe21e6f8cc..7e41892c53442 100644 --- a/lib/ls.js +++ b/lib/ls.js @@ -1,4 +1,5 @@ -const { resolve } = require('path') +const { resolve, relative, sep } = require('path') +const relativePrefix = `.${sep}` const { EOL } = require('os') const archy = require('archy') @@ -7,9 +8,7 @@ const Arborist = require('@npmcli/arborist') const { breadth } = require('treeverse') const npa = require('npm-package-arg') -const usageUtil = require('./utils/usage.js') const completion = require('./utils/completion/installed-deep.js') -const output = require('./utils/output.js') const _depth = Symbol('depth') const _dedupe = Symbol('dedupe') @@ -22,18 +21,39 @@ const _parent = Symbol('parent') const _problems = Symbol('problems') const _required = Symbol('required') const _type = Symbol('type') +const ArboristWorkspaceCmd = require('./workspaces/arborist-cmd.js') -class LS { - constructor (npm) { - this.npm = npm +class LS extends ArboristWorkspaceCmd { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'List installed packages' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'ls', - 'npm ls [[<@scope>/]<pkg> ...]' - ) + static get name () { + return 'ls' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[[<@scope>/]<pkg> ...]'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'all', + 'json', + 'long', + 'parseable', + 'global', + 'depth', + 'omit', + 'link', + 'package-lock-only', + 'unicode', + ...super.params, + ] } /* istanbul ignore next - see test/lib/load-all-commands.js */ @@ -46,24 +66,23 @@ class LS { } async ls (args) { - const { - all, - color, - depth, - json, - long, - global, - parseable, - prefix, - unicode, - } = this.npm.flatOptions - const path = global ? resolve(this.npm.globalDir, '..') : prefix + const all = this.npm.config.get('all') + const color = this.npm.color + const depth = this.npm.config.get('depth') const dev = this.npm.config.get('dev') const development = this.npm.config.get('development') + const global = this.npm.config.get('global') + const json = this.npm.config.get('json') const link = this.npm.config.get('link') + const long = this.npm.config.get('long') const only = this.npm.config.get('only') + const parseable = this.npm.config.get('parseable') const prod = this.npm.config.get('prod') const production = this.npm.config.get('production') + const unicode = this.npm.config.get('unicode') + const packageLockOnly = this.npm.config.get('package-lock-only') + + const path = global ? 
resolve(this.npm.globalDir, '..') : this.npm.prefix const arb = new Arborist({ global, @@ -71,7 +90,26 @@ class LS { legacyPeerDeps: false, path, }) - const tree = await this.initTree({arb, args }) + const tree = await this.initTree({arb, args, packageLockOnly }) + + // filters by workspaces nodes when using -w <workspace-name> + // We only have to filter the first layer of edges, so we don't + // explore anything that isn't part of the selected workspace set. + let wsNodes + if (this.workspaceNames && this.workspaceNames.length) + wsNodes = arb.workspaceNodes(tree, this.workspaceNames) + const filterBySelectedWorkspaces = edge => { + if (!wsNodes || !wsNodes.length) + return true + + if (edge.from.isProjectRoot) { + return edge.to && + edge.to.isWorkspace & + wsNodes.includes(edge.to.target) + } + + return true + } const seenItems = new Set() const seenNodes = new Map() @@ -94,27 +132,29 @@ class LS { // `nodeResult` is going to be the returned `item` from `visit` getChildren (node, nodeResult) { const seenPaths = new Set() + const workspace = node.isWorkspace + const currentDepth = workspace ? 0 : node[_depth] const shouldSkipChildren = - !(node instanceof Arborist.Node) || (node[_depth] > depthToPrint) + !(node instanceof Arborist.Node) || (currentDepth > depthToPrint) return (shouldSkipChildren) ? [] - : [...(node.target || node).edgesOut.values()] + : [...(node.target).edgesOut.values()] + .filter(filterBySelectedWorkspaces) .filter(filterByEdgesTypes({ + currentDepth, dev, development, link, - node, prod, production, only, - tree, })) .map(mapEdgesToNodes({ seenPaths })) .concat(appendExtraneousChildren({ node, seenPaths })) .sort(sortAlphabetically) .map(augmentNodesWithMetadata({ args, - currentDepth: node[_depth], + currentDepth, nodeResult, seenNodes, })) @@ -147,7 +187,7 @@ class LS { const [rootError] = tree.errors.filter(e => e.code === 'EJSONPARSE' && e.path === resolve(path, 'package.json')) - output( + this.npm.output( json ? jsonOutput({ path, problems, result, rootError, seenItems }) : parseable @@ -166,7 +206,10 @@ class LS { ) } - if (problems.size) { + const shouldThrow = problems.size && + ![...problems].every(problem => problem.startsWith('extraneous:')) + + if (shouldThrow) { throw Object.assign( new Error([...problems].join(EOL)), { code: 'ELSPROBLEMS' } @@ -174,8 +217,13 @@ class LS { } } - async initTree ({ arb, args }) { - const tree = await arb.loadActual() + async initTree ({ arb, args, packageLockOnly }) { + const tree = await ( + packageLockOnly + ? arb.loadVirtual() + : arb.loadActual() + ) + tree[_include] = args.length === 0 tree[_depth] = 0 @@ -239,7 +287,8 @@ const augmentItemWithIncludeMetadata = (node, item) => { const getHumanOutputItem = (node, { args, color, global, long }) => { const { pkgid, path } = node - let printable = pkgid + const workspacePkgId = color ? chalk.green(pkgid) : pkgid + let printable = node.isWorkspace ? workspacePkgId : pkgid // special formatting for top-level package name if (node.isRoot) { @@ -256,6 +305,12 @@ const getHumanOutputItem = (node, { args, color, global, long }) => { ? chalk.yellow.bgBlack : chalk.red.bgBlack const missingMsg = `UNMET ${isOptional(node) ? 'OPTIONAL ' : ''}DEPENDENCY` + const targetLocation = node.root + ? relative(node.root.realpath, node.realpath) + : node.targetLocation + const invalid = node[_invalid] + ? `invalid: ${node[_invalid]}` + : '' const label = ( node[_missing] @@ -269,8 +324,8 @@ const getHumanOutputItem = (node, { args, color, global, long }) => { : '' ) + ( - node[_invalid] - ? 
' ' + (color ? chalk.red.bgBlack('invalid') : 'invalid') + invalid + ? ' ' + (color ? chalk.red.bgBlack(invalid) : invalid) : '' ) + ( @@ -279,7 +334,7 @@ const getHumanOutputItem = (node, { args, color, global, long }) => { : '' ) + (isGitNode(node) ? ` (${node.resolved})` : '') + - (node.isLink ? ` -> ${node.realpath}` : '') + + (node.isLink ? ` -> ${relativePrefix}${targetLocation}` : '') + (long ? `${EOL}${node.package.description || ''}` : '') return augmentItemWithIncludeMetadata(node, { label, nodes: [] }) @@ -302,7 +357,7 @@ const getJsonOutputItem = (node, { global, long }) => { if (node.isRoot && hasPackageJson) item.name = node.package.name || node.name - if (long) { + if (long && !node[_missing]) { item.name = item[_name] const { dependencies, ...packageInfo } = node.package Object.assign(item, packageInfo) @@ -321,7 +376,7 @@ const getJsonOutputItem = (node, { global, long }) => { item.extraneous = true if (node[_invalid]) - item.invalid = true + item.invalid = node[_invalid] if (node[_missing] && !isOptional(node)) { item.required = node[_required] @@ -334,22 +389,21 @@ const getJsonOutputItem = (node, { global, long }) => { } const filterByEdgesTypes = ({ + currentDepth, dev, development, link, - node, prod, production, only, - tree, }) => { // filter deps by type, allows for: `npm ls --dev`, `npm ls --prod`, // `npm ls --link`, `npm ls --only=dev`, etc - const filterDev = node === tree && + const filterDev = currentDepth === 0 && (dev || development || /^dev(elopment)?$/.test(only)) - const filterProd = node === tree && + const filterProd = currentDepth === 0 && (prod || production || /^prod(uction)?$/.test(only)) - const filterLink = node === tree && link + const filterLink = currentDepth === 0 && link return (edge) => (filterDev ? edge.dev : true) && @@ -381,9 +435,15 @@ const mapEdgesToNodes = ({ seenPaths }) => (edge) => { if (node.path) seenPaths.add(node.path) - node[_required] = edge.spec + node[_required] = edge.spec || '*' node[_type] = edge.type - node[_invalid] = edge.invalid + + if (edge.invalid) { + const spec = JSON.stringify(node[_required]) + const from = edge.from.location || 'the root project' + node[_invalid] = (node[_invalid] ? node[_invalid] + ', ' : '') + + (`${spec} from ${from}`) + } return node } @@ -403,6 +463,9 @@ const augmentNodesWithMetadata = ({ // revisit that node in tree traversal logic, so we make it so that // we have a diff obj for deduped nodes: if (seenNodes.has(node.path)) { + const { realpath, root } = node + const targetLocation = root ? 
relative(root.realpath, realpath) + : node.targetLocation node = { name: node.name, version: node.version, @@ -411,9 +474,12 @@ const augmentNodesWithMetadata = ({ path: node.path, isLink: node.isLink, realpath: node.realpath, + targetLocation, + [_type]: node[_type], [_invalid]: node[_invalid], [_missing]: node[_missing], - [_dedupe]: true, + // if it's missing, it's not deduped, it's just missing + [_dedupe]: !node[_missing], } } else { // keeps track of already seen nodes in order to check for dedupes @@ -438,7 +504,7 @@ const augmentNodesWithMetadata = ({ } const sortAlphabetically = (a, b) => - a.pkgid.localeCompare(b.pkgid) + a.pkgid.localeCompare(b.pkgid, 'en') const humanOutput = ({ color, result, seenItems, unicode }) => { // we need to traverse the entire tree in order to determine which items diff --git a/lib/npm.js b/lib/npm.js index 1f8c785e755c4..966d11210c275 100644 --- a/lib/npm.js +++ b/lib/npm.js @@ -1,16 +1,12 @@ -// The order of the code in this file is relevant, because a lot of things -// require('npm.js'), but also we need to use some of those modules. So, -// we define and instantiate the singleton ahead of loading any modules -// required for its methods. - -// these are all dependencies used in the ctor const EventEmitter = require('events') const { resolve, dirname } = require('path') const Config = require('@npmcli/config') +const log = require('npmlog') // Patch the global fs module here at the app level require('graceful-fs').gracefulify(require('fs')) +// TODO make this only ever load once (or unload) in tests const procLogListener = require('./utils/proc-log-listener.js') const proxyCmds = new Proxy({}, { @@ -25,7 +21,7 @@ const proxyCmds = new Proxy({}, { // old way of doing things, until we can make breaking changes to the // npm.commands[x] api target[actual] = new Proxy( - (args, cb) => npm[_runCmd](cmd, impl, args, cb), + (args, cb) => npm[_runCmd](actual, impl, args, cb), { get: (target, attr, receiver) => { return Reflect.get(impl, attr, receiver) @@ -36,40 +32,74 @@ const proxyCmds = new Proxy({}, { }, }) -const { types, defaults, shorthands } = require('./utils/config.js') +// Timers in progress +const timers = new Map() +// Finished timers +const timings = {} + +const processOnTimeHandler = (name) => { + timers.set(name, Date.now()) +} + +const processOnTimeEndHandler = (name) => { + if (timers.has(name)) { + const ms = Date.now() - timers.get(name) + log.timing(name, `Completed in ${ms}ms`) + timings[name] = ms + timers.delete(name) + } else + log.silly('timing', "Tried to end timer that doesn't exist:", name) +} + +const { definitions, flatten, shorthands } = require('./utils/config/index.js') const { shellouts } = require('./utils/cmd-list.js') +const usage = require('./utils/npm-usage.js') + +const which = require('which') + +const deref = require('./utils/deref-command.js') +const setupLog = require('./utils/setup-log.js') +const cleanUpLogFiles = require('./utils/cleanup-log-files.js') +const getProjectScope = require('./utils/get-project-scope.js') let warnedNonDashArg = false const _runCmd = Symbol('_runCmd') const _load = Symbol('_load') -const _flatOptions = Symbol('_flatOptions') const _tmpFolder = Symbol('_tmpFolder') const _title = Symbol('_title') + const npm = module.exports = new class extends EventEmitter { constructor () { super() - require('./utils/perf.js') - this.modes = { - exec: 0o755, - file: 0o644, - umask: 0o22, - } this.started = Date.now() this.command = null this.commands = proxyCmds + this.timings = timings + 
this.timers = timers + this.perfStart() procLogListener() process.emit('time', 'npm') this.version = require('../package.json').version this.config = new Config({ npmPath: dirname(__dirname), - types, - defaults, + definitions, + flatten, shorthands, }) this[_title] = process.title this.updateNotification = null } + perfStart () { + process.on('time', processOnTimeHandler) + process.on('timeEnd', processOnTimeEndHandler) + } + + perfStop () { + process.off('time', processOnTimeHandler) + process.off('timeEnd', processOnTimeEndHandler) + } + get shelloutCommands () { return shellouts } @@ -82,8 +112,8 @@ const npm = module.exports = new class extends EventEmitter { [_runCmd] (cmd, impl, args, cb) { if (!this.loaded) { throw new Error( - 'Call npm.load(cb) before using this command.\n' + - 'See the README.md or bin/npm-cli.js for example usage.' + 'Call npm.load() before using this command.\n' + + 'See lib/cli.js for example usage.' ) } @@ -101,13 +131,25 @@ const npm = module.exports = new class extends EventEmitter { args.filter(arg => /^[\u2010-\u2015\u2212\uFE58\uFE63\uFF0D]/.test(arg)) .forEach(arg => { warnedNonDashArg = true - log.error('arg', 'Argument starts with non-ascii dash, this is probably invalid:', arg) + this.log.error('arg', 'Argument starts with non-ascii dash, this is probably invalid:', arg) }) } + const workspacesEnabled = this.config.get('workspaces') + const workspacesFilters = this.config.get('workspace') + const filterByWorkspaces = workspacesEnabled || workspacesFilters.length > 0 + if (this.config.get('usage')) { - console.log(impl.usage) + this.output(impl.usage) cb() + } else if (filterByWorkspaces) { + if (this.config.get('global')) + return cb(new Error('Workspaces not supported for global packages')) + + impl.execWorkspaces(args, this.config.get('workspace'), er => { + process.emit('timeEnd', `command:${cmd}`) + cb(er) + }) } else { impl.exec(args, er => { process.emit('timeEnd', `command:${cmd}`) @@ -116,36 +158,32 @@ const npm = module.exports = new class extends EventEmitter { } } - // call with parsed CLI options and a callback when done loading - // XXX promisify this and stop taking a callback load (cb) { - if (!cb || typeof cb !== 'function') - throw new TypeError('must call as: npm.load(callback)') - - this.once('load', cb) - if (this.loaded || this.loadErr) { - this.emit('load', this.loadErr) - return + if (cb && typeof cb !== 'function') + throw new TypeError('callback must be a function if provided') + + if (!this.loadPromise) { + process.emit('time', 'npm:load') + this.log.pause() + this.loadPromise = new Promise((resolve, reject) => { + this[_load]().catch(er => er).then((er) => { + this.loadErr = er + if (!er && this.config.get('force')) + this.log.warn('using --force', 'Recommended protections disabled.') + + process.emit('timeEnd', 'npm:load') + if (er) + return reject(er) + resolve() + }) + }) } - if (this.loading) - return + if (!cb) + return this.loadPromise - this.loading = true - - process.emit('time', 'npm:load') - this.log.pause() - return this[_load]().catch(er => er).then((er) => { - this.loading = false - this.loadErr = er - if (!er && this.config.get('force')) - this.log.warn('using --force', 'Recommended protections disabled.') - - if (!er && !this[_flatOptions]) - this[_flatOptions] = require('./utils/flat-options.js')(this) - - process.emit('timeEnd', 'npm:load') - this.emit('load', er) - }) + // loadPromise is returned here for legacy purposes, old code was allowing + // the mixing of callback and promise here. 
+ return this.loadPromise.then(cb, cb) } get loaded () { @@ -162,14 +200,24 @@ const npm = module.exports = new class extends EventEmitter { } async [_load] () { - const node = await which(process.argv[0]).catch(er => null) + process.emit('time', 'npm:load:whichnode') + let node + try { + node = which.sync(process.argv[0]) + } catch (_) { + // TODO should we throw here? + } + process.emit('timeEnd', 'npm:load:whichnode') if (node && node.toUpperCase() !== process.execPath.toUpperCase()) { - log.verbose('node symlink', node) + this.log.verbose('node symlink', node) process.execPath = node this.config.execPath = node } + process.emit('time', 'npm:load:configload') await this.config.load() + process.emit('timeEnd', 'npm:load:configload') + this.argv = this.config.parsedArgv.remain // note: this MUST be shorter than the actual argv length, because it // uses the same memory, so node will truncate it if it's too long. @@ -177,33 +225,46 @@ const npm = module.exports = new class extends EventEmitter { // don't show that. (Regrettable historical choice to put it there.) // Any other secrets are configs only, so showing only the positional // args keeps those from being leaked. + process.emit('time', 'npm:load:setTitle') const tokrev = deref(this.argv[0]) === 'token' && this.argv[1] === 'revoke' this.title = tokrev ? 'npm token revoke' + (this.argv[2] ? ' ***' : '') : ['npm', ...this.argv].join(' ') + process.emit('timeEnd', 'npm:load:setTitle') - this.color = setupLog(this.config) + process.emit('time', 'npm:load:setupLog') + setupLog(this.config) + process.emit('timeEnd', 'npm:load:setupLog') process.env.COLOR = this.color ? '1' : '0' - cleanUpLogFiles(this.cache, this.config.get('logs-max'), log.warn) + process.emit('time', 'npm:load:cleanupLog') + cleanUpLogFiles(this.cache, this.config.get('logs-max'), this.log.warn) + process.emit('timeEnd', 'npm:load:cleanupLog') - log.resume() - const umask = this.config.get('umask') - this.modes = { - exec: 0o777 & (~umask), - file: 0o666 & (~umask), - umask, - } + this.log.resume() + process.emit('time', 'npm:load:configScope') const configScope = this.config.get('scope') if (configScope && !/^@/.test(configScope)) this.config.set('scope', `@${configScope}`, this.config.find('scope')) + process.emit('timeEnd', 'npm:load:configScope') + process.emit('time', 'npm:load:projectScope') this.projectScope = this.config.get('scope') || getProjectScope(this.prefix) + process.emit('timeEnd', 'npm:load:projectScope') } get flatOptions () { - return this[_flatOptions] + const { flat } = this.config + if (this.command) + flat.npmCommand = this.command + return flat + } + + get color () { + // This is a special derived value that takes into consideration not only + // the config, but whether or not we are operating in a tty. 
+ return this.flatOptions.color } get lockfileVersion () { @@ -274,6 +335,10 @@ const npm = module.exports = new class extends EventEmitter { this[k] = r } + get usage () { + return usage(this) + } + // XXX add logging to see if we actually use this get tmp () { if (!this[_tmpFolder]) { @@ -282,19 +347,14 @@ const npm = module.exports = new class extends EventEmitter { } return resolve(this.config.get('tmp'), this[_tmpFolder]) } -}() - -// now load everything required by the class methods - -const log = require('npmlog') -const { promisify } = require('util') - -const which = promisify(require('which')) -const deref = require('./utils/deref-command.js') -const setupLog = require('./utils/setup-log.js') -const cleanUpLogFiles = require('./utils/cleanup-log-files.js') -const getProjectScope = require('./utils/get-project-scope.js') + // output to stdout in a progress bar compatible way + output (...msg) { + this.log.clearProgress() + console.log(...msg) + this.log.showProgress() + } +}() if (require.main === module) require('./cli.js')(process) diff --git a/lib/org.js b/lib/org.js index 054e1833dba4b..a494e1eaf9486 100644 --- a/lib/org.js +++ b/lib/org.js @@ -1,21 +1,35 @@ const liborg = require('libnpmorg') -const usageUtil = require('./utils/usage.js') -const output = require('./utils/output.js') const otplease = require('./utils/otplease.js') const Table = require('cli-table3') +const BaseCommand = require('./base-command.js') -class Org { - constructor (npm) { - this.npm = npm +class Org extends BaseCommand { + static get description () { + return 'Manage orgs' } - get usage () { - return usageUtil( - 'org', - 'npm org set orgname username [developer | admin | owner]\n' + - 'npm org rm orgname username\n' + - 'npm org ls orgname [<username>]' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'org' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return [ + 'set orgname username [developer | admin | owner]', + 'rm orgname username', + 'ls orgname [<username>]', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'registry', + 'otp', + 'json', + 'parseable', + ] } async completion (opts) { @@ -72,17 +86,17 @@ class Org { return liborg.set(org, user, role, opts).then(memDeets => { if (opts.json) - output(JSON.stringify(memDeets, null, 2)) + this.npm.output(JSON.stringify(memDeets, null, 2)) else if (opts.parseable) { - output(['org', 'orgsize', 'user', 'role'].join('\t')) - output([ + this.npm.output(['org', 'orgsize', 'user', 'role'].join('\t')) + this.npm.output([ memDeets.org.name, memDeets.org.size, memDeets.user, memDeets.role, ].join('\t')) } else if (!opts.silent && opts.loglevel !== 'silent') - output(`Added ${memDeets.user} as ${memDeets.role} to ${memDeets.org.name}. You now have ${memDeets.org.size} member${memDeets.org.size === 1 ? '' : 's'} in this org.`) + this.npm.output(`Added ${memDeets.user} as ${memDeets.role} to ${memDeets.org.name}. You now have ${memDeets.org.size} member${memDeets.org.size === 1 ? 
'' : 's'} in this org.`) return memDeets }) @@ -102,17 +116,17 @@ class Org { org = org.replace(/^[~@]?/, '') const userCount = Object.keys(roster).length if (opts.json) { - output(JSON.stringify({ + this.npm.output(JSON.stringify({ user, org, userCount, deleted: true, })) } else if (opts.parseable) { - output(['user', 'org', 'userCount', 'deleted'].join('\t')) - output([user, org, userCount, true].join('\t')) + this.npm.output(['user', 'org', 'userCount', 'deleted'].join('\t')) + this.npm.output([user, org, userCount, true].join('\t')) } else if (!opts.silent && opts.loglevel !== 'silent') - output(`Successfully removed ${user} from ${org}. You now have ${userCount} member${userCount === 1 ? '' : 's'} in this org.`) + this.npm.output(`Successfully removed ${user} from ${org}. You now have ${userCount} member${userCount === 1 ? '' : 's'} in this org.`) }) } @@ -129,18 +143,18 @@ class Org { roster = newRoster } if (opts.json) - output(JSON.stringify(roster, null, 2)) + this.npm.output(JSON.stringify(roster, null, 2)) else if (opts.parseable) { - output(['user', 'role'].join('\t')) + this.npm.output(['user', 'role'].join('\t')) Object.keys(roster).forEach(user => { - output([user, roster[user]].join('\t')) + this.npm.output([user, roster[user]].join('\t')) }) } else if (!opts.silent && opts.loglevel !== 'silent') { const table = new Table({ head: ['user', 'role'] }) Object.keys(roster).sort().forEach(user => { table.push([user, roster[user]]) }) - output(table.toString()) + this.npm.output(table.toString()) } }) } diff --git a/lib/outdated.js b/lib/outdated.js index fc6967faf60fe..01e268fe96aee 100644 --- a/lib/outdated.js +++ b/lib/outdated.js @@ -2,27 +2,42 @@ const os = require('os') const path = require('path') const pacote = require('pacote') const table = require('text-table') -const color = require('ansicolors') +const color = require('chalk') const styles = require('ansistyles') const npa = require('npm-package-arg') const pickManifest = require('npm-pick-manifest') const Arborist = require('@npmcli/arborist') -const output = require('./utils/output.js') -const usageUtil = require('./utils/usage.js') const ansiTrim = require('./utils/ansi-trim.js') +const ArboristWorkspaceCmd = require('./workspaces/arborist-cmd.js') -class Outdated { - constructor (npm) { - this.npm = npm +class Outdated extends ArboristWorkspaceCmd { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Check for outdated packages' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'outdated' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('outdated', - 'npm outdated [[<@scope>/]<pkg> ...]' - ) + static get usage () { + return ['[[<@scope>/]<pkg> ...]'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'all', + 'json', + 'long', + 'parseable', + 'global', + 'workspace', + ] } exec (args, cb) { @@ -30,15 +45,13 @@ class Outdated { } async outdated (args) { - this.opts = this.npm.flatOptions - const global = path.resolve(this.npm.globalDir, '..') - const where = this.opts.global + const where = this.npm.config.get('global') ? 
global : this.npm.prefix const arb = new Arborist({ - ...this.opts, + ...this.npm.flatOptions, path: where, }) @@ -46,6 +59,11 @@ class Outdated { this.list = [] this.tree = await arb.loadActual() + if (this.workspaceNames && this.workspaceNames.length) { + this.filterSet = + arb.workspaceDependencySet(this.tree, this.workspaceNames) + } + if (args.length !== 0) { // specific deps for (let i = 0; i < args.length; i++) { @@ -53,7 +71,7 @@ class Outdated { this.getEdges(nodes, 'edgesIn') } } else { - if (this.opts.all) { + if (this.npm.config.get('all')) { // all deps in tree const nodes = this.tree.inventory.values() this.getEdges(nodes, 'edgesOut') @@ -67,17 +85,17 @@ class Outdated { })) // sorts list alphabetically - const outdated = this.list.sort((a, b) => a.name.localeCompare(b.name)) + const outdated = this.list.sort((a, b) => a.name.localeCompare(b.name, 'en')) // return if no outdated packages - if (outdated.length === 0 && !this.opts.json) + if (outdated.length === 0 && !this.npm.config.get('json')) return // display results - if (this.opts.json) - output(this.makeJSON(outdated)) - else if (this.opts.parseable) - output(this.makeParseable(outdated)) + if (this.npm.config.get('json')) + this.npm.output(this.makeJSON(outdated)) + else if (this.npm.config.get('parseable')) + this.npm.output(this.makeParseable(outdated)) else { const outList = outdated.map(x => this.makePretty(x)) const outHead = ['Package', @@ -88,24 +106,30 @@ class Outdated { 'Depended by', ] - if (this.opts.long) + if (this.npm.config.get('long')) outHead.push('Package Type', 'Homepage') const outTable = [outHead].concat(outList) - if (this.opts.color) + if (this.npm.color) outTable[0] = outTable[0].map(heading => styles.underline(heading)) const tableOpts = { align: ['l', 'r', 'r', 'r', 'l'], stringLength: s => ansiTrim(s).length, } - output(table(outTable, tableOpts)) + this.npm.output(table(outTable, tableOpts)) } } getEdges (nodes, type) { - if (!nodes) - return this.getEdgesOut(this.tree) + // when no nodes are provided then it should only read direct deps + // from the root node and its workspaces direct dependencies + if (!nodes) { + this.getEdgesOut(this.tree) + this.getWorkspacesEdges() + return + } + for (const node of nodes) { type === 'edgesOut' ? 
this.getEdgesOut(node) @@ -115,23 +139,52 @@ class Outdated { getEdgesIn (node) { for (const edge of node.edgesIn) - this.edges.add(edge) + this.trackEdge(edge) } getEdgesOut (node) { - if (this.opts.global) { + // TODO: normalize usage of edges and avoid looping through nodes here + if (this.npm.config.get('global')) { for (const child of node.children.values()) - this.edges.add(child) + this.trackEdge(child) } else { for (const edge of node.edgesOut.values()) - this.edges.add(edge) + this.trackEdge(edge) + } + } + + trackEdge (edge) { + const filteredOut = + edge.from + && this.filterSet + && this.filterSet.size > 0 + && !this.filterSet.has(edge.from.target) + + if (filteredOut) + return + + this.edges.add(edge) + } + + getWorkspacesEdges (node) { + if (this.npm.config.get('global')) + return + + for (const edge of this.tree.edgesOut.values()) { + const workspace = edge + && edge.to + && edge.to.target + && edge.to.target.isWorkspace + + if (workspace) + this.getEdgesOut(edge.to.target) } } async getPackument (spec) { const packument = await pacote.packument(spec, { ...this.npm.flatOptions, - fullMetadata: this.npm.flatOptions.long, + fullMetadata: this.npm.config.get('long'), preferOnline: true, }) return packument @@ -148,7 +201,7 @@ class Outdated { : edge.dev ? 'devDependencies' : 'dependencies' - for (const omitType of this.opts.omit || []) { + for (const omitType of this.npm.config.get('omit')) { if (node[omitType]) return } @@ -176,6 +229,10 @@ class Outdated { current !== wanted.version || wanted.version !== latest.version ) { + const dependent = edge.from ? + this.maybeWorkspaceName(edge.from) + : 'global' + this.list.push({ name: edge.name, path, @@ -184,7 +241,7 @@ class Outdated { location, wanted: wanted.version, latest: latest.version, - dependent: edge.from ? edge.from.name : 'global', + dependent, homepage: packument.homepage, }) } @@ -200,6 +257,23 @@ class Outdated { } } + maybeWorkspaceName (node) { + if (!node.isWorkspace) + return node.name + + const humanOutput = + !this.npm.config.get('json') && !this.npm.config.get('parseable') + + const workspaceName = + humanOutput + ? node.pkgid + : node.name + + return this.npm.color && humanOutput + ? color.green(workspaceName) + : workspaceName + } + // formatting functions makePretty (dep) { const { @@ -215,12 +289,12 @@ class Outdated { const columns = [name, current, wanted, latest, location, dependent] - if (this.opts.long) { + if (this.npm.config.get('long')) { columns[6] = type columns[7] = homepage } - if (this.opts.color) { + if (this.npm.color) { columns[0] = color[current === wanted ? 
'yellow' : 'red'](columns[0]) // current columns[2] = color.green(columns[2]) // wanted columns[3] = color.magenta(columns[3]) // latest @@ -250,7 +324,7 @@ class Outdated { name + '@' + latest, dependent, ] - if (this.opts.long) + if (this.npm.config.get('long')) out.push(type, homepage) return out.join(':') @@ -277,7 +351,7 @@ class Outdated { dependent, location: path, } - if (this.opts.long) { + if (this.npm.config.get('long')) { out[name].type = type out[name].homepage = homepage } diff --git a/lib/owner.js b/lib/owner.js index 6cb9904880dc2..311b25064e638 100644 --- a/lib/owner.js +++ b/lib/owner.js @@ -3,27 +3,35 @@ const npa = require('npm-package-arg') const npmFetch = require('npm-registry-fetch') const pacote = require('pacote') -const output = require('./utils/output.js') const otplease = require('./utils/otplease.js') -const readLocalPkg = require('./utils/read-local-package.js') -const usageUtil = require('./utils/usage.js') +const readLocalPkgName = require('./utils/read-package-name.js') +const BaseCommand = require('./base-command.js') -class Owner { - constructor (npm) { - this.npm = npm +class Owner extends BaseCommand { + static get description () { + return 'Manage package owners' } - get usage () { - return usageUtil( - 'owner', - 'npm owner add <user> [<@scope>/]<pkg>' + - '\nnpm owner rm <user> [<@scope>/]<pkg>' + - '\nnpm owner ls [<@scope>/]<pkg>' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'owner' } - get usageError () { - return Object.assign(new Error(this.usage), { code: 'EUSAGE' }) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'registry', + 'otp', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return [ + 'add <user> [<@scope>/]<pkg>', + 'rm <user> [<@scope>/]<pkg>', + 'ls [<@scope>/]<pkg>', + ] } async completion (opts) { @@ -39,7 +47,9 @@ class Owner { // reaches registry in order to autocomplete rm if (argv[2] === 'rm') { - const pkgName = await readLocalPkg(this.npm) + if (this.npm.config.get('global')) + return [] + const pkgName = await readLocalPkgName(this.npm.prefix) if (!pkgName) return [] @@ -70,15 +80,18 @@ class Owner { case 'remove': return this.rm(args[0], args[1], opts) default: - throw this.usageError + throw this.usageError() } } async ls (pkg, opts) { if (!pkg) { - const pkgName = await readLocalPkg(this.npm) + if (this.npm.config.get('global')) + throw this.usageError() + + const pkgName = await readLocalPkgName(this.npm.prefix) if (!pkgName) - throw this.usageError + throw this.usageError() pkg = pkgName } @@ -89,9 +102,9 @@ class Owner { const packumentOpts = { ...opts, fullMetadata: true } const { maintainers } = await pacote.packument(spec, packumentOpts) if (!maintainers || !maintainers.length) - output('no admin found') + this.npm.output('no admin found') else - output(maintainers.map(o => `${o.name} <${o.email}>`).join('\n')) + this.npm.output(maintainers.map(o => `${o.name} <${o.email}>`).join('\n')) return maintainers } catch (err) { @@ -102,138 +115,145 @@ class Owner { async add (user, pkg, opts) { if (!user) - throw this.usageError + throw this.usageError() if (!pkg) { - const pkgName = await readLocalPkg(this.npm) + if (this.npm.config.get('global')) + throw this.usageError() + const pkgName = await readLocalPkgName(this.npm.prefix) if (!pkgName) - throw this.usageError + throw this.usageError() pkg = pkgName } log.verbose('owner add', '%s to %s', user, pkg) 
const spec = npa(pkg) - return putOwners(spec, user, opts, validateAddOwner) + return this.putOwners(spec, user, opts, + (newOwner, owners) => this.validateAddOwner(newOwner, owners)) } async rm (user, pkg, opts) { if (!user) - throw this.usageError + throw this.usageError() if (!pkg) { - const pkgName = await readLocalPkg(this.npm) + if (this.npm.config.get('global')) + throw this.usageError() + const pkgName = await readLocalPkgName(this.npm.prefix) if (!pkgName) - throw this.usageError + throw this.usageError() pkg = pkgName } log.verbose('owner rm', '%s from %s', user, pkg) const spec = npa(pkg) - return putOwners(spec, user, opts, validateRmOwner) + return this.putOwners(spec, user, opts, + (rmOwner, owners) => this.validateRmOwner(rmOwner, owners)) } -} -module.exports = Owner -const validateAddOwner = (newOwner, owners) => { - owners = owners || [] - for (const o of owners) { - if (o.name === newOwner.name) { - log.info( - 'owner add', - 'Already a package owner: ' + o.name + ' <' + o.email + '>' + async putOwners (spec, user, opts, validation) { + const uri = `/-/user/org.couchdb.user:${encodeURIComponent(user)}` + let u = '' + + try { + u = await npmFetch.json(uri, opts) + } catch (err) { + log.error('owner mutate', `Error getting user data for ${user}`) + throw err + } + + if (user && (!u || !u.name || u.error)) { + throw Object.assign( + new Error( + "Couldn't get user data for " + user + ': ' + JSON.stringify(u) + ), + { code: 'EOWNERUSER' } ) - return false } - } - return [ - ...owners, - newOwner, - ] -} -const validateRmOwner = (rmOwner, owners) => { - let found = false - const m = owners.filter(function (o) { - var match = (o.name === rmOwner.name) - found = found || match - return !match - }) - - if (!found) { - log.info('owner rm', 'Not a package owner: ' + rmOwner.name) - return false - } + // normalize user data + u = { name: u.name, email: u.email } - if (!m.length) { - throw Object.assign( - new Error( - 'Cannot remove all owners of a package. Add someone else first.' - ), - { code: 'EOWNERRM' } - ) - } + const data = await pacote.packument(spec, { ...opts, fullMetadata: true }) - return m -} + // save the number of maintainers before validation for comparison + const before = data.maintainers ? 
data.maintainers.length : 0 -const putOwners = async (spec, user, opts, validation) => { - const uri = `/-/user/org.couchdb.user:${encodeURIComponent(user)}` - let u = '' + const m = validation(u, data.maintainers) + if (!m) + return // invalid owners - try { - u = await npmFetch.json(uri, opts) - } catch (err) { - log.error('owner mutate', `Error getting user data for ${user}`) - throw err - } + const body = { + _id: data._id, + _rev: data._rev, + maintainers: m, + } + const dataPath = `/${spec.escapedName}/-rev/${encodeURIComponent(data._rev)}` + const res = await otplease(opts, opts => { + return npmFetch.json(dataPath, { + ...opts, + method: 'PUT', + body, + spec, + }) + }) - if (user && (!u || !u.name || u.error)) { - throw Object.assign( - new Error( - "Couldn't get user data for " + user + ': ' + JSON.stringify(u) - ), - { code: 'EOWNERUSER' } - ) + if (!res.error) { + if (m.length < before) + this.npm.output(`- ${user} (${spec.name})`) + else + this.npm.output(`+ ${user} (${spec.name})`) + } else { + throw Object.assign( + new Error('Failed to update package: ' + JSON.stringify(res)), + { code: 'EOWNERMUTATE' } + ) + } + return res } - // normalize user data - u = { name: u.name, email: u.email } + validateAddOwner (newOwner, owners) { + owners = owners || [] + for (const o of owners) { + if (o.name === newOwner.name) { + log.info( + 'owner add', + 'Already a package owner: ' + o.name + ' <' + o.email + '>' + ) + return false + } + } + return [ + ...owners, + newOwner, + ] + } - const data = await pacote.packument(spec, { ...opts, fullMetadata: true }) + validateRmOwner (rmOwner, owners) { + let found = false + const m = owners.filter(function (o) { + var match = (o.name === rmOwner.name) + found = found || match + return !match + }) - // save the number of maintainers before validation for comparison - const before = data.maintainers ? data.maintainers.length : 0 + if (!found) { + log.info('owner rm', 'Not a package owner: ' + rmOwner.name) + return false + } - const m = validation(u, data.maintainers) - if (!m) - return // invalid owners + if (!m.length) { + throw Object.assign( + new Error( + 'Cannot remove all owners of a package. Add someone else first.' 
+ ), + { code: 'EOWNERRM' } + ) + } - const body = { - _id: data._id, - _rev: data._rev, - maintainers: m, + return m } - const dataPath = `/${spec.escapedName}/-rev/${encodeURIComponent(data._rev)}` - const res = await otplease(opts, opts => - npmFetch.json(dataPath, { - ...opts, - method: 'PUT', - body, - spec, - })) - - if (!res.error) { - if (m.length < before) - output(`- ${user} (${spec.name})`) - else - output(`+ ${user} (${spec.name})`) - } else { - throw Object.assign( - new Error('Failed to update package: ' + JSON.stringify(res)), - { code: 'EOWNERMUTATE' } - ) - } - return res } +module.exports = Owner diff --git a/lib/pack.js b/lib/pack.js index cf1e77f48ee69..8fc89db1a0b2b 100644 --- a/lib/pack.js +++ b/lib/pack.js @@ -3,54 +3,109 @@ const log = require('npmlog') const pacote = require('pacote') const libpack = require('libnpmpack') const npa = require('npm-package-arg') +const path = require('path') const { getContents, logTar } = require('./utils/tar.js') const writeFile = util.promisify(require('fs').writeFile) -const output = require('./utils/output.js') -const usageUtil = require('./utils/usage.js') +const BaseCommand = require('./base-command.js') -class Pack { - constructor (npm) { - this.npm = npm +class Pack extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Create a tarball from a package' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'pack' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('pack', 'npm pack [[<@scope>/]<pkg>...] [--dry-run]') + static get params () { + return [ + 'dry-run', + 'json', + 'pack-destination', + 'workspace', + 'workspaces', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[[<@scope>/]<pkg>...]'] } exec (args, cb) { this.pack(args).then(() => cb()).catch(cb) } + execWorkspaces (args, filters, cb) { + this.packWorkspaces(args, filters).then(() => cb()).catch(cb) + } + async pack (args) { if (args.length === 0) args = ['.'] - const { unicode } = this.npm.flatOptions + const unicode = this.npm.config.get('unicode') + const dryRun = this.npm.config.get('dry-run') + const json = this.npm.config.get('json') - // clone the opts because pacote mutates it with resolved/integrity - const tarballs = await Promise.all(args.map(async (arg) => { + // Get the manifests and filenames first so we can bail early on manifest + // errors before making any tarballs + const manifests = [] + for (const arg of args) { const spec = npa(arg) - const { dryRun } = this.npm.flatOptions const manifest = await pacote.manifest(spec, this.npm.flatOptions) + if (!manifest._id) + throw new Error('Invalid package, must have name and version') + const filename = `${manifest.name}-${manifest.version}.tgz` .replace(/^@/, '').replace(/\//, '-') + manifests.push({ arg, filename, manifest }) + } + + // Load tarball names up for printing afterward to isolate from the + // noise generated during packing + const tarballs = [] + for (const { arg, filename, manifest } of manifests) { const tarballData = await libpack(arg, this.npm.flatOptions) const pkgContents = await getContents(manifest, tarballData) + const tarballFilename = path.resolve(this.npm.config.get('pack-destination'), filename) if (!dryRun) - await writeFile(filename, tarballData) + await writeFile(tarballFilename, tarballData) - return pkgContents - })) + 
tarballs.push(pkgContents) + } + + if (json) { + this.npm.output(JSON.stringify(tarballs, null, 2)) + return + } for (const tar of tarballs) { logTar(tar, { log, unicode }) - output(tar.filename.replace(/^@/, '').replace(/\//, '-')) + this.npm.output(tar.filename.replace(/^@/, '').replace(/\//, '-')) + } + } + + async packWorkspaces (args, filters) { + // If they either ask for nothing, or explicitly include '.' in the args, + // we effectively translate that into each workspace requested + + const useWorkspaces = args.length === 0 || args.includes('.') + + if (!useWorkspaces) { + this.npm.log.warn('Ignoring workspaces for specified package(s)') + return this.pack(args) } + + await this.setWorkspaces(filters) + return this.pack([...this.workspacePaths, ...args.filter(a => a !== '.')]) } } module.exports = Pack diff --git a/lib/ping.js b/lib/ping.js index e43f0640f212b..fbfb177ff87fc 100644 --- a/lib/ping.js +++ b/lib/ping.js @@ -1,16 +1,21 @@ const log = require('npmlog') -const output = require('./utils/output.js') -const usageUtil = require('./utils/usage.js') const pingUtil = require('./utils/ping.js') +const BaseCommand = require('./base-command.js') -class Ping { - constructor (npm) { - this.npm = npm +class Ping extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Ping npm registry' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return ['registry'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('ping', 'npm ping\nping registry') + static get name () { + return 'ping' } exec (args, cb) { @@ -18,14 +23,14 @@ class Ping { } async ping (args) { - log.notice('PING', this.npm.flatOptions.registry) + log.notice('PING', this.npm.config.get('registry')) const start = Date.now() const details = await pingUtil(this.npm.flatOptions) const time = Date.now() - start - log.notice('PONG', `${time / 1000}ms`) - if (this.npm.flatOptions.json) { - output(JSON.stringify({ - registry: this.npm.flatOptions.registry, + log.notice('PONG', `${time}ms`) + if (this.npm.config.get('json')) { + this.npm.output(JSON.stringify({ + registry: this.npm.config.get('registry'), time, details, }, null, 2)) diff --git a/lib/pkg.js b/lib/pkg.js new file mode 100644 index 0000000000000..9ba92c930e1f0 --- /dev/null +++ b/lib/pkg.js @@ -0,0 +1,152 @@ +const PackageJson = require('@npmcli/package-json') +const BaseCommand = require('./base-command.js') +const Queryable = require('./utils/queryable.js') + +class Pkg extends BaseCommand { + static get description () { + return 'Manages your package.json' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'pkg' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return [ + 'set <key>=<value> [<key>=<value> ...]', + 'get [<key> [<key> ...]]', + 'delete <key> [<key> ...]', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'force', + 'json', + 'workspace', + 'workspaces', + ] + } + + exec (args, cb) { + this.prefix = this.npm.localPrefix + this.pkg(args).then(() => cb()).catch(cb) + } + + execWorkspaces (args, filters, cb) { + this.pkgWorkspaces(args, filters).then(() => cb()).catch(cb) + } + + async pkg (args) { + if (this.npm.config.get('global')) { + throw Object.assign( + new Error(`There's no package.json file to manage on global mode`), + { 
code: 'EPKGGLOBAL' } + ) + } + + const [cmd, ..._args] = args + switch (cmd) { + case 'get': + return this.get(_args) + case 'set': + return this.set(_args) + case 'delete': + return this.delete(_args) + default: + throw this.usageError() + } + } + + async pkgWorkspaces (args, filters) { + await this.setWorkspaces(filters) + const result = {} + for (const [workspaceName, workspacePath] of this.workspaces.entries()) { + this.prefix = workspacePath + result[workspaceName] = await this.pkg(args) + } + // when running in workspaces names, make sure to key by workspace + // name the results of each value retrieved in each ws + this.npm.output(JSON.stringify(result, null, 2)) + } + + async get (args) { + const pkgJson = await PackageJson.load(this.prefix) + + const { content } = pkgJson + let result = !args.length && content + + if (!result) { + const q = new Queryable(content) + result = q.query(args) + + // in case there's only a single result from the query + // just prints that one element to stdout + if (Object.keys(result).length === 1) + result = result[args] + } + + // only outputs if not running with workspaces config, + // in case you're retrieving info for workspaces the pkgWorkspaces + // will handle the output to make sure it get keyed by ws name + if (!this.workspaces) + this.npm.output(JSON.stringify(result, null, 2)) + + return result + } + + async set (args) { + const setError = () => + Object.assign( + new TypeError('npm pkg set expects a key=value pair of args.'), + { code: 'EPKGSET' } + ) + + if (!args.length) + throw setError() + + const force = this.npm.config.get('force') + const json = this.npm.config.get('json') + const pkgJson = await PackageJson.load(this.prefix) + const q = new Queryable(pkgJson.content) + for (const arg of args) { + const [key, ...rest] = arg.split('=') + const value = rest.join('=') + if (!key || !value) + throw setError() + + q.set(key, json ? 
JSON.parse(value) : value, { force }) + } + + pkgJson.update(q.toJSON()) + await pkgJson.save() + } + + async delete (args) { + const setError = () => + Object.assign( + new TypeError('npm pkg delete expects key args.'), + { code: 'EPKGDELETE' } + ) + + if (!args.length) + throw setError() + + const pkgJson = await PackageJson.load(this.prefix) + const q = new Queryable(pkgJson.content) + for (const key of args) { + if (!key) + throw setError() + + q.delete(key) + } + + pkgJson.update(q.toJSON()) + await pkgJson.save() + } +} + +module.exports = Pkg diff --git a/lib/prefix.js b/lib/prefix.js index e46f9c4cdd94a..172f8d8fadfc8 100644 --- a/lib/prefix.js +++ b/lib/prefix.js @@ -1,14 +1,24 @@ -const output = require('./utils/output.js') -const usageUtil = require('./utils/usage.js') +const BaseCommand = require('./base-command.js') -class Prefix { - constructor (npm) { - this.npm = npm +class Prefix extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Display prefix' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'prefix' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return ['global'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('prefix', 'npm prefix [-g]') + static get usage () { + return ['[-g]'] } exec (args, cb) { @@ -16,7 +26,7 @@ class Prefix { } async prefix (args) { - return output(this.npm.prefix) + return this.npm.output(this.npm.prefix) } } module.exports = Prefix diff --git a/lib/profile.js b/lib/profile.js index dab99092b0a0f..36e9b03dcee59 100644 --- a/lib/profile.js +++ b/lib/profile.js @@ -7,10 +7,8 @@ const qrcodeTerminal = require('qrcode-terminal') const Table = require('cli-table3') const otplease = require('./utils/otplease.js') -const output = require('./utils/output.js') const pulseTillDone = require('./utils/pulse-till-done.js') const readUserInfo = require('./utils/read-user-info.js') -const usageUtil = require('./utils/usage.js') const qrcode = url => new Promise((resolve) => qrcodeTerminal.generate(url, resolve)) @@ -38,19 +36,35 @@ const writableProfileKeys = [ 'github', ] -class Profile { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Profile extends BaseCommand { + static get description () { + return 'Change settings on your registry profile' } - get usage () { - return usageUtil( - 'profile', - 'npm profile enable-2fa [auth-only|auth-and-writes]\n', - 'npm profile disable-2fa\n', - 'npm profile get [<key>]\n', - 'npm profile set <key> <value>' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'profile' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return [ + 'enable-2fa [auth-only|auth-and-writes]', + 'disable-2fa', + 'get [<key>]', + 'set <key> <value>', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'registry', + 'json', + 'parseable', + 'otp', + ] } async completion (opts) { @@ -108,15 +122,15 @@ class Profile { async get (args) { const tfa = 'two-factor auth' - const conf = { ...this.npm.flatOptions } - - const info = await pulseTillDone.withPromise(npmProfile.get(conf)) + const info = await pulseTillDone.withPromise( + npmProfile.get(this.npm.flatOptions) + ) if (!info.cidr_whitelist) delete 
info.cidr_whitelist - if (conf.json) { - output(JSON.stringify(info, null, 2)) + if (this.npm.config.get('json')) { + this.npm.output(JSON.stringify(info, null, 2)) return } @@ -145,27 +159,27 @@ class Profile { .filter((arg) => arg.trim() !== '') .map((arg) => cleaned[arg]) .join('\t') - output(values) + this.npm.output(values) } else { - if (conf.parseable) { + if (this.npm.config.get('parseable')) { for (const key of Object.keys(info)) { if (key === 'tfa') - output(`${key}\t${cleaned[tfa]}`) + this.npm.output(`${key}\t${cleaned[tfa]}`) else - output(`${key}\t${info[key]}`) + this.npm.output(`${key}\t${info[key]}`) } } else { const table = new Table() for (const key of Object.keys(cleaned)) table.push({ [ansistyles.bright(key)]: cleaned[key] }) - output(table.toString()) + this.npm.output(table.toString()) } } } async set (args) { - const conf = { ...this.npm.flatOptions } + const conf = this.npm.flatOptions const prop = (args[0] || '').toLowerCase().trim() let value = args.length > 1 ? args.slice(1).join(' ') : null @@ -214,14 +228,14 @@ class Profile { const result = await otplease(conf, conf => npmProfile.set(newUser, conf)) - if (conf.json) - output(JSON.stringify({ [prop]: result[prop] }, null, 2)) - else if (conf.parseable) - output(prop + '\t' + result[prop]) + if (this.npm.config.get('json')) + this.npm.output(JSON.stringify({ [prop]: result[prop] }, null, 2)) + else if (this.npm.config.get('parseable')) + this.npm.output(prop + '\t' + result[prop]) else if (result[prop] != null) - output('Set', prop, 'to', result[prop]) + this.npm.output('Set', prop, 'to', result[prop]) else - output('Set', prop) + this.npm.output('Set', prop) } async enable2fa (args) { @@ -239,11 +253,10 @@ class Profile { ) } - const conf = { ...this.npm.flatOptions } - if (conf.json || conf.parseable) { + if (this.npm.config.get('json') || this.npm.config.get('parseable')) { throw new Error( 'Enabling two-factor authentication is an interactive operation and ' + - (conf.json ? 'JSON' : 'parseable') + ' output mode is not available' + (this.npm.config.get('json') ? 'JSON' : 'parseable') + ' output mode is not available' ) } @@ -255,7 +268,7 @@ class Profile { // if they're using legacy auth currently then we have to // update them to a bearer token before continuing. - const creds = this.npm.config.getCredentialsByURI(conf.registry) + const creds = this.npm.config.getCredentialsByURI(this.npm.config.get('registry')) const auth = {} if (creds.token) @@ -267,32 +280,29 @@ class Profile { auth.basic = { username: basic[0], password: basic[1] } } - if (conf.otp) - auth.otp = conf.otp - if (!auth.basic && !auth.token) { throw new Error( 'You need to be logged in to registry ' + - `${conf.registry} in order to enable 2fa` + `${this.npm.config.get('registry')} in order to enable 2fa` ) } if (auth.basic) { log.info('profile', 'Updating authentication to bearer token') const result = await npmProfile.createToken( - auth.basic.password, false, [], conf + auth.basic.password, false, [], this.npm.flatOptions ) if (!result.token) { throw new Error( - `Your registry ${conf.registry} does not seem to ` + + `Your registry ${this.npm.config.get('registry')} does not seem to ` + 'support bearer tokens. 
Bearer tokens are required for ' + 'two-factor authentication' ) } this.npm.config.setCredentialsByURI( - conf.registry, + this.npm.config.get('registry'), { token: result.token } ) await this.npm.config.save('user') @@ -303,21 +313,21 @@ class Profile { info.tfa.password = password log.info('profile', 'Determine if tfa is pending') - const userInfo = await pulseTillDone.withPromise(npmProfile.get(conf)) + const userInfo = await pulseTillDone.withPromise( + npmProfile.get(this.npm.flatOptions) + ) + const conf = { ...this.npm.flatOptions } if (userInfo && userInfo.tfa && userInfo.tfa.pending) { log.info('profile', 'Resetting two-factor authentication') await pulseTillDone.withPromise( npmProfile.set({ tfa: { password, mode: 'disable' } }, conf) ) } else if (userInfo && userInfo.tfa) { - if (conf.otp) - conf.otp = conf.otp - else { - const otp = await readUserInfo.otp( + if (!conf.otp) { + conf.otp = await readUserInfo.otp( 'Enter one-time password from your authenticator app: ' ) - conf.otp = otp } } @@ -327,7 +337,7 @@ class Profile { ) if (challenge.tfa === null) { - output('Two factor authentication mode changed to: ' + mode) + this.npm.output('Two factor authentication mode changed to: ' + mode) return } @@ -344,7 +354,7 @@ class Profile { const secret = otpauth.searchParams.get('secret') const code = await qrcode(challenge.tfa) - output( + this.npm.output( 'Scan into your authenticator app:\n' + code + '\n Or enter code:', secret ) @@ -355,17 +365,17 @@ class Profile { const result = await npmProfile.set({ tfa: [interactiveOTP] }, conf) - output( + this.npm.output( '2FA successfully enabled. Below are your recovery codes, ' + 'please print these out.' ) - output( + this.npm.output( 'You will need these to recover access to your account ' + 'if you lose your authentication device.' 
) for (const tfaCode of result.tfa) - output('\t' + tfaCode) + this.npm.output('\t' + tfaCode) } async disable2fa (args) { @@ -373,7 +383,7 @@ class Profile { const info = await pulseTillDone.withPromise(npmProfile.get(conf)) if (!info.tfa || info.tfa.pending) { - output('Two factor authentication not enabled.') + this.npm.output('Two factor authentication not enabled.') return } @@ -390,12 +400,12 @@ class Profile { tfa: { password: password, mode: 'disable' }, }, conf)) - if (conf.json) - output(JSON.stringify({ tfa: false }, null, 2)) - else if (conf.parseable) - output('tfa\tfalse') + if (this.npm.config.get('json')) + this.npm.output(JSON.stringify({ tfa: false }, null, 2)) + else if (this.npm.config.get('parseable')) + this.npm.output('tfa\tfalse') else - output('Two factor authentication disabled.') + this.npm.output('Two factor authentication disabled.') } } module.exports = Profile diff --git a/lib/prune.js b/lib/prune.js index b839301d5194c..a91276fc4fa27 100644 --- a/lib/prune.js +++ b/lib/prune.js @@ -1,18 +1,27 @@ // prune extraneous packages const Arborist = require('@npmcli/arborist') -const usageUtil = require('./utils/usage.js') const reifyFinish = require('./utils/reify-finish.js') -class Prune { - constructor (npm) { - this.npm = npm +const ArboristWorkspaceCmd = require('./workspaces/arborist-cmd.js') +class Prune extends ArboristWorkspaceCmd { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Remove extraneous packages' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'prune' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return ['omit', 'dry-run', 'json', ...super.params] } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('prune', - 'npm prune [[<@scope>/]<pkg>...] [--production]' - ) + static get usage () { + return ['[[<@scope>/]<pkg>...]'] } exec (args, cb) { @@ -21,11 +30,14 @@ class Prune { async prune () { const where = this.npm.prefix - const arb = new Arborist({ + const opts = { ...this.npm.flatOptions, path: where, - }) - await arb.prune(this.npm.flatOptions) + log: this.npm.log, + workspaces: this.workspaceNames, + } + const arb = new Arborist(opts) + await arb.prune(opts) await reifyFinish(this.npm, arb) } } diff --git a/lib/publish.js b/lib/publish.js index c8e82c44c5a3c..9c747eb5068f0 100644 --- a/lib/publish.js +++ b/lib/publish.js @@ -7,64 +7,89 @@ const runScript = require('@npmcli/run-script') const pacote = require('pacote') const npa = require('npm-package-arg') const npmFetch = require('npm-registry-fetch') +const chalk = require('chalk') -const { flatten } = require('./utils/flat-options.js') -const output = require('./utils/output.js') const otplease = require('./utils/otplease.js') -const usageUtil = require('./utils/usage.js') const { getContents, logTar } = require('./utils/tar.js') -// this is the only case in the CLI where we use the old full slow -// 'read-package-json' module, because we want to pull in all the -// defaults and metadata, like git sha's and default scripts and all that. +// for historical reasons, publishConfig in package.json can contain ANY config +// keys that npm supports in .npmrc files and elsewhere. We *may* want to +// revisit this at some point, and have a minimal set that's a SemVer-major +// change that ought to get a RFC written on it. 
+const flatten = require('./utils/config/flatten.js') + +// this is the only case in the CLI where we want to use the old full slow +// 'read-package-json' module, because we want to pull in all the defaults and +// metadata, like git sha's and default scripts and all that. const readJson = util.promisify(require('read-package-json')) -class Publish { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Publish extends BaseCommand { + static get description () { + return 'Publish a package' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'publish' } - get usage () { - return usageUtil('publish', - 'npm publish [<folder>] [--tag <tag>] [--access <public|restricted>] [--dry-run]' + - '\n\nPublishes \'.\' if no argument supplied' + - '\nSets tag `latest` if no --tag specified') + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return ['tag', 'access', 'dry-run', 'otp', 'workspace', 'workspaces'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return [ + '[<folder>]', + ] } exec (args, cb) { this.publish(args).then(() => cb()).catch(cb) } + execWorkspaces (args, filters, cb) { + this.publishWorkspaces(args, filters).then(() => cb()).catch(cb) + } + async publish (args) { if (args.length === 0) args = ['.'] if (args.length !== 1) - throw this.usage + throw this.usageError() log.verbose('publish', args) - const opts = { ...this.npm.flatOptions } - const { unicode, dryRun, json, defaultTag } = opts + const unicode = this.npm.config.get('unicode') + const dryRun = this.npm.config.get('dry-run') + const json = this.npm.config.get('json') + const defaultTag = this.npm.config.get('tag') + const ignoreScripts = this.npm.config.get('ignore-scripts') + const silent = log.level === 'silent' if (semver.validRange(defaultTag)) throw new Error('Tag name must not be a valid SemVer range: ' + defaultTag.trim()) + const opts = { ...this.npm.flatOptions } + // you can publish name@version, ./foo.tgz, etc. // even though the default is the 'file:.' cwd. const spec = npa(args[0]) let manifest = await this.getManifest(spec, opts) if (manifest.publishConfig) - Object.assign(opts, this.publishConfigToOpts(manifest.publishConfig)) + flatten(manifest.publishConfig, opts) // only run scripts for directory type publishes - if (spec.type === 'directory') { + if (spec.type === 'directory' && !ignoreScripts) { await runScript({ event: 'prepublishOnly', path: spec.fetchSpec, stdio: 'inherit', pkg: manifest, - banner: log.level !== 'silent', + banner: !silent, }) } @@ -76,7 +101,7 @@ class Publish { // note that publishConfig might have changed as well! manifest = await this.getManifest(spec, opts) if (manifest.publishConfig) - Object.assign(opts, this.publishConfigToOpts(manifest.publishConfig)) + flatten(manifest.publishConfig, opts) // note that logTar calls npmlog.notice(), so if we ARE in silent mode, // this will do nothing, but we still want it in the debuglog if it fails. 
@@ -95,13 +120,13 @@ class Publish { await otplease(opts, opts => libpub(manifest, tarballData, opts)) } - if (spec.type === 'directory') { + if (spec.type === 'directory' && !ignoreScripts) { await runScript({ event: 'publish', path: spec.fetchSpec, stdio: 'inherit', pkg: manifest, - banner: log.level !== 'silent', + banner: !silent, }) await runScript({ @@ -109,19 +134,61 @@ class Publish { path: spec.fetchSpec, stdio: 'inherit', pkg: manifest, - banner: log.level !== 'silent', + banner: !silent, }) } - const silent = log.level === 'silent' - if (!silent && json) - output(JSON.stringify(pkgContents, null, 2)) - else if (!silent) - output(`+ ${pkgContents.id}`) + if (!this.suppressOutput) { + if (!silent && json) + this.npm.output(JSON.stringify(pkgContents, null, 2)) + else if (!silent) + this.npm.output(`+ ${pkgContents.id}`) + } return pkgContents } + async publishWorkspaces (args, filters) { + // Suppresses JSON output in publish() so we can handle it here + this.suppressOutput = true + + const results = {} + const json = this.npm.config.get('json') + const silent = log.level === 'silent' + const noop = a => a + const color = this.npm.color ? chalk : { green: noop, bold: noop } + await this.setWorkspaces(filters) + + for (const [name, workspace] of this.workspaces.entries()) { + let pkgContents + try { + pkgContents = await this.publish([workspace]) + } catch (err) { + if (err.code === 'EPRIVATE') { + log.warn( + 'publish', + `Skipping workspace ${ + color.green(name) + }, marked as ${ + color.bold('private') + }` + ) + continue + } + throw err + } + // This needs to be in-line w/ the rest of the output that non-JSON + // publish generates + if (!silent && !json) + this.npm.output(`+ ${pkgContents.id}`) + else + results[name] = pkgContents + } + + if (!silent && json) + this.npm.output(JSON.stringify(results, null, 2)) + } + // if it's a directory, read it from the file system // otherwise, get the full metadata from whatever it is getManifest (spec, opts) { @@ -129,15 +196,5 @@ class Publish { return readJson(`${spec.fetchSpec}/package.json`) return pacote.manifest(spec, { ...opts, fullMetadata: true }) } - - // for historical reasons, publishConfig in package.json can contain - // ANY config keys that npm supports in .npmrc files and elsewhere. - // We *may* want to revisit this at some point, and have a minimal set - // that's a SemVer-major change that ought to get a RFC written on it. 
- publishConfigToOpts (publishConfig) { - // create a new object that inherits from the config stack - // then squash the css-case into camelCase opts, like we do - return flatten({...this.npm.config.list[0], ...publishConfig}) - } } module.exports = Publish diff --git a/lib/rebuild.js b/lib/rebuild.js index 1091b01589389..9aa0e27f87eb4 100644 --- a/lib/rebuild.js +++ b/lib/rebuild.js @@ -2,18 +2,33 @@ const { resolve } = require('path') const Arborist = require('@npmcli/arborist') const npa = require('npm-package-arg') const semver = require('semver') -const usageUtil = require('./utils/usage.js') -const output = require('./utils/output.js') const completion = require('./utils/completion/installed-deep.js') -class Rebuild { - constructor (npm) { - this.npm = npm +const ArboristWorkspaceCmd = require('./workspaces/arborist-cmd.js') +class Rebuild extends ArboristWorkspaceCmd { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Rebuild a package' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'rebuild' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'global', + 'bin-links', + 'ignore-scripts', + ...super.params, + ] } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('rebuild', 'npm rebuild [[<@scope>/]<name>[@<version>] ...]') + static get usage () { + return ['[[<@scope>/]<name>[@<version>] ...]'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ @@ -27,10 +42,12 @@ class Rebuild { async rebuild (args) { const globalTop = resolve(this.npm.globalDir, '..') - const where = this.npm.flatOptions.global ? globalTop : this.npm.prefix + const where = this.npm.config.get('global') ? 
globalTop : this.npm.prefix const arb = new Arborist({ ...this.npm.flatOptions, path: where, + // TODO when extending ReifyCmd + // workspaces: this.workspaceNames, }) if (args.length) { @@ -52,7 +69,7 @@ class Rebuild { } else await arb.rebuild() - output('rebuilt dependencies successfully') + this.npm.output('rebuilt dependencies successfully') } isNode (specs, node) { diff --git a/lib/repo.js b/lib/repo.js index 60fe6dbaf90b9..e0172d01f63d1 100644 --- a/lib/repo.js +++ b/lib/repo.js @@ -1,26 +1,40 @@ const log = require('npmlog') const pacote = require('pacote') const { URL } = require('url') -const { promisify } = require('util') const hostedFromMani = require('./utils/hosted-git-info-from-manifest.js') -const openUrl = promisify(require('./utils/open-url.js')) -const usageUtil = require('./utils/usage.js') +const openUrl = require('./utils/open-url.js') -class Repo { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Repo extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Open package repository page in the browser' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'repo' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('repo', 'npm repo [<pkgname> [<pkgname> ...]]') + static get params () { + return ['browser', 'workspace', 'workspaces'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[<pkgname> [<pkgname> ...]]'] } exec (args, cb) { this.repo(args).then(() => cb()).catch(cb) } + execWorkspaces (args, filters, cb) { + this.repoWorkspaces(args, filters).then(() => cb()).catch(cb) + } + async repo (args) { if (!args || !args.length) args = ['.'] @@ -28,6 +42,11 @@ class Repo { await Promise.all(args.map(pkg => this.get(pkg))) } + async repoWorkspaces (args, filters) { + await this.setWorkspaces(filters) + return this.repo(this.workspacePaths) + } + async get (pkg) { const opts = { ...this.npm.flatOptions, fullMetadata: true } const mani = await pacote.manifest(pkg, opts) diff --git a/lib/restart.js b/lib/restart.js index d5a7789ca92c0..716ddc909b2be 100644 --- a/lib/restart.js +++ b/lib/restart.js @@ -2,8 +2,22 @@ const LifecycleCmd = require('./utils/lifecycle-cmd.js') // This ends up calling run-script(['restart', ...args]) class Restart extends LifecycleCmd { - constructor (npm) { - super(npm, 'restart') + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Restart a package' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'restart' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'ignore-scripts', + 'script-shell', + ] } } module.exports = Restart diff --git a/lib/root.js b/lib/root.js index 8e5ac63d7b9b8..635a68e256318 100644 --- a/lib/root.js +++ b/lib/root.js @@ -1,14 +1,18 @@ -const output = require('./utils/output.js') -const usageUtil = require('./utils/usage.js') +const BaseCommand = require('./base-command.js') +class Root extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Display npm root' + } -class Root { - constructor (npm) { - this.npm = npm + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'root' } /* 
istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('root', 'npm root [-g]') + static get params () { + return ['global'] } exec (args, cb) { @@ -16,7 +20,7 @@ class Root { } async root () { - output(this.npm.dir) + this.npm.output(this.npm.dir) } } module.exports = Root diff --git a/lib/run-script.js b/lib/run-script.js index cdfd88f10f7b8..1daaeb9900de1 100644 --- a/lib/run-script.js +++ b/lib/run-script.js @@ -1,10 +1,9 @@ +const { resolve } = require('path') +const chalk = require('chalk') const runScript = require('@npmcli/run-script') const { isServerPackage } = runScript -const readJson = require('read-package-json-fast') -const { resolve } = require('path') -const output = require('./utils/output.js') +const rpj = require('read-package-json-fast') const log = require('npmlog') -const usageUtil = require('./utils/usage.js') const didYouMean = require('./utils/did-you-mean.js') const isWindowsShell = require('./utils/is-windows-shell.js') @@ -19,17 +18,40 @@ const cmdList = [ 'version', ].reduce((l, p) => l.concat(['pre' + p, p, 'post' + p]), []) -class RunScript { - constructor (npm) { - this.npm = npm +const nocolor = { + reset: s => s, + bold: s => s, + dim: s => s, + blue: s => s, + green: s => s, +} + +const BaseCommand = require('./base-command.js') +class RunScript extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Run arbitrary package scripts' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'workspace', + 'workspaces', + 'if-present', + 'ignore-scripts', + 'script-shell', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'run-script' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'run-script', - 'npm run-script <command> [-- <args>]' - ) + static get usage () { + return ['<command> [-- <args>]'] } async completion (opts) { @@ -37,7 +59,7 @@ class RunScript { if (argv.length === 2) { // find the script name const json = resolve(this.npm.localPrefix, 'package.json') - const { scripts = {} } = await readJson(json).catch(er => ({})) + const { scripts = {} } = await rpj(json).catch(er => ({})) return Object.keys(scripts) } } @@ -49,12 +71,19 @@ class RunScript { this.list(args).then(() => cb()).catch(cb) } - async run (args) { - const path = this.npm.localPrefix - const event = args.shift() - const { scriptShell } = this.npm.flatOptions + execWorkspaces (args, filters, cb) { + if (args.length) + this.runWorkspaces(args, filters).then(() => cb()).catch(cb) + else + this.listWorkspaces(args, filters).then(() => cb()).catch(cb) + } - const pkg = await readJson(`${path}/package.json`) + async run ([event, ...args], { path = this.npm.localPrefix, pkg } = {}) { + // this || undefined is because runScript will be unhappy with the default + // null value + const scriptShell = this.npm.config.get('script-shell') || undefined + + pkg = pkg || (await rpj(`${path}/package.json`)) const { scripts = {} } = pkg if (event === 'restart' && !scripts.restart) @@ -71,14 +100,13 @@ class RunScript { if (this.npm.config.get('if-present')) return - const suggestions = didYouMean(event, Object.keys(scripts)) - throw new Error(`missing script: ${event}${ - suggestions ? 
`\n${suggestions}` : ''}`) + const suggestions = await didYouMean(this.npm, path, event) + throw new Error(`Missing script: "${event}"${suggestions}\n\nTo see a list of scripts, run:\n npm run`) } // positional args only added to the main event, not pre/post const events = [[event, args]] - if (!this.npm.flatOptions.ignoreScripts) { + if (!this.npm.config.get('ignore-scripts')) { if (scripts[`pre${event}`]) events.unshift([`pre${event}`, []]) @@ -105,9 +133,11 @@ class RunScript { } } - async list () { - const path = this.npm.localPrefix - const { scripts, name } = await readJson(`${path}/package.json`) + async list (args, path) { + path = path || this.npm.localPrefix + const { scripts, name, _id } = await rpj(`${path}/package.json`) + const pkgid = _id || name + const color = this.npm.color if (!scripts) return [] @@ -116,14 +146,14 @@ class RunScript { if (log.level === 'silent') return allScripts - if (this.npm.flatOptions.json) { - output(JSON.stringify(scripts, null, 2)) + if (this.npm.config.get('json')) { + this.npm.output(JSON.stringify(scripts, null, 2)) return allScripts } - if (this.npm.flatOptions.parseable) { + if (this.npm.config.get('parseable')) { for (const [script, cmd] of Object.entries(scripts)) - output(`${script}:${cmd}`) + this.npm.output(`${script}:${cmd}`) return allScripts } @@ -136,22 +166,92 @@ class RunScript { const list = cmdList.includes(script) ? cmds : runScripts list.push(script) } + const colorize = color ? chalk : nocolor - if (cmds.length) - output(`Lifecycle scripts included in ${name}:`) + if (cmds.length) { + this.npm.output(`${ + colorize.reset(colorize.bold('Lifecycle scripts'))} included in ${ + colorize.green(pkgid)}:`) + } for (const script of cmds) - output(prefix + script + indent + scripts[script]) + this.npm.output(prefix + script + indent + colorize.dim(scripts[script])) - if (!cmds.length && runScripts.length) - output(`Scripts available in ${name} via \`npm run-script\`:`) - else if (runScripts.length) - output('\navailable via `npm run-script`:') + if (!cmds.length && runScripts.length) { + this.npm.output(`${ + colorize.bold('Scripts') + } available in ${colorize.green(pkgid)} via \`${ + colorize.blue('npm run-script')}\`:`) + } else if (runScripts.length) + this.npm.output(`\navailable via \`${colorize.blue('npm run-script')}\`:`) for (const script of runScripts) - output(prefix + script + indent + scripts[script]) + this.npm.output(prefix + script + indent + colorize.dim(scripts[script])) + this.npm.output('') return allScripts } + + async runWorkspaces (args, filters) { + const res = [] + await this.setWorkspaces(filters) + + for (const workspacePath of this.workspacePaths) { + const pkg = await rpj(`${workspacePath}/package.json`) + const runResult = await this.run(args, { + path: workspacePath, + pkg, + }).catch(err => { + log.error(`Lifecycle script \`${args[0]}\` failed with error:`) + log.error(err) + log.error(` in workspace: ${pkg._id || pkg.name}`) + log.error(` at location: ${workspacePath}`) + + const scriptMissing = err.message.startsWith('Missing script') + + // avoids exiting with error code in case there's scripts missing + // in some workspaces since other scripts might have succeeded + if (!scriptMissing) + process.exitCode = 1 + + return scriptMissing + }) + res.push(runResult) + } + + // in case **all** tests are missing, then it should exit with error code + if (res.every(Boolean)) + throw new Error(`Missing script: ${args[0]}`) + } + + async listWorkspaces (args, filters) { + await this.setWorkspaces(filters) + + 
if (log.level === 'silent') + return + + if (this.npm.config.get('json')) { + const res = {} + for (const workspacePath of this.workspacePaths) { + const { scripts, name } = await rpj(`${workspacePath}/package.json`) + res[name] = { ...scripts } + } + this.npm.output(JSON.stringify(res, null, 2)) + return + } + + if (this.npm.config.get('parseable')) { + for (const workspacePath of this.workspacePaths) { + const { scripts, name } = await rpj(`${workspacePath}/package.json`) + for (const [script, cmd] of Object.entries(scripts || {})) + this.npm.output(`${name}:${script}:${cmd}`) + } + return + } + + for (const workspacePath of this.workspacePaths) + await this.list(args, workspacePath) + } } + module.exports = RunScript diff --git a/lib/search.js b/lib/search.js index e0922b9846cdb..dfb987cc07bfd 100644 --- a/lib/search.js +++ b/lib/search.js @@ -5,8 +5,6 @@ const log = require('npmlog') const formatPackageStream = require('./search/format-package-stream.js') const packageFilter = require('./search/package-filter.js') -const output = require('./utils/output.js') -const usageUtil = require('./utils/usage.js') function prepareIncludes (args) { return args @@ -26,17 +24,38 @@ function prepareExcludes (searchexclude) { .filter(s => s) } -class Search { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Search extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Search for packages' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'search', - 'npm search [-l|--long] [--json] [--parseable] [--no-description] [search terms ...]' - ) + static get name () { + return 'search' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'long', + 'json', + 'color', + 'parseable', + 'description', + 'searchopts', + 'searchexclude', + 'registry', + 'prefer-online', + 'prefer-offline', + 'offline', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[search terms ...]'] } exec (args, cb) { @@ -83,12 +102,12 @@ class Search { p.on('data', chunk => { if (!anyOutput) anyOutput = true - output(chunk.toString('utf8')) + this.npm.output(chunk.toString('utf8')) }) await p.promise() - if (!anyOutput && !opts.json && !opts.parseable) - output('No matches found for ' + (args.map(JSON.stringify).join(' '))) + if (!anyOutput && !this.npm.config.get('json') && !this.npm.config.get('parseable')) + this.npm.output('No matches found for ' + (args.map(JSON.stringify).join(' '))) log.silly('search', 'search completed') log.clearProgress() diff --git a/lib/search/format-package-stream.js b/lib/search/format-package-stream.js index c908601144c23..c88df5eb4be04 100644 --- a/lib/search/format-package-stream.js +++ b/lib/search/format-package-stream.js @@ -43,6 +43,7 @@ class JSONOutputStream extends Minipass { end () { super.write(this._didFirst ? 
']\n' : '\n]\n') + super.end() } } diff --git a/lib/set-script.js b/lib/set-script.js index 25545898e1640..24e4d8f20f666 100644 --- a/lib/set-script.js +++ b/lib/set-script.js @@ -1,62 +1,112 @@ +const { resolve } = require('path') const log = require('npmlog') -const usageUtil = require('./utils/usage.js') -const fs = require('fs') -const parseJSON = require('json-parse-even-better-errors') const rpj = require('read-package-json-fast') +const PackageJson = require('@npmcli/package-json') -class SetScript { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class SetScript extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Set tasks in the scripts section of package.json' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('set-script', 'npm set-script [<script>] [<command>]') + static get params () { + return ['workspace', 'workspaces'] } - exec (args, cb) { - this.set(args).then(() => cb()).catch(cb) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'set-script' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[<script>] [<command>]'] + } + + async completion (opts) { + const argv = opts.conf.argv.remain + if (argv.length === 2) { + // find the script name + const json = resolve(this.npm.localPrefix, 'package.json') + const { scripts = {} } = await rpj(json).catch(er => ({})) + return Object.keys(scripts) + } } - async set (args) { + validate (args) { if (process.env.npm_lifecycle_event === 'postinstall') throw new Error('Scripts can’t set from the postinstall script') // Parse arguments if (args.length !== 2) throw new Error(`Expected 2 arguments: got ${args.length}`) + } - // Set the script - let manifest - let warn = false - try { - manifest = fs.readFileSync(this.npm.localPrefix + '/package.json', 'utf-8') - } catch (error) { - throw new Error('package.json not found') - } - try { - manifest = parseJSON(manifest) - } catch (error) { - throw new Error(`Invalid package.json: ${error}`) - } - if (!manifest.scripts) - manifest.scripts = {} - if (manifest.scripts[args[0]] && manifest.scripts[args[0]] !== args[1]) - warn = true - manifest.scripts[args[0]] = args[1] - // format content - const packageJsonInfo = await rpj(this.npm.localPrefix + '/package.json') - const { - [Symbol.for('indent')]: indent, - [Symbol.for('newline')]: newline, - } = packageJsonInfo - const format = indent === undefined ? ' ' : indent - const eol = newline === undefined ? 
'\n' : newline - const content = (JSON.stringify(manifest, null, format) + '\n') - .replace(/\n/g, eol) - fs.writeFileSync(this.npm.localPrefix + '/package.json', content) + exec (args, cb) { + this.setScript(args).then(() => cb()).catch(cb) + } + + async setScript (args) { + this.validate(args) + const warn = await this.doSetScript(this.npm.localPrefix, args[0], args[1]) if (warn) log.warn('set-script', `Script "${args[0]}" was overwritten`) } + + execWorkspaces (args, filters, cb) { + this.setScriptWorkspaces(args, filters).then(() => cb()).catch(cb) + } + + async setScriptWorkspaces (args, filters) { + this.validate(args) + await this.setWorkspaces(filters) + + for (const [name, path] of this.workspaces) { + try { + const warn = await this.doSetScript(path, args[0], args[1]) + if (warn) { + log.warn('set-script', `Script "${args[0]}" was overwritten`) + log.warn(` in workspace: ${name}`) + log.warn(` at location: ${path}`) + } + } catch (err) { + log.error('set-script', err.message) + log.error(` in workspace: ${name}`) + log.error(` at location: ${path}`) + process.exitCode = 1 + } + } + } + + // returns a Boolean that will be true if + // the requested script was overwritten + // and false if it was set as a new script + async doSetScript (path, name, value) { + let warn = false + + const pkgJson = await PackageJson.load(path) + const { scripts } = pkgJson.content + + const overwriting = + scripts + && scripts[name] + && scripts[name] !== value + + if (overwriting) + warn = true + + pkgJson.update({ + scripts: { + ...scripts, + [name]: value, + }, + }) + + await pkgJson.save() + + return warn + } } module.exports = SetScript diff --git a/lib/set.js b/lib/set.js index cbce1547e891b..74a002cd638be 100644 --- a/lib/set.js +++ b/lib/set.js @@ -1,15 +1,18 @@ -const usageUtil = require('./utils/usage.js') +const BaseCommand = require('./base-command.js') -class Set { - constructor (npm) { - this.npm = npm +class Set extends BaseCommand { + static get description () { + return 'Set a value in the npm configuration' } - get usage () { - return usageUtil( - 'set', - 'npm set <key>=<value> [<key>=<value> ...] (See `npm config`)' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'set' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['<key>=<value> [<key>=<value> ...] 
(See `npm config`)'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ diff --git a/lib/shrinkwrap.js b/lib/shrinkwrap.js index a7516131d2f7e..5d4a1ada982a4 100644 --- a/lib/shrinkwrap.js +++ b/lib/shrinkwrap.js @@ -5,16 +5,16 @@ const { unlink } = fs.promises || { unlink: util.promisify(fs.unlink) } const Arborist = require('@npmcli/arborist') const log = require('npmlog') -const usageUtil = require('./utils/usage.js') - -class Shrinkwrap { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Shrinkwrap extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Lock down dependency versions for publication' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('shrinkwrap', 'npm shrinkwrap') + static get name () { + return 'shrinkwrap' } exec (args, cb) { @@ -29,7 +29,7 @@ class Shrinkwrap { // // loadVirtual, fall back to loadActual // rename shrinkwrap file type, and tree.meta.save() - if (this.npm.flatOptions.global) { + if (this.npm.config.get('global')) { const er = new Error('`npm shrinkwrap` does not work for global packages') er.code = 'ESHRINKWRAPGLOBAL' throw er diff --git a/lib/star.js b/lib/star.js index b39d23b2c1170..bed9c5c434c92 100644 --- a/lib/star.js +++ b/lib/star.js @@ -2,21 +2,30 @@ const fetch = require('npm-registry-fetch') const log = require('npmlog') const npa = require('npm-package-arg') -const output = require('./utils/output.js') -const usageUtil = require('./utils/usage.js') const getIdentity = require('./utils/get-identity') -class Star { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Star extends BaseCommand { + static get description () { + return 'Mark your favorite packages' } - get usage () { - return usageUtil( - 'star', - 'npm star [<pkg>...]\n' + - 'npm unstar [<pkg>...]' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'star' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[<pkg>...]'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'registry', + 'unicode', + ] } exec (args, cb) { @@ -29,7 +38,7 @@ class Star { // if we're unstarring, then show an empty star image // otherwise, show the full star image - const { unicode } = this.npm.flatOptions + const unicode = this.npm.config.get('unicode') const unstar = this.npm.config.get('star.unstar') const full = unicode ? '\u2605 ' : '(*)' const empty = unicode ? 
'\u2606 ' : '( )' @@ -73,7 +82,7 @@ class Star { body, }) - output(show + ' ' + pkg.name) + this.npm.output(show + ' ' + pkg.name) log.verbose('star', data) return data } diff --git a/lib/stars.js b/lib/stars.js index fe280705b4b5c..f443445153fe2 100644 --- a/lib/stars.js +++ b/lib/stars.js @@ -1,18 +1,30 @@ const log = require('npmlog') const fetch = require('npm-registry-fetch') -const output = require('./utils/output.js') const getIdentity = require('./utils/get-identity.js') -const usageUtil = require('./utils/usage.js') -class Stars { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Stars extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'View packages marked as favorites' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'stars' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[<user>]'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil('stars', 'npm stars [<user>]') + static get params () { + return [ + 'registry', + ] } exec (args, cb) { @@ -36,7 +48,7 @@ class Stars { log.warn('stars', 'user has not starred any packages') for (const row of rows) - output(row.value) + this.npm.output(row.value) } } module.exports = Stars diff --git a/lib/start.js b/lib/start.js index 3d46a3a7ba1ab..0251bff677c6c 100644 --- a/lib/start.js +++ b/lib/start.js @@ -2,8 +2,22 @@ const LifecycleCmd = require('./utils/lifecycle-cmd.js') // This ends up calling run-script(['start', ...args]) class Start extends LifecycleCmd { - constructor (npm) { - super(npm, 'start') + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Start a package' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'start' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'ignore-scripts', + 'script-shell', + ] } } module.exports = Start diff --git a/lib/stop.js b/lib/stop.js index d7df5887e7efa..ec5fe76ad678d 100644 --- a/lib/stop.js +++ b/lib/stop.js @@ -2,8 +2,22 @@ const LifecycleCmd = require('./utils/lifecycle-cmd.js') // This ends up calling run-script(['stop', ...args]) class Stop extends LifecycleCmd { - constructor (npm) { - super(npm, 'stop') + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Stop a package' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'stop' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'ignore-scripts', + 'script-shell', + ] } } module.exports = Stop diff --git a/lib/team.js b/lib/team.js index 4947739a045c4..46d5a0977d0bb 100644 --- a/lib/team.js +++ b/lib/team.js @@ -1,24 +1,38 @@ const columns = require('cli-columns') const libteam = require('libnpmteam') -const output = require('./utils/output.js') const otplease = require('./utils/otplease.js') -const usageUtil = require('./utils/usage.js') -class Team { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Team extends BaseCommand { + static get description () { + return 'Manage organization teams and team memberships' } - get usage () { - return usageUtil( - 'team', - 'npm team create <scope:team> 
[--otp <otpcode>]\n' + - 'npm team destroy <scope:team> [--otp <otpcode>]\n' + - 'npm team add <scope:team> <user> [--otp <otpcode>]\n' + - 'npm team rm <scope:team> <user> [--otp <otpcode>]\n' + - 'npm team ls <scope>|<scope:team>\n' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'team' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return [ + 'create <scope:team> [--otp <otpcode>]', + 'destroy <scope:team> [--otp <otpcode>]', + 'add <scope:team> <user> [--otp <otpcode>]', + 'rm <scope:team> <user> [--otp <otpcode>]', + 'ls <scope>|<scope:team>', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'registry', + 'otp', + 'parseable', + 'json', + ] } async completion (opts) { @@ -66,82 +80,82 @@ class Team { async create (entity, opts) { await libteam.create(entity, opts) if (opts.json) { - output(JSON.stringify({ + this.npm.output(JSON.stringify({ created: true, team: entity, })) } else if (opts.parseable) - output(`${entity}\tcreated`) + this.npm.output(`${entity}\tcreated`) else if (!opts.silent && opts.loglevel !== 'silent') - output(`+@${entity}`) + this.npm.output(`+@${entity}`) } async destroy (entity, opts) { await libteam.destroy(entity, opts) if (opts.json) { - output(JSON.stringify({ + this.npm.output(JSON.stringify({ deleted: true, team: entity, })) } else if (opts.parseable) - output(`${entity}\tdeleted`) + this.npm.output(`${entity}\tdeleted`) else if (!opts.silent && opts.loglevel !== 'silent') - output(`-@${entity}`) + this.npm.output(`-@${entity}`) } async add (entity, user, opts) { await libteam.add(user, entity, opts) if (opts.json) { - output(JSON.stringify({ + this.npm.output(JSON.stringify({ added: true, team: entity, user, })) } else if (opts.parseable) - output(`${user}\t${entity}\tadded`) + this.npm.output(`${user}\t${entity}\tadded`) else if (!opts.silent && opts.loglevel !== 'silent') - output(`${user} added to @${entity}`) + this.npm.output(`${user} added to @${entity}`) } async rm (entity, user, opts) { await libteam.rm(user, entity, opts) if (opts.json) { - output(JSON.stringify({ + this.npm.output(JSON.stringify({ removed: true, team: entity, user, })) } else if (opts.parseable) - output(`${user}\t${entity}\tremoved`) + this.npm.output(`${user}\t${entity}\tremoved`) else if (!opts.silent && opts.loglevel !== 'silent') - output(`${user} removed from @${entity}`) + this.npm.output(`${user} removed from @${entity}`) } async listUsers (entity, opts) { const users = (await libteam.lsUsers(entity, opts)).sort() if (opts.json) - output(JSON.stringify(users, null, 2)) + this.npm.output(JSON.stringify(users, null, 2)) else if (opts.parseable) - output(users.join('\n')) + this.npm.output(users.join('\n')) else if (!opts.silent && opts.loglevel !== 'silent') { const plural = users.length === 1 ? '' : 's' const more = users.length === 0 ? 
'' : ':\n' - output(`\n@${entity} has ${users.length} user${plural}${more}`) - output(columns(users, { padding: 1 })) + this.npm.output(`\n@${entity} has ${users.length} user${plural}${more}`) + this.npm.output(columns(users, { padding: 1 })) } } async listTeams (entity, opts) { const teams = (await libteam.lsTeams(entity, opts)).sort() if (opts.json) - output(JSON.stringify(teams, null, 2)) + this.npm.output(JSON.stringify(teams, null, 2)) else if (opts.parseable) - output(teams.join('\n')) + this.npm.output(teams.join('\n')) else if (!opts.silent && opts.loglevel !== 'silent') { const plural = teams.length === 1 ? '' : 's' const more = teams.length === 0 ? '' : ':\n' - output(`\n@${entity} has ${teams.length} team${plural}${more}`) - output(columns(teams.map(t => `@${t}`), { padding: 1 })) + this.npm.output(`\n@${entity} has ${teams.length} team${plural}${more}`) + this.npm.output(columns(teams.map(t => `@${t}`), { padding: 1 })) } } } diff --git a/lib/test.js b/lib/test.js index 12292d2924ed2..e78fdf0c786c8 100644 --- a/lib/test.js +++ b/lib/test.js @@ -2,8 +2,22 @@ const LifecycleCmd = require('./utils/lifecycle-cmd.js') // This ends up calling run-script(['test', ...args]) class Test extends LifecycleCmd { - constructor (npm) { - super(npm, 'test') + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Test a package' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'test' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'ignore-scripts', + 'script-shell', + ] } exec (args, cb) { diff --git a/lib/token.js b/lib/token.js index ad6d5c6fcb82c..64015d7d40df9 100644 --- a/lib/token.js +++ b/lib/token.js @@ -5,22 +5,37 @@ const log = require('npmlog') const profile = require('npm-profile') const otplease = require('./utils/otplease.js') -const output = require('./utils/output.js') const pulseTillDone = require('./utils/pulse-till-done.js') const readUserInfo = require('./utils/read-user-info.js') -const usageUtil = require('./utils/usage.js') -class Token { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Token extends BaseCommand { + static get description () { + return 'Manage your authentication tokens' } - get usage () { - return usageUtil('token', - 'npm token list\n' + - 'npm token revoke <id|token>\n' + - 'npm token create [--read-only] [--cidr=list]' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'token' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return [ + 'list', + 'revoke <id|token>', + 'create [--read-only] [--cidr=list]', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'read-only', + 'cidr', + 'registry', + 'otp', + ] } async completion (opts) { @@ -64,12 +79,12 @@ class Token { log.info('token', 'getting list') const tokens = await pulseTillDone.withPromise(profile.listTokens(conf)) if (conf.json) { - output(JSON.stringify(tokens, null, 2)) + this.npm.output(JSON.stringify(tokens, null, 2)) return } else if (conf.parseable) { - output(['key', 'token', 'created', 'readonly', 'CIDR whitelist'].join('\t')) + this.npm.output(['key', 'token', 'created', 'readonly', 'CIDR whitelist'].join('\t')) tokens.forEach((token) => { - output([ + this.npm.output([ token.key, token.token, token.created, @@ -95,7 
+110,7 @@ class Token { token.cidr_whitelist ? token.cidr_whitelist.join(', ') : '', ]) }) - output(table.toString()) + this.npm.output(table.toString()) } async rm (args) { @@ -127,11 +142,11 @@ class Token { }) })) if (conf.json) - output(JSON.stringify(toRemove)) + this.npm.output(JSON.stringify(toRemove)) else if (conf.parseable) - output(toRemove.join('\t')) + this.npm.output(toRemove.join('\t')) else - output('Removed ' + toRemove.length + ' token' + (toRemove.length !== 1 ? 's' : '')) + this.npm.output('Removed ' + toRemove.length + ' token' + (toRemove.length !== 1 ? 's' : '')) } async create (args) { @@ -149,14 +164,14 @@ class Token { delete result.key delete result.updated if (conf.json) - output(JSON.stringify(result)) + this.npm.output(JSON.stringify(result)) else if (conf.parseable) - Object.keys(result).forEach((k) => output(k + '\t' + result[k])) + Object.keys(result).forEach((k) => this.npm.output(k + '\t' + result[k])) else { const table = new Table() for (const k of Object.keys(result)) table.push({ [ansistyles.bright(k)]: String(result[k]) }) - output(table.toString()) + this.npm.output(table.toString()) } }) } @@ -189,12 +204,6 @@ class Token { return conf } - usageError (msg) { - return Object.assign(new Error(`\nUsage: ${msg}\n\n` + this.usage), { - code: 'EUSAGE', - }) - } - invalidCIDRError (msg) { return Object.assign(new Error(msg), { code: 'EINVALIDCIDR' }) } diff --git a/lib/uninstall.js b/lib/uninstall.js index d7116e4c2dc2b..fbb2cef0fbf18 100644 --- a/lib/uninstall.js +++ b/lib/uninstall.js @@ -2,20 +2,28 @@ const { resolve } = require('path') const Arborist = require('@npmcli/arborist') const rpj = require('read-package-json-fast') -const usageUtil = require('./utils/usage.js') const reifyFinish = require('./utils/reify-finish.js') const completion = require('./utils/completion/installed-shallow.js') -class Uninstall { - constructor (npm) { - this.npm = npm +const ArboristWorkspaceCmd = require('./workspaces/arborist-cmd.js') +class Uninstall extends ArboristWorkspaceCmd { + static get description () { + return 'Remove a package' } - get usage () { - return usageUtil( - 'uninstall', - 'npm uninstall [<@scope>/]<pkg>[@<version>]... [-S|--save|--no-save]' - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'uninstall' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return ['save', ...super.params] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[<@scope>/]<pkg>...'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ @@ -29,8 +37,10 @@ class Uninstall { async uninstall (args) { // the /path/to/node_modules/.. - const { global, prefix } = this.npm.flatOptions - const path = global ? resolve(this.npm.globalDir, '..') : prefix + const global = this.npm.config.get('global') + const path = global + ? 
resolve(this.npm.globalDir, '..') + : this.npm.localPrefix if (!args.length) { if (!global) @@ -51,12 +61,15 @@ class Uninstall { } } - const arb = new Arborist({ ...this.npm.flatOptions, path }) - - await arb.reify({ + const opts = { ...this.npm.flatOptions, + path, + log: this.npm.log, rm: args, - }) + workspaces: this.workspaceNames, + } + const arb = new Arborist(opts) + await arb.reify(opts) await reifyFinish(this.npm, arb) } } diff --git a/lib/unpublish.js b/lib/unpublish.js index 34751da4a5909..32a634013a7c4 100644 --- a/lib/unpublish.js +++ b/lib/unpublish.js @@ -1,24 +1,33 @@ const path = require('path') const util = require('util') -const log = require('npmlog') const npa = require('npm-package-arg') const libaccess = require('libnpmaccess') const npmFetch = require('npm-registry-fetch') const libunpub = require('libnpmpublish').unpublish const readJson = util.promisify(require('read-package-json')) -const usageUtil = require('./utils/usage.js') -const output = require('./utils/output.js') const otplease = require('./utils/otplease.js') const getIdentity = require('./utils/get-identity.js') -class Unpublish { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Unpublish extends BaseCommand { + static get description () { + return 'Remove a package from the registry' } - get usage () { - return usageUtil('unpublish', 'npm unpublish [<@scope>/]<pkg>[@<version>]') + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'unpublish' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return ['dry-run', 'force', 'workspace', 'workspaces'] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[<@scope>/]<pkg>[@<version>]'] } async completion (args) { @@ -57,27 +66,33 @@ class Unpublish { this.unpublish(args).then(() => cb()).catch(cb) } + execWorkspaces (args, filters, cb) { + this.unpublishWorkspaces(args, filters).then(() => cb()).catch(cb) + } + async unpublish (args) { if (args.length > 1) - throw new Error(this.usage) + throw this.usageError() const spec = args.length && npa(args[0]) - const opts = this.npm.flatOptions - const { force, silent, loglevel } = opts + const force = this.npm.config.get('force') + const loglevel = this.npm.config.get('loglevel') + const silent = loglevel === 'silent' + const dryRun = this.npm.config.get('dry-run') let pkgName let pkgVersion - log.silly('unpublish', 'args[0]', args[0]) - log.silly('unpublish', 'spec', spec) + this.npm.log.silly('unpublish', 'args[0]', args[0]) + this.npm.log.silly('unpublish', 'spec', spec) - if (!spec.rawSpec && !force) { - throw new Error( + if ((!spec || !spec.rawSpec) && !force) { + throw this.usageError( 'Refusing to delete entire project.\n' + - 'Run with --force to do this.\n' + - this.usage + 'Run with --force to do this.' ) } + const opts = this.npm.flatOptions if (!spec || path.resolve(spec.name) === this.npm.localPrefix) { // if there's a package.json in the current folder, then // read the package name and version out of that. 
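
The commands touched in this diff follow one conversion pattern: the old `constructor (npm)` plus `get usage ()` pair built on `usageUtil` becomes a class extending `BaseCommand` with static `description`, `name`, `params`, and `usage` getters, reads from `flatOptions` become `this.npm.config.get(...)`, and direct `output(...)` calls become `this.npm.output(...)`. As a rough sketch (illustrative only, not part of the patch; `hello` is a made-up command and BaseCommand's constructor and help rendering are assumed from context rather than shown in this hunk), a converted command takes roughly this shape:

```js
// Illustrative sketch of the converted command shape; not part of the diff.
const BaseCommand = require('./base-command.js')

class Hello extends BaseCommand {
  static get description () {
    return 'Print a greeting'
  }

  static get name () {
    return 'hello'
  }

  static get params () {
    // params list the config keys surfaced in this command's help output
    return ['json']
  }

  static get usage () {
    return ['[<name>]']
  }

  exec (args, cb) {
    this.hello(args).then(() => cb()).catch(cb)
  }

  async hello (args) {
    // config reads go through this.npm.config.get() instead of flatOptions
    const json = this.npm.config.get('json')
    const who = args[0] || 'world'
    // output goes through this.npm.output() instead of the output.js util
    this.npm.output(json ? JSON.stringify({ hello: who }) : `hello ${who}`)
  }
}
module.exports = Hello
```
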
@@ -89,25 +104,42 @@ class Unpublish { if (err && err.code !== 'ENOENT' && err.code !== 'ENOTDIR') throw err else - throw new Error(`Usage: ${this.usage}`) + throw this.usageError() } - log.verbose('unpublish', manifest) + this.npm.log.verbose('unpublish', manifest) const { name, version, publishConfig } = manifest const pkgJsonSpec = npa.resolve(name, version) const optsWithPub = { ...opts, publishConfig } - await otplease(opts, opts => libunpub(pkgJsonSpec, optsWithPub)) + if (!dryRun) + await otplease(opts, opts => libunpub(pkgJsonSpec, optsWithPub)) pkgName = name pkgVersion = version ? `@${version}` : '' } else { - await otplease(opts, opts => libunpub(spec, opts)) + if (!dryRun) + await otplease(opts, opts => libunpub(spec, opts)) pkgName = spec.name pkgVersion = spec.type === 'version' ? `@${spec.rawSpec}` : '' } - if (!silent && loglevel !== 'silent') - output(`- ${pkgName}${pkgVersion}`) + if (!silent) + this.npm.output(`- ${pkgName}${pkgVersion}`) + } + + async unpublishWorkspaces (args, filters) { + await this.setWorkspaces(filters) + + const force = this.npm.config.get('force') + if (!force) { + throw this.usageError( + 'Refusing to delete entire project(s).\n' + + 'Run with --force to do this.' + ) + } + + for (const name of this.workspaceNames) + await this.unpublish([name]) } } module.exports = Unpublish diff --git a/lib/unstar.js b/lib/unstar.js index c814ef2b66807..36ce1daf1a5b8 100644 --- a/lib/unstar.js +++ b/lib/unstar.js @@ -1,6 +1,25 @@ const Star = require('./star.js') class Unstar extends Star { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Remove an item from your favorite packages' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'unstar' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'registry', + 'unicode', + 'otp', + ] + } + exec (args, cb) { this.npm.config.set('star.unstar', true) super.exec(args, cb) diff --git a/lib/update.js b/lib/update.js index 98043e09c7650..393c8f0f67e5f 100644 --- a/lib/update.js +++ b/lib/update.js @@ -3,21 +3,42 @@ const path = require('path') const Arborist = require('@npmcli/arborist') const log = require('npmlog') -const usageUtil = require('./utils/usage.js') const reifyFinish = require('./utils/reify-finish.js') const completion = require('./utils/completion/installed-deep.js') -class Update { - constructor (npm) { - this.npm = npm +const ArboristWorkspaceCmd = require('./workspaces/arborist-cmd.js') +class Update extends ArboristWorkspaceCmd { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Update packages' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'update' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'global', + 'global-style', + 'legacy-bundling', + 'strict-peer-deps', + 'package-lock', + 'omit', + 'ignore-scripts', + 'audit', + 'bin-links', + 'fund', + 'dry-run', + ...super.params, + ] } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'update', - 'npm update [-g] [<pkg>...]' - ) + static get usage () { + return ['[<pkg>...]'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ @@ -32,18 +53,20 @@ class Update { async update (args) { const update = args.length === 0 ? 
true : args const global = path.resolve(this.npm.globalDir, '..') - const where = this.npm.flatOptions.global + const where = this.npm.config.get('global') ? global : this.npm.prefix - if (this.npm.flatOptions.depth) { + if (this.npm.config.get('depth')) { log.warn('update', 'The --depth option no longer has any effect. See RFC0019.\n' + 'https://github.com/npm/rfcs/blob/latest/implemented/0019-remove-update-depth-option.md') } const arb = new Arborist({ ...this.npm.flatOptions, + log: this.npm.log, path: where, + workspaces: this.workspaceNames, }) await arb.reify({ update }) diff --git a/lib/utils/audit-error.js b/lib/utils/audit-error.js index ae0749ff6f0be..c58c1d16e6885 100644 --- a/lib/utils/audit-error.js +++ b/lib/utils/audit-error.js @@ -3,7 +3,6 @@ // prints a JSON version of the error if it's --json // returns 'true' if there was an error, false otherwise -const output = require('./output.js') const auditError = (npm, report) => { if (!report || !report.error) return false @@ -18,7 +17,7 @@ const auditError = (npm, report) => { const { body: errBody } = error const body = Buffer.isBuffer(errBody) ? errBody.toString() : errBody if (npm.flatOptions.json) { - output(JSON.stringify({ + npm.output(JSON.stringify({ message: error.message, method: error.method, uri: error.uri, @@ -27,7 +26,7 @@ const auditError = (npm, report) => { body, }, null, 2)) } else - output(body) + npm.output(body) throw 'audit endpoint returned an error' } diff --git a/lib/utils/cache-file.js b/lib/utils/cache-file.js deleted file mode 100644 index b33881e872ec2..0000000000000 --- a/lib/utils/cache-file.js +++ /dev/null @@ -1,66 +0,0 @@ -const npm = require('../npm.js') -const path = require('path') -const chownr = require('chownr') -const writeFileAtomic = require('write-file-atomic') -const mkdirp = require('mkdirp-infer-owner') -const fs = require('graceful-fs') - -let cache = null -let cacheUid = null -let cacheGid = null -let needChown = typeof process.getuid === 'function' - -const getCacheOwner = () => { - let st - try { - st = fs.lstatSync(cache) - } catch (er) { - if (er.code !== 'ENOENT') - throw er - - st = fs.lstatSync(path.dirname(cache)) - } - - cacheUid = st.uid - cacheGid = st.gid - - needChown = st.uid !== process.getuid() || - st.gid !== process.getgid() -} - -const writeOrAppend = (method, file, data) => { - if (!cache) - cache = npm.config.get('cache') - - // redundant if already absolute, but prevents non-absolute files - // from being written as if they're part of the cache. 
- file = path.resolve(cache, file) - - if (cacheUid === null && needChown) - getCacheOwner() - - const dir = path.dirname(file) - const firstMade = mkdirp.sync(dir) - - if (!needChown) - return method(file, data) - - let methodThrew = true - try { - method(file, data) - methodThrew = false - } finally { - // always try to leave it in the right ownership state, even on failure - // let the method error fail it instead of the chownr error, though - if (!methodThrew) - chownr.sync(firstMade || file, cacheUid, cacheGid) - else { - try { - chownr.sync(firstMade || file, cacheUid, cacheGid) - } catch (_) {} - } - } -} - -exports.append = (file, data) => writeOrAppend(fs.appendFileSync, file, data) -exports.write = (file, data) => writeOrAppend(writeFileAtomic.sync, file, data) diff --git a/lib/utils/cmd-list.js b/lib/utils/cmd-list.js index c865cdabb4014..26da539006588 100644 --- a/lib/utils/cmd-list.js +++ b/lib/utils/cmd-list.js @@ -122,6 +122,7 @@ const cmdList = [ 'diff', 'dist-tag', 'ping', + 'pkg', 'test', 'stop', diff --git a/lib/utils/completion.sh b/lib/utils/completion.sh index c549b31c96493..a3e5143991edd 100755 --- a/lib/utils/completion.sh +++ b/lib/utils/completion.sh @@ -18,11 +18,15 @@ if type complete &>/dev/null; then fi local si="$IFS" - IFS=$'\n' COMPREPLY=($(COMP_CWORD="$cword" \ + if ! IFS=$'\n' COMPREPLY=($(COMP_CWORD="$cword" \ COMP_LINE="$COMP_LINE" \ COMP_POINT="$COMP_POINT" \ npm completion -- "${words[@]}" \ - 2>/dev/null)) || return $? + 2>/dev/null)); then + local ret=$? + IFS="$si" + return $ret + fi IFS="$si" if type __ltrim_colon_completions &>/dev/null; then __ltrim_colon_completions "${words[cword]}" @@ -49,11 +53,16 @@ elif type compctl &>/dev/null; then read -l line read -ln point si="$IFS" - IFS=$'\n' reply=($(COMP_CWORD="$cword" \ + if ! IFS=$'\n' reply=($(COMP_CWORD="$cword" \ COMP_LINE="$line" \ COMP_POINT="$point" \ npm completion -- "${words[@]}" \ - 2>/dev/null)) || return $? + 2>/dev/null)); then + + local ret=$? + IFS="$si" + return $ret + fi IFS="$si" } compctl -K _npm_completion npm diff --git a/lib/utils/completion/installed-deep.js b/lib/utils/completion/installed-deep.js index b65c17e41d743..2430688612cd4 100644 --- a/lib/utils/completion/installed-deep.js +++ b/lib/utils/completion/installed-deep.js @@ -16,7 +16,7 @@ const installedDeep = async (npm) => { }) .filter(i => (i.depth - 1) <= depth) .sort((a, b) => a.depth - b.depth) - .sort((a, b) => a.depth === b.depth ? a.name.localeCompare(b.name) : 0) + .sort((a, b) => a.depth === b.depth ? a.name.localeCompare(b.name, 'en') : 0) const res = new Set() const gArb = new Arborist({ global: true, path: resolve(npm.globalDir, '..') }) diff --git a/lib/utils/config.js b/lib/utils/config.js deleted file mode 100644 index 3ca9766132f02..0000000000000 --- a/lib/utils/config.js +++ /dev/null @@ -1,394 +0,0 @@ -// defaults, types, and shorthands - -const { - typeDefs: { - semver: { type: semver }, - Umask: { type: Umask }, - url: { type: url }, - path: { type: path }, - }, -} = require('@npmcli/config') - -const { version: npmVersion } = require('../../package.json') - -const ciDetect = require('@npmcli/ci-detect') -const ciName = ciDetect() - -const isWindows = require('./is-windows.js') - -const editor = process.env.EDITOR || - process.env.VISUAL || - (isWindows ? 'notepad.exe' : 'vi') - -const shell = isWindows ? 
process.env.ComSpec || 'cmd' - : process.env.SHELL || 'sh' - -const { tmpdir, networkInterfaces } = require('os') -const getLocalAddresses = () => { - try { - return Object.values(networkInterfaces()).map( - int => int.map(({ address }) => address) - ).reduce((set, addrs) => set.concat(addrs), [undefined]) - } catch (e) { - return [undefined] - } -} - -const unicode = /UTF-?8$/i.test( - process.env.LC_ALL || process.env.LC_CTYPE || process.env.LANG -) - -// use LOCALAPPDATA on Windows, if set -// https://github.com/npm/cli/pull/899 -const cacheRoot = (isWindows && process.env.LOCALAPPDATA) || '~' -const cacheExtra = isWindows ? 'npm-cache' : '.npm' -const cache = `${cacheRoot}/${cacheExtra}` - -const defaults = { - access: null, - all: false, - 'allow-same-version': false, - also: null, - 'always-auth': false, - audit: true, - 'audit-level': null, - _auth: null, - 'auth-type': 'legacy', - before: null, - 'bin-links': true, - browser: null, - ca: null, - cache, - 'cache-lock-retries': 10, - 'cache-lock-stale': 60000, - 'cache-lock-wait': 10000, - 'cache-max': Infinity, - 'cache-min': 10, - cafile: null, - call: '', - cert: null, - 'ci-name': ciName || null, - cidr: null, - color: process.env.NO_COLOR == null, - 'commit-hooks': true, - depth: null, - description: true, - dev: false, - diff: [], - 'diff-unified': null, - 'diff-ignore-all-space': false, - 'diff-name-only': false, - 'diff-no-prefix': false, - 'diff-src-prefix': '', - 'diff-dst-prefix': '', - 'diff-text': false, - 'dry-run': false, - editor, - 'engine-strict': false, - 'fetch-retries': 2, - 'fetch-retry-factor': 10, - 'fetch-retry-maxtimeout': 60000, - 'fetch-retry-mintimeout': 10000, - 'fetch-timeout': 5 * 60 * 1000, - force: false, - 'foreground-script': false, - 'format-package-lock': true, - fund: true, - git: 'git', - 'git-tag-version': true, - global: false, - 'global-style': false, - // `globalconfig` has its default defined outside of this module - heading: 'npm', - 'https-proxy': null, - 'if-present': false, - 'ignore-prepublish': false, - 'ignore-scripts': false, - include: [], - 'include-staged': false, - 'init-author-email': '', - 'init-author-name': '', - 'init-author-url': '', - 'init-license': 'ISC', - 'init-module': '~/.npm-init.js', - 'init-version': '1.0.0', - 'init.author.email': '', - 'init.author.name': '', - 'init.author.url': '', - 'init.license': 'ISC', - 'init.module': '~/.npm-init.js', - 'init.version': '1.0.0', - json: false, - key: null, - 'legacy-bundling': false, - 'legacy-peer-deps': false, - link: false, - 'local-address': undefined, - loglevel: 'notice', - 'logs-max': 10, - long: false, - maxsockets: 50, - message: '%s', - 'node-options': null, - 'node-version': process.version, - noproxy: null, - 'npm-version': npmVersion, - offline: false, - omit: [], - only: null, - optional: true, - otp: null, - package: [], - 'package-lock': true, - 'package-lock-only': false, - parseable: false, - 'prefer-offline': false, - 'prefer-online': false, - // `prefix` has its default defined outside of this module - preid: '', - production: process.env.NODE_ENV === 'production', - progress: !ciName, - proxy: null, - 'read-only': false, - 'rebuild-bundle': true, - registry: 'https://registry.npmjs.org/', - rollback: true, - save: true, - 'save-bundle': false, - 'save-dev': false, - 'save-exact': false, - 'save-optional': false, - 'save-prefix': '^', - 'save-prod': false, - scope: '', - 'script-shell': null, - 'scripts-prepend-node-path': 'warn-only', - searchexclude: null, - searchlimit: 20, - searchopts: '', - 
searchstaleness: 15 * 60, - shell, - shrinkwrap: true, - 'sign-git-commit': false, - 'sign-git-tag': false, - 'sso-poll-frequency': 500, - 'sso-type': 'oauth', - 'strict-peer-deps': false, - 'strict-ssl': true, - tag: 'latest', - 'tag-version-prefix': 'v', - timing: false, - tmp: tmpdir(), - umask: 0, - unicode, - 'update-notifier': true, - usage: false, - 'user-agent': 'npm/{npm-version} ' + - 'node/{node-version} ' + - '{platform} ' + - '{arch} ' + - '{ci}', - userconfig: '~/.npmrc', - version: false, - versions: false, - viewer: isWindows ? 'browser' : 'man', -} - -const types = { - access: [null, 'restricted', 'public'], - all: Boolean, - 'allow-same-version': Boolean, - also: [null, 'dev', 'development'], - _auth: [null, String], - 'always-auth': Boolean, - audit: Boolean, - 'audit-level': ['low', 'moderate', 'high', 'critical', 'none', null], - 'auth-type': ['legacy', 'sso', 'saml', 'oauth'], - before: [null, Date], - 'bin-links': Boolean, - browser: [null, Boolean, String], - ca: [null, String, Array], - cache: path, - 'cache-lock-retries': Number, - 'cache-lock-stale': Number, - 'cache-lock-wait': Number, - 'cache-max': Number, - 'cache-min': Number, - cafile: path, - call: String, - cert: [null, String], - 'ci-name': [null, String], - cidr: [null, String, Array], - color: ['always', Boolean], - 'commit-hooks': Boolean, - depth: [null, Number], - description: Boolean, - dev: Boolean, - diff: [String, Array], - 'diff-unified': [null, Number], - 'diff-ignore-all-space': Boolean, - 'diff-name-only': Boolean, - 'diff-no-prefix': Boolean, - 'diff-src-prefix': String, - 'diff-dst-prefix': String, - 'diff-text': Boolean, - 'dry-run': Boolean, - editor: String, - 'engine-strict': Boolean, - 'fetch-retries': Number, - 'fetch-retry-factor': Number, - 'fetch-retry-maxtimeout': Number, - 'fetch-retry-mintimeout': Number, - 'fetch-timeout': Number, - force: Boolean, - 'foreground-script': Boolean, - 'format-package-lock': Boolean, - fund: Boolean, - git: String, - 'git-tag-version': Boolean, - global: Boolean, - 'global-style': Boolean, - globalconfig: path, - heading: String, - 'https-proxy': [null, url], - 'if-present': Boolean, - 'ignore-prepublish': Boolean, - 'ignore-scripts': Boolean, - include: [Array, 'prod', 'dev', 'optional', 'peer'], - 'include-staged': Boolean, - 'init-author-email': String, - 'init-author-name': String, - 'init-author-url': ['', url], - 'init-license': String, - 'init-module': path, - 'init-version': semver, - 'init.author.email': String, - 'init.author.name': String, - 'init.author.url': ['', url], - 'init.license': String, - 'init.module': path, - 'init.version': semver, - json: Boolean, - key: [null, String], - 'legacy-bundling': Boolean, - 'legacy-peer-deps': Boolean, - link: Boolean, - 'local-address': getLocalAddresses(), - loglevel: [ - 'silent', - 'error', - 'warn', - 'notice', - 'http', - 'timing', - 'info', - 'verbose', - 'silly', - ], - 'logs-max': Number, - long: Boolean, - maxsockets: Number, - message: String, - 'node-options': [null, String], - 'node-version': [null, semver], - noproxy: [null, String, Array], - 'npm-version': semver, - offline: Boolean, - omit: [Array, 'dev', 'optional', 'peer'], - only: [null, 'dev', 'development', 'prod', 'production'], - optional: Boolean, - otp: [null, String], - package: [String, Array], - 'package-lock': Boolean, - 'package-lock-only': Boolean, - parseable: Boolean, - 'prefer-offline': Boolean, - 'prefer-online': Boolean, - prefix: path, - preid: String, - production: Boolean, - progress: Boolean, - proxy: 
[null, false, url], // allow proxy to be disabled explicitly - 'read-only': Boolean, - 'rebuild-bundle': Boolean, - registry: [null, url], - rollback: Boolean, - save: Boolean, - 'save-bundle': Boolean, - 'save-dev': Boolean, - 'save-exact': Boolean, - 'save-optional': Boolean, - 'save-prefix': String, - 'save-prod': Boolean, - scope: String, - 'script-shell': [null, String], - 'scripts-prepend-node-path': [Boolean, 'auto', 'warn-only'], - searchexclude: [null, String], - searchlimit: Number, - searchopts: String, - searchstaleness: Number, - shell: String, - shrinkwrap: Boolean, - 'sign-git-commit': Boolean, - 'sign-git-tag': Boolean, - 'sso-poll-frequency': Number, - 'sso-type': [null, 'oauth', 'saml'], - 'strict-peer-deps': Boolean, - 'strict-ssl': Boolean, - tag: String, - 'tag-version-prefix': String, - timing: Boolean, - tmp: path, - umask: Umask, - unicode: Boolean, - 'update-notifier': Boolean, - usage: Boolean, - 'user-agent': String, - userconfig: path, - version: Boolean, - versions: Boolean, - viewer: String, -} - -const shorthands = { - '?': ['--usage'], - a: ['--all'], - B: ['--save-bundle'], - C: ['--prefix'], - c: ['--call'], - D: ['--save-dev'], - d: ['--loglevel', 'info'], - dd: ['--loglevel', 'verbose'], - ddd: ['--loglevel', 'silly'], - desc: ['--description'], - E: ['--save-exact'], - 'enjoy-by': ['--before'], - f: ['--force'], - g: ['--global'], - H: ['--usage'], - h: ['--usage'], - help: ['--usage'], - l: ['--long'], - local: ['--no-global'], - m: ['--message'], - n: ['--no-yes'], - 'no-desc': ['--no-description'], - 'no-reg': ['--no-registry'], - noreg: ['--no-registry'], - O: ['--save-optional'], - P: ['--save-prod'], - p: ['--parseable'], - porcelain: ['--parseable'], - q: ['--loglevel', 'warn'], - quiet: ['--loglevel', 'warn'], - readonly: ['--read-only'], - reg: ['--registry'], - S: ['--save'], - s: ['--loglevel', 'silent'], - silent: ['--loglevel', 'silent'], - v: ['--version'], - verbose: ['--loglevel', 'verbose'], - y: ['--yes'], -} - -module.exports = { defaults, types, shorthands } diff --git a/lib/utils/config/definition.js b/lib/utils/config/definition.js new file mode 100644 index 0000000000000..1354851326adf --- /dev/null +++ b/lib/utils/config/definition.js @@ -0,0 +1,232 @@ +// class that describes a config key we know about +// this keeps us from defining a config key and not +// providing a default, description, etc. +// +// TODO: some kind of categorization system, so we can +// say "these are for registry access", "these are for +// version resolution" etc. 
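
The header comment above states the intent of the new `Definition` class: every config key must declare at least a `key`, `type`, `default`, and `description`, and the constructor below fills in derived fields such as `defaultDescription`, `typeDescription`, `hint`, and `usage`. As a minimal sketch (illustrative only, not part of the patch), a key could be declared like this, assuming the `lib/utils/config/definition.js` module introduced here; `fund` is a real Boolean config key with default `true`, but the description wording is paraphrased for the example:

```js
// Illustrative sketch; mirrors how definitions.js uses the class.
const Definition = require('./definition.js')

// 'fund' is an existing npm config key (Boolean, default true);
// the description text here is paraphrased, not npm's actual help text.
const fund = new Definition('fund', {
  default: true,
  type: Boolean,
  description: 'Display a funding message at the end of each install.',
})

// Derived fields are filled in by the constructor:
console.log(fund.typeDescription) // 'Boolean'
console.log(fund.usage)           // '--no-fund'
console.log(fund.describe())      // markdown help block for this key
```
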
+ +const required = [ + 'type', + 'description', + 'default', + 'key', +] + +const allowed = [ + 'default', + 'defaultDescription', + 'deprecated', + 'description', + 'flatten', + 'hint', + 'key', + 'short', + 'type', + 'typeDescription', + 'usage', + 'envExport', +] + +const { + typeDefs: { + semver: { type: semver }, + Umask: { type: Umask }, + url: { type: url }, + path: { type: path }, + }, +} = require('@npmcli/config') + +class Definition { + constructor (key, def) { + this.key = key + // if it's set falsey, don't export it, otherwise we do by default + this.envExport = true + Object.assign(this, def) + this.validate() + if (!this.defaultDescription) + this.defaultDescription = describeValue(this.default) + if (!this.typeDescription) + this.typeDescription = describeType(this.type) + // hint is only used for non-boolean values + if (!this.hint) { + if (this.type === Number) + this.hint = '<number>' + else + this.hint = `<${this.key}>` + } + if (!this.usage) + this.usage = describeUsage(this) + } + + validate () { + for (const req of required) { + if (!Object.prototype.hasOwnProperty.call(this, req)) + throw new Error(`config lacks ${req}: ${this.key}`) + } + if (!this.key) + throw new Error(`config lacks key: ${this.key}`) + for (const field of Object.keys(this)) { + if (!allowed.includes(field)) + throw new Error(`config defines unknown field ${field}: ${this.key}`) + } + } + + // a textual description of this config, suitable for help output + describe () { + const description = unindent(this.description) + const noEnvExport = this.envExport ? '' : ` +This value is not exported to the environment for child processes. +` + const deprecated = !this.deprecated ? '' + : `* DEPRECATED: ${unindent(this.deprecated)}\n` + return wrapAll(`#### \`${this.key}\` + +* Default: ${unindent(this.defaultDescription)} +* Type: ${unindent(this.typeDescription)} +${deprecated} +${description} +${noEnvExport}`) + } +} + +const describeUsage = (def) => { + let key = '' + + // Single type + if (!Array.isArray(def.type)) { + if (def.short) + key = `-${def.short}|` + + if (def.type === Boolean && def.default !== false) + key = `${key}--no-${def.key}` + else + key = `${key}--${def.key}` + + if (def.type !== Boolean) + key = `${key} ${def.hint}` + + return key + } + + key = `--${def.key}` + if (def.short) + key = `-${def.short}|--${def.key}` + + // Multiple types + let types = def.type + const multiple = types.includes(Array) + const bool = types.includes(Boolean) + + // null type means optional and doesn't currently affect usage output since + // all non-optional params have defaults so we render everything as optional + types = types.filter(t => t !== null && t !== Array && t !== Boolean) + + if (!types.length) + return key + + let description + if (!types.some(t => typeof t !== 'string')) + // Specific values, use specifics given + description = `<${types.filter(d => d).join('|')}>` + else { + // Generic values, use hint + description = def.hint + } + + if (bool) { + // Currently none of our multi-type configs with boolean values default to + // false so all their hints should show `--no-`, if we ever add ones that + // default to false we can branch the logic here + key = `--no-${def.key}|${key}` + } + + const usage = `${key} ${description}` + if (multiple) + return `${usage} [${usage} ...]` + else + return usage +} + +const describeType = type => { + if (Array.isArray(type)) { + const descriptions = type + .filter(t => t !== Array) + .map(t => describeType(t)) + + // [a] => "a" + // [a, b] => "a or b" + 
// [a, b, c] => "a, b, or c" + // [a, Array] => "a (can be set multiple times)" + // [a, Array, b] => "a or b (can be set multiple times)" + const last = descriptions.length > 1 ? [descriptions.pop()] : [] + const oxford = descriptions.length > 1 ? ', or ' : ' or ' + const words = [descriptions.join(', ')].concat(last).join(oxford) + const multiple = type.includes(Array) ? ' (can be set multiple times)' + : '' + return `${words}${multiple}` + } + + // Note: these are not quite the same as the description printed + // when validation fails. In that case, we want to give the user + // a bit more information to help them figure out what's wrong. + switch (type) { + case String: + return 'String' + case Number: + return 'Number' + case Umask: + return 'Octal numeric string in range 0000..0777 (0..511)' + case Boolean: + return 'Boolean' + case Date: + return 'Date' + case path: + return 'Path' + case semver: + return 'SemVer string' + case url: + return 'URL' + default: + return describeValue(type) + } +} + +// if it's a string, quote it. otherwise, just cast to string. +const describeValue = val => + typeof val === 'string' ? JSON.stringify(val) : String(val) + +const unindent = s => { + // get the first \n followed by a bunch of spaces, and pluck off + // that many spaces from the start of every line. + const match = s.match(/\n +/) + return !match ? s.trim() : s.split(match[0]).join('\n').trim() +} + +const wrap = (s) => { + const cols = Math.min(Math.max(20, process.stdout.columns) || 80, 80) - 5 + return unindent(s).split(/[ \n]+/).reduce((left, right) => { + const last = left.split('\n').pop() + const join = last.length && last.length + right.length > cols ? '\n' : ' ' + return left + join + right + }) +} + +const wrapAll = s => { + let inCodeBlock = false + return s.split('\n\n').map(block => { + if (inCodeBlock || block.startsWith('```')) { + inCodeBlock = !block.endsWith('```') + return block + } + + if (block.charAt(0) === '*') { + return '* ' + block.substr(1).trim().split('\n* ').map(li => { + return wrap(li).replace(/\n/g, '\n ') + }).join('\n* ') + } else + return wrap(block) + }).join('\n\n') +} + +module.exports = Definition diff --git a/lib/utils/config/definitions.js b/lib/utils/config/definitions.js new file mode 100644 index 0000000000000..36b8a84a61c47 --- /dev/null +++ b/lib/utils/config/definitions.js @@ -0,0 +1,2150 @@ +const definitions = {} +module.exports = definitions + +const Definition = require('./definition.js') + +const { version: npmVersion } = require('../../../package.json') +const ciDetect = require('@npmcli/ci-detect') +const ciName = ciDetect() +const querystring = require('querystring') +const isWindows = require('../is-windows.js') +const { join } = require('path') + +// used by cafile flattening to flatOptions.ca +const fs = require('fs') +const maybeReadFile = file => { + try { + return fs.readFileSync(file, 'utf8') + } catch (er) { + if (er.code !== 'ENOENT') + throw er + return null + } +} + +const buildOmitList = obj => { + const include = obj.include || [] + const omit = obj.omit || [] + + const only = obj.only + if (/^prod(uction)?$/.test(only) || obj.production) + omit.push('dev') + else if (obj.production === false) + include.push('dev') + + if (/^dev/.test(obj.also)) + include.push('dev') + + if (obj.dev) + include.push('dev') + + if (obj.optional === false) + omit.push('optional') + else if (obj.optional === true) + include.push('optional') + + obj.omit = [...new Set(omit)].filter(type => !include.includes(type)) + obj.include = [...new 
Set(include)] + + if (obj.omit.includes('dev')) + process.env.NODE_ENV = 'production' + + return obj.omit +} + +const editor = process.env.EDITOR || + process.env.VISUAL || + (isWindows ? 'notepad.exe' : 'vi') + +const shell = isWindows ? process.env.ComSpec || 'cmd' + : process.env.SHELL || 'sh' + +const { tmpdir, networkInterfaces } = require('os') +const getLocalAddresses = () => { + try { + return Object.values(networkInterfaces()).map( + int => int.map(({ address }) => address) + ).reduce((set, addrs) => set.concat(addrs), [null]) + } catch (e) { + return [null] + } +} + +const unicode = /UTF-?8$/i.test( + process.env.LC_ALL || + process.env.LC_CTYPE || + process.env.LANG +) + +// use LOCALAPPDATA on Windows, if set +// https://github.com/npm/cli/pull/899 +const cacheRoot = (isWindows && process.env.LOCALAPPDATA) || '~' +const cacheExtra = isWindows ? 'npm-cache' : '.npm' +const cache = `${cacheRoot}/${cacheExtra}` + +const Config = require('@npmcli/config') + +// TODO: refactor these type definitions so that they are less +// weird to pull out of the config module. +// TODO: use better type definition/validation API, nopt's is so weird. +const { + typeDefs: { + semver: { type: semver }, + Umask: { type: Umask }, + url: { type: url }, + path: { type: path }, + }, +} = Config + +const define = (key, def) => { + /* istanbul ignore if - this should never happen, prevents mistakes below */ + if (definitions[key]) + throw new Error(`defining key more than once: ${key}`) + definitions[key] = new Definition(key, def) +} + +// basic flattening function, just copy it over camelCase +const flatten = (key, obj, flatOptions) => { + const camel = key.replace(/-([a-z])/g, (_0, _1) => _1.toUpperCase()) + flatOptions[camel] = obj[key] +} + +// TODO: +// Instead of having each definition provide a flatten method, +// provide the (?list of?) flat option field(s?) that it impacts. +// When that config is set, we mark the relevant flatOption fields +// dirty. Then, a getter for that field defines how we actually +// set it. +// +// So, `save-dev`, `save-optional`, `save-prod`, et al would indicate +// that they affect the `saveType` flat option. Then the config.flat +// object has a `get saveType () { ... }` that looks at the "real" +// config settings from files etc and returns the appropriate value. +// +// Getters will also (maybe?) give us a hook to audit flat option +// usage, so we can document and group these more appropriately. +// +// This will be a problem with cases where we currently do: +// const opts = { ...npm.flatOptions, foo: 'bar' }, but we can maybe +// instead do `npm.config.set('foo', 'bar')` prior to passing the +// config object down where it needs to go. +// +// This way, when we go hunting for "where does saveType come from anyway!?" +// while fixing some Arborist bug, we won't have to hunt through too +// many places. + +// Define all config keys we know about + +define('_auth', { + default: null, + type: [null, String], + description: ` + A basic-auth string to use when authenticating against the npm registry. + + Warning: This should generally not be set via a command-line option. It + is safer to use a registry-provided authentication bearer token stored in + the ~/.npmrc file by running \`npm login\`. 
+ `, + flatten, +}) + +define('access', { + default: null, + defaultDescription: ` + 'restricted' for scoped packages, 'public' for unscoped packages + `, + type: [null, 'restricted', 'public'], + description: ` + When publishing scoped packages, the access level defaults to + \`restricted\`. If you want your scoped package to be publicly viewable + (and installable) set \`--access=public\`. The only valid values for + \`access\` are \`public\` and \`restricted\`. Unscoped packages _always_ + have an access level of \`public\`. + `, + flatten, +}) + +define('all', { + default: false, + type: Boolean, + short: 'a', + description: ` + When running \`npm outdated\` and \`npm ls\`, setting \`--all\` will show + all outdated or installed packages, rather than only those directly + depended upon by the current project. + `, + flatten, +}) + +define('allow-same-version', { + default: false, + type: Boolean, + description: ` + Prevents throwing an error when \`npm version\` is used to set the new + version to the same value as the current version. + `, + flatten, +}) + +define('also', { + default: null, + type: [null, 'dev', 'development'], + description: ` + When set to \`dev\` or \`development\`, this is an alias for + \`--include=dev\`. + `, + deprecated: 'Please use --include=dev instead.', + flatten (key, obj, flatOptions) { + definitions.omit.flatten('omit', obj, flatOptions) + }, +}) + +define('audit', { + default: true, + type: Boolean, + description: ` + When "true" submit audit reports alongside the current npm command to the + default registry and all registries configured for scopes. See the + documentation for [\`npm audit\`](/commands/npm-audit) for details on what + is submitted. + `, + flatten, +}) + +define('audit-level', { + default: null, + type: [null, 'info', 'low', 'moderate', 'high', 'critical', 'none'], + description: ` + The minimum level of vulnerability for \`npm audit\` to exit with + a non-zero exit code. + `, + flatten, +}) + +define('auth-type', { + default: 'legacy', + type: ['legacy', 'sso', 'saml', 'oauth'], + deprecated: ` + This method of SSO/SAML/OAuth is deprecated and will be removed in + a future version of npm in favor of web-based login. + `, + description: ` + What authentication strategy to use with \`adduser\`/\`login\`. + `, + flatten, +}) + +define('before', { + default: null, + type: [null, Date], + description: ` + If passed to \`npm install\`, will rebuild the npm tree such that only + versions that were available **on or before** the \`--before\` time get + installed. If there's no versions available for the current set of + direct dependencies, the command will error. + + If the requested version is a \`dist-tag\` and the given tag does not + pass the \`--before\` filter, the most recent version less than or equal + to that tag will be used. For example, \`foo@latest\` might install + \`foo@1.2\` even though \`latest\` is \`2.0\`. + `, + flatten, +}) + +define('bin-links', { + default: true, + type: Boolean, + description: ` + Tells npm to create symlinks (or \`.cmd\` shims on Windows) for package + executables. + + Set to false to have it not do this. This can be used to work around the + fact that some file systems don't support symlinks, even on ostensibly + Unix systems. + `, + flatten, +}) + +define('browser', { + default: null, + defaultDescription: ` + OS X: \`"open"\`, Windows: \`"start"\`, Others: \`"xdg-open"\` + `, + type: [null, Boolean, String], + description: ` + The browser that is called by npm commands to open websites. 
+ + Set to \`false\` to suppress browser behavior and instead print urls to + terminal. + + Set to \`true\` to use default system URL opener. + `, + flatten, +}) + +define('ca', { + default: null, + type: [null, String, Array], + description: ` + The Certificate Authority signing certificate that is trusted for SSL + connections to the registry. Values should be in PEM format (Windows + calls it "Base-64 encoded X.509 (.CER)") with newlines replaced by the + string "\\n". For example: + + \`\`\`ini + ca="-----BEGIN CERTIFICATE-----\\nXXXX\\nXXXX\\n-----END CERTIFICATE-----" + \`\`\` + + Set to \`null\` to only allow "known" registrars, or to a specific CA + cert to trust only that specific signing authority. + + Multiple CAs can be trusted by specifying an array of certificates: + + \`\`\`ini + ca[]="..." + ca[]="..." + \`\`\` + + See also the \`strict-ssl\` config. + `, + flatten, +}) + +define('cache', { + default: cache, + defaultDescription: ` + Windows: \`%LocalAppData%\\npm-cache\`, Posix: \`~/.npm\` + `, + type: path, + description: ` + The location of npm's cache directory. See [\`npm + cache\`](/commands/npm-cache) + `, + flatten (key, obj, flatOptions) { + flatOptions.cache = join(obj.cache, '_cacache') + flatOptions.npxCache = join(obj.cache, '_npx') + }, +}) + +define('cache-max', { + default: Infinity, + type: Number, + description: ` + \`--cache-max=0\` is an alias for \`--prefer-online\` + `, + deprecated: ` + This option has been deprecated in favor of \`--prefer-online\` + `, + flatten (key, obj, flatOptions) { + if (obj[key] <= 0) + flatOptions.preferOnline = true + }, +}) + +define('cache-min', { + default: 0, + type: Number, + description: ` + \`--cache-min=9999 (or bigger)\` is an alias for \`--prefer-offline\`. + `, + deprecated: ` + This option has been deprecated in favor of \`--prefer-offline\`. + `, + flatten (key, obj, flatOptions) { + if (obj[key] >= 9999) + flatOptions.preferOffline = true + }, +}) + +define('cafile', { + default: null, + type: path, + description: ` + A path to a file containing one or multiple Certificate Authority signing + certificates. Similar to the \`ca\` setting, but allows for multiple + CA's, as well as for the CA information to be stored in a file on disk. + `, + flatten (key, obj, flatOptions) { + // always set to null in defaults + if (!obj.cafile) + return + + const raw = maybeReadFile(obj.cafile) + if (!raw) + return + + const delim = '-----END CERTIFICATE-----' + flatOptions.ca = raw.replace(/\r\n/g, '\n').split(delim) + .filter(section => section.trim()) + .map(section => section.trimLeft() + delim) + }, +}) + +define('call', { + default: '', + type: String, + short: 'c', + description: ` + Optional companion option for \`npm exec\`, \`npx\` that allows for + specifying a custom command to be run along with the installed packages. + + \`\`\`bash + npm exec --package yo --package generator-node --call "yo node" + \`\`\` + `, + flatten, +}) + +define('cert', { + default: null, + type: [null, String], + description: ` + A client certificate to pass when accessing the registry. Values should + be in PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with + newlines replaced by the string "\\n". For example: + + \`\`\`ini + cert="-----BEGIN CERTIFICATE-----\\nXXXX\\nXXXX\\n-----END CERTIFICATE-----" + \`\`\` + + It is _not_ the path to a certificate file (and there is no "certfile" + option). 
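Circling back to the `cafile` flattener above, here is a minimal sketch (hypothetical certificate contents, not part of this diff) of how a bundle file gets split into one `ca` entry per certificate:

```js
// hypothetical illustration of the cafile flattening logic shown above
const raw = [
  '-----BEGIN CERTIFICATE-----\nAAAA\n-----END CERTIFICATE-----\n',
  '-----BEGIN CERTIFICATE-----\nBBBB\n-----END CERTIFICATE-----\n',
].join('')
const delim = '-----END CERTIFICATE-----'
const ca = raw.replace(/\r\n/g, '\n').split(delim)
  .filter(section => section.trim())
  .map(section => section.trimLeft() + delim)
// ca is an array of two PEM strings, one per certificate in the bundle
```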
+ `, + flatten, +}) + +define('ci-name', { + default: ciName || null, + defaultDescription: ` + The name of the current CI system, or \`null\` when not on a known CI + platform. + `, + type: [null, String], + description: ` + The name of a continuous integration system. If not set explicitly, npm + will detect the current CI environment using the + [\`@npmcli/ci-detect\`](http://npm.im/@npmcli/ci-detect) module. + `, + flatten, +}) + +define('cidr', { + default: null, + type: [null, String, Array], + description: ` + This is a list of CIDR address to be used when configuring limited access + tokens with the \`npm token create\` command. + `, + flatten, +}) + +// This should never be directly used, the flattened value is the derived value +// and is sent to other modules, and is also exposed as `npm.color` for use +// inside npm itself. +define('color', { + default: !process.env.NO_COLOR || process.env.NO_COLOR === '0', + usage: '--color|--no-color|--color always', + defaultDescription: ` + true unless the NO_COLOR environ is set to something other than '0' + `, + type: ['always', Boolean], + description: ` + If false, never shows colors. If \`"always"\` then always shows colors. + If true, then only prints color codes for tty file descriptors. + `, + flatten (key, obj, flatOptions) { + flatOptions.color = !obj.color ? false + : obj.color === 'always' ? true + : process.stdout.isTTY + }, +}) + +define('commit-hooks', { + default: true, + type: Boolean, + description: ` + Run git commit hooks when using the \`npm version\` command. + `, + flatten, +}) + +define('depth', { + default: null, + defaultDescription: ` + \`Infinity\` if \`--all\` is set, otherwise \`1\` + `, + type: [null, Number], + description: ` + The depth to go when recursing packages for \`npm ls\`. + + If not set, \`npm ls\` will show only the immediate dependencies of the + root project. If \`--all\` is set, then npm will show all dependencies + by default. + `, + flatten, +}) + +define('description', { + default: true, + type: Boolean, + usage: '--no-description', + description: ` + Show the description in \`npm search\` + `, + flatten (key, obj, flatOptions) { + flatOptions.search = flatOptions.search || { limit: 20 } + flatOptions.search[key] = obj[key] + }, +}) + +define('dev', { + default: false, + type: Boolean, + description: ` + Alias for \`--include=dev\`. + `, + deprecated: 'Please use --include=dev instead.', + flatten (key, obj, flatOptions) { + definitions.omit.flatten('omit', obj, flatOptions) + }, +}) + +define('diff', { + default: [], + hint: '<pkg-name|spec|version>', + type: [String, Array], + description: ` + Define arguments to compare in \`npm diff\`. + `, + flatten, +}) + +define('diff-ignore-all-space', { + default: false, + type: Boolean, + description: ` + Ignore whitespace when comparing lines in \`npm diff\`. + `, + flatten, +}) + +define('diff-name-only', { + default: false, + type: Boolean, + description: ` + Prints only filenames when using \`npm diff\`. + `, + flatten, +}) + +define('diff-no-prefix', { + default: false, + type: Boolean, + description: ` + Do not show any source or destination prefix in \`npm diff\` output. + + Note: this causes \`npm diff\` to ignore the \`--diff-src-prefix\` and + \`--diff-dst-prefix\` configs. + `, + flatten, +}) + +define('diff-dst-prefix', { + default: 'b/', + hint: '<path>', + type: String, + description: ` + Destination prefix to be used in \`npm diff\` output. 
+ `, + flatten, +}) + +define('diff-src-prefix', { + default: 'a/', + hint: '<path>', + type: String, + description: ` + Source prefix to be used in \`npm diff\` output. + `, + flatten, +}) + +define('diff-text', { + default: false, + type: Boolean, + description: ` + Treat all files as text in \`npm diff\`. + `, + flatten, +}) + +define('diff-unified', { + default: 3, + type: Number, + description: ` + The number of lines of context to print in \`npm diff\`. + `, + flatten, +}) + +define('dry-run', { + default: false, + type: Boolean, + description: ` + Indicates that you don't want npm to make any changes and that it should + only report what it would have done. This can be passed into any of the + commands that modify your local installation, eg, \`install\`, + \`update\`, \`dedupe\`, \`uninstall\`, as well as \`pack\` and + \`publish\`. + + Note: This is NOT honored by other network related commands, eg + \`dist-tags\`, \`owner\`, etc. + `, + flatten, +}) + +define('editor', { + default: editor, + defaultDescription: ` + The EDITOR or VISUAL environment variables, or 'notepad.exe' on Windows, + or 'vim' on Unix systems + `, + type: String, + description: ` + The command to run for \`npm edit\` and \`npm config edit\`. + `, + flatten, +}) + +define('engine-strict', { + default: false, + type: Boolean, + description: ` + If set to true, then npm will stubbornly refuse to install (or even + consider installing) any package that claims to not be compatible with + the current Node.js version. + + This can be overridden by setting the \`--force\` flag. + `, + flatten, +}) + +define('fetch-retries', { + default: 2, + type: Number, + description: ` + The "retries" config for the \`retry\` module to use when fetching + packages from the registry. + + npm will retry idempotent read requests to the registry in the case + of network failures or 5xx HTTP errors. + `, + flatten (key, obj, flatOptions) { + flatOptions.retry = flatOptions.retry || {} + flatOptions.retry.retries = obj[key] + }, +}) + +define('fetch-retry-factor', { + default: 10, + type: Number, + description: ` + The "factor" config for the \`retry\` module to use when fetching + packages. + `, + flatten (key, obj, flatOptions) { + flatOptions.retry = flatOptions.retry || {} + flatOptions.retry.factor = obj[key] + }, +}) + +define('fetch-retry-maxtimeout', { + default: 60000, + defaultDescription: '60000 (1 minute)', + type: Number, + description: ` + The "maxTimeout" config for the \`retry\` module to use when fetching + packages. + `, + flatten (key, obj, flatOptions) { + flatOptions.retry = flatOptions.retry || {} + flatOptions.retry.maxTimeout = obj[key] + }, +}) + +define('fetch-retry-mintimeout', { + default: 10000, + defaultDescription: '10000 (10 seconds)', + type: Number, + description: ` + The "minTimeout" config for the \`retry\` module to use when fetching + packages. + `, + flatten (key, obj, flatOptions) { + flatOptions.retry = flatOptions.retry || {} + flatOptions.retry.minTimeout = obj[key] + }, +}) + +define('fetch-timeout', { + default: 5 * 60 * 1000, + defaultDescription: `${5 * 60 * 1000} (5 minutes)`, + type: Number, + description: ` + The maximum amount of time to wait for HTTP requests to complete. + `, + flatten (key, obj, flatOptions) { + flatOptions.timeout = obj[key] + }, +}) + +define('force', { + default: false, + type: Boolean, + short: 'f', + description: ` + Removes various protections against unfortunate side effects, common + mistakes, unnecessary performance degradation, and malicious input. 
+ + * Allow clobbering non-npm files in global installs. + * Allow the \`npm version\` command to work on an unclean git repository. + * Allow deleting the cache folder with \`npm cache clean\`. + * Allow installing packages that have an \`engines\` declaration + requiring a different version of npm. + * Allow installing packages that have an \`engines\` declaration + requiring a different version of \`node\`, even if \`--engine-strict\` + is enabled. + * Allow \`npm audit fix\` to install modules outside your stated + dependency range (including SemVer-major changes). + * Allow unpublishing all versions of a published package. + * Allow conflicting peerDependencies to be installed in the root project. + * Implicitly set \`--yes\` during \`npm init\`. + * Allow clobbering existing values in \`npm pkg\` + + If you don't have a clear idea of what you want to do, it is strongly + recommended that you do not use this option! + `, + flatten, +}) + +define('foreground-scripts', { + default: false, + type: Boolean, + description: ` + Run all build scripts (ie, \`preinstall\`, \`install\`, and + \`postinstall\`) scripts for installed packages in the foreground + process, sharing standard input, output, and error with the main npm + process. + + Note that this will generally make installs run slower, and be much + noisier, but can be useful for debugging. + `, + flatten, +}) + +define('format-package-lock', { + default: true, + type: Boolean, + description: ` + Format \`package-lock.json\` or \`npm-shrinkwrap.json\` as a human + readable file. + `, + flatten, +}) + +define('fund', { + default: true, + type: Boolean, + description: ` + When "true" displays the message at the end of each \`npm install\` + acknowledging the number of dependencies looking for funding. + See [\`npm fund\`](/commands/npm-fund) for details. + `, + flatten, +}) + +define('git', { + default: 'git', + type: String, + description: ` + The command to use for git commands. If git is installed on the + computer, but is not in the \`PATH\`, then set this to the full path to + the git binary. + `, + flatten, +}) + +define('git-tag-version', { + default: true, + type: Boolean, + description: ` + Tag the commit when using the \`npm version\` command. + `, + flatten, +}) + +define('global', { + default: false, + type: Boolean, + short: 'g', + description: ` + Operates in "global" mode, so that packages are installed into the + \`prefix\` folder instead of the current working directory. See + [folders](/configuring-npm/folders) for more on the differences in + behavior. + + * packages are installed into the \`{prefix}/lib/node_modules\` folder, + instead of the current working directory. + * bin files are linked to \`{prefix}/bin\` + * man pages are linked to \`{prefix}/share/man\` + `, + flatten, +}) + +define('global-style', { + default: false, + type: Boolean, + description: ` + Causes npm to install the package into your local \`node_modules\` folder + with the same layout it uses with the global \`node_modules\` folder. + Only your direct dependencies will show in \`node_modules\` and + everything they depend on will be flattened in their \`node_modules\` + folders. This obviously will eliminate some deduping. If used with + \`legacy-bundling\`, \`legacy-bundling\` will be preferred. + `, + flatten, +}) + +// the globalconfig has its default defined outside of this module +define('globalconfig', { + type: path, + default: '', + defaultDescription: ` + The global --prefix setting plus 'etc/npmrc'. 
For example, + '/usr/local/etc/npmrc' + `, + description: ` + The config file to read for global config options. + `, + flatten, +}) + +define('heading', { + default: 'npm', + type: String, + description: ` + The string that starts all the debugging log output. + `, + flatten, +}) + +define('https-proxy', { + default: null, + type: [null, url], + description: ` + A proxy to use for outgoing https requests. If the \`HTTPS_PROXY\` or + \`https_proxy\` or \`HTTP_PROXY\` or \`http_proxy\` environment variables + are set, proxy settings will be honored by the underlying + \`make-fetch-happen\` library. + `, + flatten, +}) + +define('if-present', { + default: false, + type: Boolean, + description: ` + If true, npm will not exit with an error code when \`run-script\` is + invoked for a script that isn't defined in the \`scripts\` section of + \`package.json\`. This option can be used when it's desirable to + optionally run a script when it's present and fail if the script fails. + This is useful, for example, when running scripts that may only apply for + some builds in an otherwise generic CI setup. + `, + flatten, +}) + +define('ignore-scripts', { + default: false, + type: Boolean, + description: ` + If true, npm does not run scripts specified in package.json files. + + Note that commands explicitly intended to run a particular script, such + as \`npm start\`, \`npm stop\`, \`npm restart\`, \`npm test\`, and \`npm + run-script\` will still run their intended script if \`ignore-scripts\` is + set, but they will *not* run any pre- or post-scripts. + `, + flatten, +}) + +define('include', { + default: [], + type: [Array, 'prod', 'dev', 'optional', 'peer'], + description: ` + Option that allows for defining which types of dependencies to install. + + This is the inverse of \`--omit=<type>\`. + + Dependency types specified in \`--include\` will not be omitted, + regardless of the order in which omit/include are specified on the + command-line. + `, + flatten (key, obj, flatOptions) { + // just call the omit flattener, it reads from obj.include + definitions.omit.flatten('omit', obj, flatOptions) + }, +}) + +define('include-staged', { + default: false, + type: Boolean, + description: ` + Allow installing "staged" published packages, as defined by [npm RFC PR + #92](https://github.com/npm/rfcs/pull/92). + + This is experimental, and not implemented by the npm public registry. + `, + flatten, +}) + +define('init-author-email', { + default: '', + type: String, + description: ` + The value \`npm init\` should use by default for the package author's + email. + `, +}) + +define('init-author-name', { + default: '', + type: String, + description: ` + The value \`npm init\` should use by default for the package author's name. + `, +}) + +define('init-author-url', { + default: '', + type: ['', url], + description: ` + The value \`npm init\` should use by default for the package author's homepage. + `, +}) + +define('init-license', { + default: 'ISC', + type: String, + description: ` + The value \`npm init\` should use by default for the package license. + `, +}) + +define('init-module', { + default: '~/.npm-init.js', + type: path, + description: ` + A module that will be loaded by the \`npm init\` command. See the + documentation for the + [init-package-json](https://github.com/npm/init-package-json) module for + more information, or [npm init](/commands/npm-init). 
+ `, +}) + +define('init-version', { + default: '1.0.0', + type: semver, + description: ` + The value that \`npm init\` should use by default for the package + version number, if not already set in package.json. + `, +}) + +// these "aliases" are historically supported in .npmrc files, unfortunately +// They should be removed in a future npm version. +define('init.author.email', { + default: '', + type: String, + deprecated: ` + Use \`--init-author-email\` instead.`, + description: ` + Alias for \`--init-author-email\` + `, +}) + +define('init.author.name', { + default: '', + type: String, + deprecated: ` + Use \`--init-author-name\` instead. + `, + description: ` + Alias for \`--init-author-name\` + `, +}) + +define('init.author.url', { + default: '', + type: ['', url], + deprecated: ` + Use \`--init-author-url\` instead. + `, + description: ` + Alias for \`--init-author-url\` + `, +}) + +define('init.license', { + default: 'ISC', + type: String, + deprecated: ` + Use \`--init-license\` instead. + `, + description: ` + Alias for \`--init-license\` + `, +}) + +define('init.module', { + default: '~/.npm-init.js', + type: path, + deprecated: ` + Use \`--init-module\` instead. + `, + description: ` + Alias for \`--init-module\` + `, +}) + +define('init.version', { + default: '1.0.0', + type: semver, + deprecated: ` + Use \`--init-version\` instead. + `, + description: ` + Alias for \`--init-version\` + `, +}) + +define('json', { + default: false, + type: Boolean, + description: ` + Whether or not to output JSON data, rather than the normal output. + + * In \`npm pkg set\` it enables parsing set values with JSON.parse() + before saving them to your \`package.json\`. + + Not supported by all npm commands. + `, + flatten, +}) + +define('key', { + default: null, + type: [null, String], + description: ` + A client key to pass when accessing the registry. Values should be in + PEM format with newlines replaced by the string "\\n". For example: + + \`\`\`ini + key="-----BEGIN PRIVATE KEY-----\\nXXXX\\nXXXX\\n-----END PRIVATE KEY-----" + \`\`\` + + It is _not_ the path to a key file (and there is no "keyfile" option). + `, + flatten, +}) + +define('legacy-bundling', { + default: false, + type: Boolean, + description: ` + Causes npm to install the package such that versions of npm prior to 1.4, + such as the one included with node 0.8, can install the package. This + eliminates all automatic deduping. If used with \`global-style\` this + option will be preferred. + `, + flatten, +}) + +define('legacy-peer-deps', { + default: false, + type: Boolean, + description: ` + Causes npm to completely ignore \`peerDependencies\` when building a + package tree, as in npm versions 3 through 6. + + If a package cannot be installed because of overly strict + \`peerDependencies\` that collide, it provides a way to move forward + resolving the situation. + + This differs from \`--omit=peer\`, in that \`--omit=peer\` will avoid + unpacking \`peerDependencies\` on disk, but will still design a tree such + that \`peerDependencies\` _could_ be unpacked in a correct place. + + Use of \`legacy-peer-deps\` is not recommended, as it will not enforce + the \`peerDependencies\` contract that meta-dependencies may rely on. + `, + flatten, +}) + +define('link', { + default: false, + type: Boolean, + description: ` + Used with \`npm ls\`, limiting output to only those packages that are + linked. 
+ `, +}) + +define('local-address', { + default: null, + type: getLocalAddresses(), + typeDescription: 'IP Address', + description: ` + The IP address of the local interface to use when making connections to + the npm registry. Must be IPv4 in versions of Node prior to 0.12. + `, + flatten, +}) + +define('location', { + default: 'user', + short: 'L', + type: [ + 'global', + 'user', + 'project', + ], + defaultDescription: ` + "user" unless \`--global\` is passed, which will also set this value to "global" + `, + description: ` + When passed to \`npm config\` this refers to which config file to use. + `, + // NOTE: the flattener here deliberately does not alter the value of global + // for now, this is to avoid inadvertently causing any breakage. the value of + // global, however, does modify this flag. + flatten (key, obj, flatOptions) { + // if global is set, we override ourselves + if (obj.global) + obj.location = 'global' + flatOptions.location = obj.location + }, +}) + +define('loglevel', { + default: 'notice', + type: [ + 'silent', + 'error', + 'warn', + 'notice', + 'http', + 'timing', + 'info', + 'verbose', + 'silly', + ], + description: ` + What level of logs to report. On failure, *all* logs are written to + \`npm-debug.log\` in the current working directory. + + Any logs of a higher level than the setting are shown. The default is + "notice". + + See also the \`foreground-scripts\` config. + `, +}) + +define('logs-max', { + default: 10, + type: Number, + description: ` + The maximum number of log files to store. + `, +}) + +define('long', { + default: false, + type: Boolean, + short: 'l', + description: ` + Show extended information in \`ls\`, \`search\`, and \`help-search\`. + `, +}) + +define('maxsockets', { + default: 15, + type: Number, + description: ` + The maximum number of connections to use per origin (protocol/host/port + combination). + `, + flatten (key, obj, flatOptions) { + flatOptions.maxSockets = obj[key] + }, +}) + +define('message', { + default: '%s', + type: String, + short: 'm', + description: ` + Commit message which is used by \`npm version\` when creating version commit. + + Any "%s" in the message will be replaced with the version number. + `, + flatten, +}) + +define('node-options', { + default: null, + type: [null, String], + description: ` + Options to pass through to Node.js via the \`NODE_OPTIONS\` environment + variable. This does not impact how npm itself is executed but it does + impact how lifecycle scripts are called. + `, +}) + +define('node-version', { + default: process.version, + defaultDescription: 'Node.js `process.version` value', + type: semver, + description: ` + The node version to use when checking a package's \`engines\` setting. + `, + flatten, +}) + +define('noproxy', { + default: '', + defaultDescription: ` + The value of the NO_PROXY environment variable + `, + type: [String, Array], + description: ` + Domain extensions that should bypass any proxies. + + Also accepts a comma-delimited string. + `, + flatten (key, obj, flatOptions) { + if (Array.isArray(obj[key])) + flatOptions.noProxy = obj[key].join(',') + else + flatOptions.noProxy = obj[key] + }, +}) + +define('npm-version', { + default: npmVersion, + defaultDescription: 'Output of `npm --version`', + type: semver, + description: ` + The npm version to use when checking a package's \`engines\` setting. + `, + flatten, +}) + +define('offline', { + default: false, + type: Boolean, + description: ` + Force offline mode: no network requests will be done during install. 
To allow + the CLI to fill in missing cache data, see \`--prefer-offline\`. + `, + flatten, +}) + +define('omit', { + default: process.env.NODE_ENV === 'production' ? ['dev'] : [], + defaultDescription: ` + 'dev' if the \`NODE_ENV\` environment variable is set to 'production', + otherwise empty. + `, + type: [Array, 'dev', 'optional', 'peer'], + description: ` + Dependency types to omit from the installation tree on disk. + + Note that these dependencies _are_ still resolved and added to the + \`package-lock.json\` or \`npm-shrinkwrap.json\` file. They are just + not physically installed on disk. + + If a package type appears in both the \`--include\` and \`--omit\` + lists, then it will be included. + + If the resulting omit list includes \`'dev'\`, then the \`NODE_ENV\` + environment variable will be set to \`'production'\` for all lifecycle + scripts. + `, + flatten (key, obj, flatOptions) { + flatOptions.omit = buildOmitList(obj) + }, +}) + +define('only', { + default: null, + type: [null, 'prod', 'production'], + deprecated: ` + Use \`--omit=dev\` to omit dev dependencies from the install. + `, + description: ` + When set to \`prod\` or \`production\`, this is an alias for + \`--omit=dev\`. + `, + flatten (key, obj, flatOptions) { + definitions.omit.flatten('omit', obj, flatOptions) + }, +}) + +define('optional', { + default: null, + type: [null, Boolean], + deprecated: ` + Use \`--omit=optional\` to exclude optional dependencies, or + \`--include=optional\` to include them. + + Default value does install optional deps unless otherwise omitted. + `, + description: ` + Alias for --include=optional or --omit=optional + `, + flatten (key, obj, flatOptions) { + definitions.omit.flatten('omit', obj, flatOptions) + }, +}) + +define('otp', { + default: null, + type: [null, String], + description: ` + This is a one-time password from a two-factor authenticator. It's needed + when publishing or changing package permissions with \`npm access\`. + + If not set, and a registry response fails with a challenge for a one-time + password, npm will prompt on the command line for one. + `, + flatten, +}) + +define('package', { + default: [], + hint: '<pkg>[@<version>]', + type: [String, Array], + description: ` + The package to install for [\`npm exec\`](/commands/npm-exec) + `, + flatten, +}) + +define('package-lock', { + default: true, + type: Boolean, + description: ` + If set to false, then ignore \`package-lock.json\` files when installing. + This will also prevent _writing_ \`package-lock.json\` if \`save\` is + true. + + When package package-locks are disabled, automatic pruning of extraneous + modules will also be disabled. To remove extraneous modules with + package-locks disabled use \`npm prune\`. + `, + flatten, +}) + +define('package-lock-only', { + default: false, + type: Boolean, + description: ` + If set to true, the current operation will only use the \`package-lock.json\`, + ignoring \`node_modules\`. + + For \`update\` this means only the \`package-lock.json\` will be updated, + instead of checking \`node_modules\` and downloading dependencies. + + For \`list\` this means the output will be based on the tree described by the + \`package-lock.json\`, rather than the contents of \`node_modules\`. + `, + flatten, +}) + +define('pack-destination', { + default: '.', + type: String, + description: ` + Directory in which \`npm pack\` will save tarballs. 
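Looking back at the `omit`/`include` interplay described above: per the documented behavior, a dependency type listed in both `--omit` and `--include` ends up included. A rough sketch with hypothetical flags, not part of this diff:

```js
// hypothetical: npm install --omit=dev --omit=optional --include=dev
const obj = { omit: ['dev', 'optional'], include: ['dev'] }
// per the description above, 'dev' appears in both lists and is therefore
// kept; the effective omit list used for flatOptions.omit is just
// ['optional'], and NODE_ENV is left alone because 'dev' is not omitted
```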
+ `, +}) + +define('parseable', { + default: false, + type: Boolean, + short: 'p', + description: ` + Output parseable results from commands that write to standard output. For + \`npm search\`, this will be tab-separated table format. + `, + flatten, +}) + +define('prefer-offline', { + default: false, + type: Boolean, + description: ` + If true, staleness checks for cached data will be bypassed, but missing + data will be requested from the server. To force full offline mode, use + \`--offline\`. + `, + flatten, +}) + +define('prefer-online', { + default: false, + type: Boolean, + description: ` + If true, staleness checks for cached data will be forced, making the CLI + look for updates immediately even for fresh package data. + `, + flatten, +}) + +// `prefix` has its default defined outside of this module +define('prefix', { + type: path, + short: 'C', + default: '', + defaultDescription: ` + In global mode, the folder where the node executable is installed. In + local mode, the nearest parent folder containing either a package.json + file or a node_modules folder. + `, + description: ` + The location to install global items. If set on the command line, then + it forces non-global commands to run in the specified folder. + `, +}) + +define('preid', { + default: '', + hint: 'prerelease-id', + type: String, + description: ` + The "prerelease identifier" to use as a prefix for the "prerelease" part + of a semver. Like the \`rc\` in \`1.2.0-rc.8\`. + `, + flatten, +}) + +define('production', { + default: null, + type: [null, Boolean], + deprecated: 'Use `--omit=dev` instead.', + description: 'Alias for `--omit=dev`', + flatten (key, obj, flatOptions) { + definitions.omit.flatten('omit', obj, flatOptions) + }, +}) + +define('progress', { + default: !ciName, + defaultDescription: ` + \`true\` unless running in a known CI system + `, + type: Boolean, + description: ` + When set to \`true\`, npm will display a progress bar during time + intensive operations, if \`process.stderr\` is a TTY. + + Set to \`false\` to suppress the progress bar. + `, +}) + +define('proxy', { + default: null, + type: [null, false, url], // allow proxy to be disabled explicitly + description: ` + A proxy to use for outgoing http requests. If the \`HTTP_PROXY\` or + \`http_proxy\` environment variables are set, proxy settings will be + honored by the underlying \`request\` library. + `, + flatten, +}) + +define('read-only', { + default: false, + type: Boolean, + description: ` + This is used to mark a token as unable to publish when configuring + limited access tokens with the \`npm token create\` command. + `, + flatten, +}) + +define('rebuild-bundle', { + default: true, + type: Boolean, + description: ` + Rebuild bundled dependencies after installation. + `, + flatten, +}) + +define('registry', { + default: 'https://registry.npmjs.org/', + type: url, + description: ` + The base URL of the npm registry. + `, + flatten, +}) + +define('save', { + default: true, + usage: '-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer', + type: Boolean, + short: 'S', + description: ` + Save installed packages to a package.json file as dependencies. + + When used with the \`npm rm\` command, removes the dependency from + package.json. + `, + flatten, +}) + +define('save-bundle', { + default: false, + type: Boolean, + short: 'B', + description: ` + If a package would be saved at install time by the use of \`--save\`, + \`--save-dev\`, or \`--save-optional\`, then also put it in the + \`bundleDependencies\` list. 
+
+    Ignore if \`--save-peer\` is set, since peerDependencies cannot be bundled.
+  `,
+  flatten (key, obj, flatOptions) {
+    // XXX update arborist to just ignore it if resulting saveType is peer
+    // otherwise this won't have the expected effect:
+    //
+    // npm config set save-peer true
+    // npm i foo --save-bundle --save-prod <-- should bundle
+    flatOptions.saveBundle = obj['save-bundle'] && !obj['save-peer']
+  },
+})
+
+// XXX: We should really deprecate all these `--save-blah` switches
+// in favor of a single `--save-type` option.  The unfortunate shortcut
+// we took for `--save-peer --save-optional` being `--save-type=peerOptional`
+// makes this tricky, and likely a breaking change.
+
+define('save-dev', {
+  default: false,
+  type: Boolean,
+  short: 'D',
+  description: `
+    Save installed packages to a package.json file as \`devDependencies\`.
+  `,
+  flatten (key, obj, flatOptions) {
+    if (!obj[key]) {
+      if (flatOptions.saveType === 'dev')
+        delete flatOptions.saveType
+      return
+    }
+
+    flatOptions.saveType = 'dev'
+  },
+})
+
+define('save-exact', {
+  default: false,
+  type: Boolean,
+  short: 'E',
+  description: `
+    Dependencies saved to package.json will be configured with an exact
+    version rather than using npm's default semver range operator.
+  `,
+  flatten (key, obj, flatOptions) {
+    // just call the save-prefix flattener, it reads from obj['save-exact']
+    definitions['save-prefix'].flatten('save-prefix', obj, flatOptions)
+  },
+})
+
+define('save-optional', {
+  default: false,
+  type: Boolean,
+  short: 'O',
+  description: `
+    Save installed packages to a package.json file as
+    \`optionalDependencies\`.
+  `,
+  flatten (key, obj, flatOptions) {
+    if (!obj[key]) {
+      if (flatOptions.saveType === 'optional')
+        delete flatOptions.saveType
+      else if (flatOptions.saveType === 'peerOptional')
+        flatOptions.saveType = 'peer'
+      return
+    }
+
+    if (flatOptions.saveType === 'peerOptional')
+      return
+
+    if (flatOptions.saveType === 'peer')
+      flatOptions.saveType = 'peerOptional'
+    else
+      flatOptions.saveType = 'optional'
+  },
+})
+
+define('save-peer', {
+  default: false,
+  type: Boolean,
+  description: `
+    Save installed packages to a package.json file as \`peerDependencies\`.
+  `,
+  flatten (key, obj, flatOptions) {
+    if (!obj[key]) {
+      if (flatOptions.saveType === 'peer')
+        delete flatOptions.saveType
+      else if (flatOptions.saveType === 'peerOptional')
+        flatOptions.saveType = 'optional'
+      return
+    }
+
+    if (flatOptions.saveType === 'peerOptional')
+      return
+
+    if (flatOptions.saveType === 'optional')
+      flatOptions.saveType = 'peerOptional'
+    else
+      flatOptions.saveType = 'peer'
+  },
+})
+
+define('save-prefix', {
+  default: '^',
+  type: String,
+  description: `
+    Configure how versions of packages installed to a package.json file via
+    \`--save\` or \`--save-dev\` get prefixed.
+
+    For example if a package has version \`1.2.3\`, by default its version is
+    set to \`^1.2.3\` which allows minor upgrades for that package, but after
+    \`npm config set save-prefix='~'\` it would be set to \`~1.2.3\` which
+    only allows patch upgrades.
+  `,
+  flatten (key, obj, flatOptions) {
+    flatOptions.savePrefix = obj['save-exact'] ? '' : obj['save-prefix']
+    obj['save-prefix'] = flatOptions.savePrefix
+  },
+})
+
+define('save-prod', {
+  default: false,
+  type: Boolean,
+  short: 'P',
+  description: `
+    Save installed packages into \`dependencies\` specifically.
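The `save-dev`/`save-optional`/`save-peer` flatteners above amount to a small state machine over `flatOptions.saveType`. A minimal sketch, assuming it runs inside this module where `definitions` is in scope (hypothetical flags, not part of this diff):

```js
// hypothetical illustration of the saveType state machine above
const flatOptions = {}
definitions['save-peer'].flatten('save-peer', { 'save-peer': true }, flatOptions)
// flatOptions.saveType === 'peer'
definitions['save-optional'].flatten('save-optional', { 'save-optional': true }, flatOptions)
// flatOptions.saveType === 'peerOptional' -- the combined peer+optional case
```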
This is + useful if a package already exists in \`devDependencies\` or + \`optionalDependencies\`, but you want to move it to be a non-optional + production dependency. + + This is the default behavior if \`--save\` is true, and neither + \`--save-dev\` or \`--save-optional\` are true. + `, + flatten (key, obj, flatOptions) { + if (!obj[key]) { + if (flatOptions.saveType === 'prod') + delete flatOptions.saveType + return + } + + flatOptions.saveType = 'prod' + }, +}) + +define('scope', { + default: '', + defaultDescription: ` + the scope of the current project, if any, or "" + `, + type: String, + hint: '<@scope>', + description: ` + Associate an operation with a scope for a scoped registry. + + Useful when logging in to or out of a private registry: + + \`\`\` + # log in, linking the scope to the custom registry + npm login --scope=@mycorp --registry=https://registry.mycorp.com + + # log out, removing the link and the auth token + npm logout --scope=@mycorp + \`\`\` + + This will cause \`@mycorp\` to be mapped to the registry for future + installation of packages specified according to the pattern + \`@mycorp/package\`. + + This will also cause \`npm init\` to create a scoped package. + + \`\`\` + # accept all defaults, and create a package named "@foo/whatever", + # instead of just named "whatever" + npm init --scope=@foo --yes + \`\`\` + `, + flatten (key, obj, flatOptions) { + const value = obj[key] + flatOptions.projectScope = value && !/^@/.test(value) ? `@${value}` : value + }, +}) + +define('script-shell', { + default: null, + defaultDescription: ` + '/bin/sh' on POSIX systems, 'cmd.exe' on Windows + `, + type: [null, String], + description: ` + The shell to use for scripts run with the \`npm exec\`, + \`npm run\` and \`npm init <pkg>\` commands. + `, + flatten (key, obj, flatOptions) { + flatOptions.scriptShell = obj[key] || undefined + }, +}) + +define('searchexclude', { + default: '', + type: String, + description: ` + Space-separated options that limit the results from search. + `, + flatten (key, obj, flatOptions) { + flatOptions.search = flatOptions.search || { limit: 20 } + flatOptions.search.exclude = obj[key] + }, +}) + +define('searchlimit', { + default: 20, + type: Number, + description: ` + Number of items to limit search results to. Will not apply at all to + legacy searches. + `, + flatten (key, obj, flatOptions) { + flatOptions.search = flatOptions.search || {} + flatOptions.search.limit = obj[key] + }, +}) + +define('searchopts', { + default: '', + type: String, + description: ` + Space-separated options that are always passed to search. + `, + flatten (key, obj, flatOptions) { + flatOptions.search = flatOptions.search || { limit: 20 } + flatOptions.search.opts = querystring.parse(obj[key]) + }, +}) + +define('searchstaleness', { + default: 15 * 60, + type: Number, + description: ` + The age of the cache, in seconds, before another registry request is made + if using legacy search endpoint. + `, + flatten (key, obj, flatOptions) { + flatOptions.search = flatOptions.search || { limit: 20 } + flatOptions.search.staleness = obj[key] + }, +}) + +define('shell', { + default: shell, + defaultDescription: ` + SHELL environment variable, or "bash" on Posix, or "cmd.exe" on Windows + `, + type: String, + description: ` + The shell to run for the \`npm explore\` command. + `, + flatten, +}) + +define('shrinkwrap', { + default: true, + type: Boolean, + deprecated: ` + Use the --package-lock setting instead. 
+  `,
+  description: `
+    Alias for --package-lock
+  `,
+  flatten (key, obj, flatOptions) {
+    obj['package-lock'] = obj.shrinkwrap
+    definitions['package-lock'].flatten('package-lock', obj, flatOptions)
+  },
+})
+
+define('sign-git-commit', {
+  default: false,
+  type: Boolean,
+  description: `
+    If set to true, then the \`npm version\` command will commit the new
+    package version using \`-S\` to add a signature.
+
+    Note that git requires you to have set up GPG keys in your git configs
+    for this to work properly.
+  `,
+  flatten,
+})
+
+define('sign-git-tag', {
+  default: false,
+  type: Boolean,
+  description: `
+    If set to true, then the \`npm version\` command will tag the version
+    using \`-s\` to add a signature.
+
+    Note that git requires you to have set up GPG keys in your git configs
+    for this to work properly.
+  `,
+  flatten,
+})
+
+define('sso-poll-frequency', {
+  default: 500,
+  type: Number,
+  deprecated: `
+    The --auth-type method of SSO/SAML/OAuth will be removed in a future
+    version of npm in favor of web-based login.
+  `,
+  description: `
+    When used with SSO-enabled \`auth-type\`s, configures how regularly the
+    registry should be polled while the user is completing authentication.
+  `,
+  flatten,
+})
+
+define('sso-type', {
+  default: 'oauth',
+  type: [null, 'oauth', 'saml'],
+  deprecated: `
+    The --auth-type method of SSO/SAML/OAuth will be removed in a future
+    version of npm in favor of web-based login.
+  `,
+  description: `
+    If \`--auth-type=sso\`, the type of SSO to use.
+  `,
+  flatten,
+})
+
+define('strict-peer-deps', {
+  default: false,
+  type: Boolean,
+  description: `
+    If set to \`true\`, and \`--legacy-peer-deps\` is not set, then _any_
+    conflicting \`peerDependencies\` will be treated as an install failure,
+    even if npm could reasonably guess the appropriate resolution based on
+    non-peer dependency relationships.
+
+    By default, conflicting \`peerDependencies\` deep in the dependency graph
+    will be resolved using the nearest non-peer dependency specification,
+    even if doing so will result in some packages receiving a peer dependency
+    outside the range set in their package's \`peerDependencies\` object.
+
+    When such an override is performed, a warning is printed, explaining the
+    conflict and the packages involved.  If \`--strict-peer-deps\` is set,
+    then this warning is treated as a failure.
+  `,
+  flatten,
+})
+
+define('strict-ssl', {
+  default: true,
+  type: Boolean,
+  description: `
+    Whether or not to do SSL key validation when making requests to the
+    registry via https.
+
+    See also the \`ca\` config.
+  `,
+  flatten (key, obj, flatOptions) {
+    flatOptions.strictSSL = obj[key]
+  },
+})
+
+define('tag', {
+  default: 'latest',
+  type: String,
+  description: `
+    If you ask npm to install a package and don't tell it a specific version,
+    then it will install the specified tag.
+
+    Also the tag that is added to the package@version specified by the \`npm
+    tag\` command, if no explicit tag is given.
+
+    When used by the \`npm diff\` command, this is the tag used to fetch the
+    tarball that will be compared with the local files by default.
+  `,
+  flatten (key, obj, flatOptions) {
+    flatOptions.defaultTag = obj[key]
+  },
+})
+
+define('tag-version-prefix', {
+  default: 'v',
+  type: String,
+  description: `
+    If set, alters the prefix used when tagging a new version when performing
+    a version increment using \`npm-version\`. To remove the prefix
+    altogether, set it to the empty string: \`""\`.
+ + Because other tools may rely on the convention that npm version tags look + like \`v1.0.0\`, _only use this property if it is absolutely necessary_. + In particular, use care when overriding this setting for public packages. + `, + flatten, +}) + +define('timing', { + default: false, + type: Boolean, + description: ` + If true, writes an \`npm-debug\` log to \`_logs\` and timing information + to \`_timing.json\`, both in your cache, even if the command completes + successfully. \`_timing.json\` is a newline delimited list of JSON + objects. + + You can quickly view it with this [json](https://npm.im/json) command + line: \`npm exec -- json -g < ~/.npm/_timing.json\`. + `, +}) + +define('tmp', { + default: tmpdir(), + defaultDescription: ` + The value returned by the Node.js \`os.tmpdir()\` method + <https://nodejs.org/api/os.html#os_os_tmpdir> + `, + type: path, + deprecated: ` + This setting is no longer used. npm stores temporary files in a special + location in the cache, and they are managed by + [\`cacache\`](http://npm.im/cacache). + `, + description: ` + Historically, the location where temporary files were stored. No longer + relevant. + `, +}) + +define('umask', { + default: 0, + type: Umask, + description: ` + The "umask" value to use when setting the file creation mode on files and + folders. + + Folders and executables are given a mode which is \`0o777\` masked + against this value. Other files are given a mode which is \`0o666\` + masked against this value. + + Note that the underlying system will _also_ apply its own umask value to + files and folders that are created, and npm does not circumvent this, but + rather adds the \`--umask\` config to it. + + Thus, the effective default umask value on most POSIX systems is 0o22, + meaning that folders and executables are created with a mode of 0o755 and + other files are created with a mode of 0o644. + `, + flatten, +}) + +define('unicode', { + default: unicode, + defaultDescription: ` + false on windows, true on mac/unix systems with a unicode locale, as + defined by the \`LC_ALL\`, \`LC_CTYPE\`, or \`LANG\` environment variables. + `, + type: Boolean, + description: ` + When set to true, npm uses unicode characters in the tree output. When + false, it uses ascii characters instead of unicode glyphs. + `, +}) + +define('update-notifier', { + default: true, + type: Boolean, + description: ` + Set to false to suppress the update notification when using an older + version of npm than the latest. + `, +}) + +define('usage', { + default: false, + type: Boolean, + short: ['?', 'H', 'h'], + description: ` + Show short usage output about the command specified. + `, +}) + +define('user-agent', { + default: 'npm/{npm-version} ' + + 'node/{node-version} ' + + '{platform} ' + + '{arch} ' + + 'workspaces/{workspaces} ' + + '{ci}', + type: String, + description: ` + Sets the User-Agent request header. The following fields are replaced + with their actual counterparts: + + * \`{npm-version}\` - The npm version in use + * \`{node-version}\` - The Node.js version in use + * \`{platform}\` - The value of \`process.platform\` + * \`{arch}\` - The value of \`process.arch\` + * \`{workspaces}\` - Set to \`true\` if the \`workspaces\` or \`workspace\` + options are set. + * \`{ci}\` - The value of the \`ci-name\` config, if set, prefixed with + \`ci/\`, or an empty string if \`ci-name\` is empty. 
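As a rough, hypothetical expansion of the default template above (version numbers made up purely for illustration, not part of this diff):

```js
// hypothetical expansion, assuming npm 7.20.0 on Node.js v16.4.0,
// Linux x64, no workspaces, and no CI system detected
const userAgent = 'npm/{npm-version} node/{node-version} {platform} {arch} workspaces/{workspaces} {ci}'
  .replace('{npm-version}', '7.20.0')
  .replace('{node-version}', 'v16.4.0')
  .replace('{platform}', 'linux')
  .replace('{arch}', 'x64')
  .replace('{workspaces}', 'false')
  .replace('{ci}', '')
  .trim()
// userAgent === 'npm/7.20.0 node/v16.4.0 linux x64 workspaces/false'
```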
+ `, + flatten (key, obj, flatOptions) { + const value = obj[key] + const ciName = obj['ci-name'] + let inWorkspaces = false + if (obj.workspaces || obj.workspace && obj.workspace.length) + inWorkspaces = true + flatOptions.userAgent = + value.replace(/\{node-version\}/gi, obj['node-version']) + .replace(/\{npm-version\}/gi, obj['npm-version']) + .replace(/\{platform\}/gi, process.platform) + .replace(/\{arch\}/gi, process.arch) + .replace(/\{workspaces\}/gi, inWorkspaces) + .replace(/\{ci\}/gi, ciName ? `ci/${ciName}` : '') + .trim() + // user-agent is a unique kind of config item that gets set from a template + // and ends up translated. Because of this, the normal "should we set this + // to process.env also doesn't work + obj[key] = flatOptions.userAgent + process.env.npm_config_user_agent = flatOptions.userAgent + }, +}) + +define('userconfig', { + default: '~/.npmrc', + type: path, + description: ` + The location of user-level configuration settings. + + This may be overridden by the \`npm_config_userconfig\` environment + variable or the \`--userconfig\` command line option, but may _not_ + be overridden by settings in the \`globalconfig\` file. + `, +}) + +define('version', { + default: false, + type: Boolean, + short: 'v', + description: ` + If true, output the npm version and exit successfully. + + Only relevant when specified explicitly on the command line. + `, +}) + +define('versions', { + default: false, + type: Boolean, + description: ` + If true, output the npm version as well as node's \`process.versions\` + map and the version in the current working directory's \`package.json\` + file if one exists, and exit successfully. + + Only relevant when specified explicitly on the command line. + `, +}) + +define('viewer', { + default: isWindows ? 'browser' : 'man', + defaultDescription: ` + "man" on Posix, "browser" on Windows + `, + type: String, + description: ` + The program to use to view help content. + + Set to \`"browser"\` to view html help content in the default web browser. + `, +}) + +define('which', { + default: null, + hint: '<fundingSourceNumber>', + type: [null, Number], + description: ` + If there are multiple funding sources, which 1-indexed source URL to open. + `, +}) + +define('workspace', { + default: [], + type: [String, Array], + hint: '<workspace-name>', + short: 'w', + envExport: false, + description: ` + Enable running a command in the context of the configured workspaces of the + current project while filtering by running only the workspaces defined by + this configuration option. + + Valid values for the \`workspace\` config are either: + + * Workspace names + * Path to a workspace directory + * Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + + When set for the \`npm init\` command, this may be set to the folder of + a workspace which does not yet exist, to create the folder and set it + up as a brand new workspace within the project. + `, +}) + +define('workspaces', { + default: false, + type: Boolean, + short: 'ws', + envExport: false, + description: ` + Enable running a command in the context of **all** the configured + workspaces. + `, +}) + +define('yes', { + default: null, + type: [null, Boolean], + short: 'y', + description: ` + Automatically answer "yes" to any prompts that npm might print on + the command line. 
+ `, +}) diff --git a/lib/utils/config/describe-all.js b/lib/utils/config/describe-all.js new file mode 100644 index 0000000000000..5fb785f08310d --- /dev/null +++ b/lib/utils/config/describe-all.js @@ -0,0 +1,16 @@ +const definitions = require('./definitions.js') +const describeAll = () => { + // sort not-deprecated ones to the top + /* istanbul ignore next - typically already sorted in the definitions file, + * but this is here so that our help doc will stay consistent if we decide + * to move them around. */ + const sort = ([keya, {deprecated: depa}], [keyb, {deprecated: depb}]) => { + return depa && !depb ? 1 + : !depa && depb ? -1 + : keya.localeCompare(keyb, 'en') + } + return Object.entries(definitions).sort(sort) + .map(([key, def]) => def.describe()) + .join('\n\n') +} +module.exports = describeAll diff --git a/lib/utils/config/flatten.js b/lib/utils/config/flatten.js new file mode 100644 index 0000000000000..f6d6124bddf7a --- /dev/null +++ b/lib/utils/config/flatten.js @@ -0,0 +1,32 @@ +// use the defined flattening function, and copy over any scoped +// registries and registry-specific "nerfdart" configs verbatim +// +// TODO: make these getters so that we only have to make dirty +// the thing that changed, and then flatten the fields that +// could have changed when a config.set is called. +// +// TODO: move nerfdart auth stuff into a nested object that +// is only passed along to paths that end up calling npm-registry-fetch. +const definitions = require('./definitions.js') +const flatten = (obj, flat = {}) => { + for (const [key, val] of Object.entries(obj)) { + const def = definitions[key] + if (def && def.flatten) + def.flatten(key, obj, flat) + else if (/@.*:registry$/i.test(key) || /^\/\//.test(key)) + flat[key] = val + } + + // XXX make this the bin/npm-cli.js file explicitly instead + // otherwise using npm programmatically is a bit of a pain. + flat.npmBin = require.main ? require.main.filename + : /* istanbul ignore next - not configurable property */ undefined + flat.nodeBin = process.env.NODE || process.execPath + + // XXX should this be sha512? is it even relevant? + flat.hashAlgorithm = 'sha1' + + return flat +} + +module.exports = flatten diff --git a/lib/utils/config/index.js b/lib/utils/config/index.js new file mode 100644 index 0000000000000..a24f5865242bf --- /dev/null +++ b/lib/utils/config/index.js @@ -0,0 +1,52 @@ +const flatten = require('./flatten.js') +const definitions = require('./definitions.js') +const describeAll = require('./describe-all.js') + +// aliases where they get expanded into a completely different thing +// these are NOT supported in the environment or npmrc files, only +// expanded on the CLI. +// TODO: when we switch off of nopt, use an arg parser that supports +// more reasonable aliasing and short opts right in the definitions set. 
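To make the behavior of the `flatten()` helper added in `lib/utils/config/flatten.js` above concrete, here is a minimal sketch with made-up registry values (not part of this diff):

```js
// hypothetical illustration of flatten(): keys with definitions go through
// their flatteners, while scoped-registry and nerfdart keys pass through as-is
const flat = flatten({
  registry: 'https://registry.npmjs.org/',
  '@mycorp:registry': 'https://registry.mycorp.com/',
  '//registry.mycorp.com/:_authToken': 'xxxx',
})
// flat.registry === 'https://registry.npmjs.org/'   (via the registry definition)
// flat['@mycorp:registry'] and flat['//registry.mycorp.com/:_authToken']
// are copied verbatim; flat.npmBin, flat.nodeBin and flat.hashAlgorithm
// are also populated by the helper
```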
+const shorthands = { + 'enjoy-by': ['--before'], + d: ['--loglevel', 'info'], + dd: ['--loglevel', 'verbose'], + ddd: ['--loglevel', 'silly'], + quiet: ['--loglevel', 'warn'], + q: ['--loglevel', 'warn'], + s: ['--loglevel', 'silent'], + silent: ['--loglevel', 'silent'], + verbose: ['--loglevel', 'verbose'], + desc: ['--description'], + help: ['--usage'], + local: ['--no-global'], + n: ['--no-yes'], + no: ['--no-yes'], + porcelain: ['--parseable'], + readonly: ['--read-only'], + reg: ['--registry'], +} + +for (const [key, {short}] of Object.entries(definitions)) { + if (!short) + continue + // can be either an array or string + for (const s of [].concat(short)) + shorthands[s] = [`--${key}`] +} + +module.exports = { + get defaults () { + // NB: 'default' is a reserved word + return Object.entries(definitions).map(([key, { default: def }]) => { + return [key, def] + }).reduce((defaults, [key, def]) => { + defaults[key] = def + return defaults + }, {}) + }, + definitions, + flatten, + shorthands, + describeAll, +} diff --git a/lib/utils/did-you-mean.js b/lib/utils/did-you-mean.js index c2bdf159dd118..98133196e3c56 100644 --- a/lib/utils/did-you-mean.js +++ b/lib/utils/did-you-mean.js @@ -1,12 +1,33 @@ const leven = require('leven') +const readJson = require('read-package-json-fast') +const { cmdList } = require('./cmd-list.js') -const didYouMean = (scmd, commands) => { - const best = commands +const didYouMean = async (npm, path, scmd) => { + const bestCmd = cmdList + .filter(cmd => leven(scmd, cmd) < scmd.length * 0.4 && scmd !== cmd) + .map(str => ` npm ${str} # ${npm.commands[str].description}`) + + const pkg = await readJson(`${path}/package.json`) + const { scripts } = pkg + // We would already be suggesting this in `npm x` so omit them here + const runScripts = ['stop', 'start', 'test', 'restart'] + const bestRun = Object.keys(scripts || {}) + .filter(cmd => leven(scmd, cmd) < scmd.length * 0.4 && + !runScripts.includes(cmd)) + .map(str => ` npm run ${str} # run the "${str}" package script`) + + const { bin } = pkg + const bestBin = Object.keys(bin || {}) .filter(cmd => leven(scmd, cmd) < scmd.length * 0.4) - .map(str => ` ${str}`) - return best.length === 0 ? '' - : best.length === 1 ? `\nDid you mean this?\n${best[0]}` - : `\nDid you mean one of these?\n${best.slice(0, 3).join('\n')}` -} + .map(str => ` npm exec ${str} # run the "${str}" command from either this or a remote npm package`) + const best = [...bestCmd, ...bestRun, ...bestBin] + + if (best.length === 0) + return '' + + const suggestion = best.length === 1 ? 
`\n\nDid you mean this?\n${best[0]}` + : `\n\nDid you mean one of these?\n${best.slice(0, 3).join('\n')}` + return suggestion +} module.exports = didYouMean diff --git a/lib/utils/error-handler.js b/lib/utils/error-handler.js deleted file mode 100644 index 1fc31df44ffb9..0000000000000 --- a/lib/utils/error-handler.js +++ /dev/null @@ -1,226 +0,0 @@ -let cbCalled = false -const log = require('npmlog') -const npm = require('../npm.js') -let itWorked = false -const path = require('path') -let wroteLogFile = false -let exitCode = 0 -const errorMessage = require('./error-message.js') -const replaceInfo = require('./replace-info.js') - -const cacheFile = require('./cache-file.js') - -let logFileName -const getLogFile = () => { - if (!logFileName) - logFileName = path.resolve(npm.config.get('cache'), '_logs', (new Date()).toISOString().replace(/[.:]/g, '_') + '-debug.log') - - return logFileName -} - -const timings = { - version: npm.version, - command: process.argv.slice(2), - logfile: null, -} -process.on('timing', (name, value) => { - if (timings[name]) - timings[name] += value - else - timings[name] = value -}) - -process.on('exit', code => { - process.emit('timeEnd', 'npm') - log.disableProgress() - if (npm.config && npm.config.loaded && npm.config.get('timing')) { - try { - timings.logfile = getLogFile() - cacheFile.append('_timing.json', JSON.stringify(timings) + '\n') - } catch (_) { - // ignore - } - } - - if (code) - itWorked = false - if (itWorked) - log.info('ok') - else { - if (!cbCalled) { - log.error('', 'cb() never called!') - console.error('') - log.error('', 'This is an error with npm itself. Please report this error at:') - log.error('', ' <https://github.com/npm/cli/issues>') - writeLogFile() - } - - if (code) - log.verbose('code', code) - } - if (npm.config && npm.config.loaded && npm.config.get('timing') && !wroteLogFile) - writeLogFile() - if (wroteLogFile) { - // just a line break - if (log.levels[log.level] <= log.levels.error) - console.error('') - - log.error( - '', - [ - 'A complete log of this run can be found in:', - ' ' + getLogFile(), - ].join('\n') - ) - wroteLogFile = false - } - - // actually exit. 
- if (exitCode === 0 && !itWorked) - exitCode = 1 - - if (exitCode !== 0) - process.exit(exitCode) -}) - -const exit = (code, noLog) => { - exitCode = exitCode || process.exitCode || code - - log.verbose('exit', code) - if (log.level === 'silent') - noLog = true - - const reallyExit = () => { - itWorked = !code - - // Exit directly -- nothing in the CLI should still be running in the - // background at this point, and this makes sure anything left dangling - // for whatever reason gets thrown away, instead of leaving the CLI open - // - // Commands that expect long-running actions should just delay `cb()` - process.stdout.write('', () => { - process.exit(code) - }) - } - - if (code && !noLog) - writeLogFile() - reallyExit() -} - -const errorHandler = (er) => { - log.disableProgress() - if (!npm.config || !npm.config.loaded) { - // logging won't work unless we pretend that it's ready - er = er || new Error('Exit prior to config file resolving.') - console.error(er.stack || er.message) - } - - if (cbCalled) - er = er || new Error('Callback called more than once.') - - if (npm.updateNotification) { - const { level } = log - log.level = log.levels.notice - log.notice('', npm.updateNotification) - log.level = level - } - - cbCalled = true - if (!er) - return exit(0) - - // if we got a command that just shells out to something else, then it - // will presumably print its own errors and exit with a proper status - // code if there's a problem. If we got an error with a code=0, then... - // something else went wrong along the way, so maybe an npm problem? - const isShellout = npm.shelloutCommands.includes(npm.command) - const quietShellout = isShellout && typeof er.code === 'number' && er.code - if (quietShellout) - return exit(er.code, true) - else if (typeof er === 'string') { - log.error('', er) - return exit(1, true) - } else if (!(er instanceof Error)) { - log.error('weird error', er) - return exit(1, true) - } - - if (!er.code) { - const matchErrorCode = er.message.match(/^(?:Error: )?(E[A-Z]+)/) - er.code = matchErrorCode && matchErrorCode[1] - } - - for (const k of ['type', 'stack', 'statusCode', 'pkgid']) { - const v = er[k] - if (v) - log.verbose(k, replaceInfo(v)) - } - - log.verbose('cwd', process.cwd()) - - const os = require('os') - const args = replaceInfo(process.argv) - log.verbose('', os.type() + ' ' + os.release()) - log.verbose('argv', args.map(JSON.stringify).join(' ')) - log.verbose('node', process.version) - log.verbose('npm ', 'v' + npm.version) - - for (const k of ['code', 'syscall', 'file', 'path', 'dest', 'errno']) { - const v = er[k] - if (v) - log.error(k, v) - } - - const msg = errorMessage(er) - for (const errline of [...msg.summary, ...msg.detail]) - log.error(...errline) - - if (npm.config && npm.config.get('json')) { - const error = { - error: { - code: er.code, - summary: messageText(msg.summary), - detail: messageText(msg.detail), - }, - } - console.error(JSON.stringify(error, null, 2)) - } - - exit(typeof er.errno === 'number' ? er.errno : typeof er.code === 'number' ? 
er.code : 1) -} - -const messageText = msg => msg.map(line => line.slice(1).join(' ')).join('\n') - -const writeLogFile = () => { - if (wroteLogFile) - return - - const os = require('os') - - try { - let logOutput = '' - log.record.forEach(m => { - const p = [m.id, m.level] - if (m.prefix) - p.push(m.prefix) - const pref = p.join(' ') - - m.message.trim().split(/\r?\n/) - .map(line => (pref + ' ' + line).trim()) - .forEach(line => { - logOutput += line + os.EOL - }) - }) - cacheFile.write(getLogFile(), logOutput) - - // truncate once it's been written. - log.record.length = 0 - wroteLogFile = true - } catch (ex) { - - } -} - -module.exports = errorHandler -module.exports.exit = exit diff --git a/lib/utils/error-message.js b/lib/utils/error-message.js index ac5a935dc8770..3b590f712e783 100644 --- a/lib/utils/error-message.js +++ b/lib/utils/error-message.js @@ -1,12 +1,11 @@ -const npm = require('../npm.js') const { format } = require('util') const { resolve } = require('path') const nameValidator = require('validate-npm-package-name') const npmlog = require('npmlog') const replaceInfo = require('./replace-info.js') -const { report: explainEresolve } = require('./explain-eresolve.js') +const { report } = require('./explain-eresolve.js') -module.exports = (er) => { +module.exports = (er, npm) => { const short = [] const detail = [] @@ -19,7 +18,7 @@ module.exports = (er) => { case 'ERESOLVE': short.push(['ERESOLVE', er.message]) detail.push(['', '']) - detail.push(['', explainEresolve(er)]) + detail.push(['', report(er, npm.color, resolve(npm.cache, 'eresolve-report.txt'))]) break case 'ENOLOCK': { @@ -270,6 +269,7 @@ module.exports = (er) => { case 'ECONNRESET': case 'ENOTFOUND': case 'ETIMEDOUT': + case 'ERR_SOCKET_TIMEOUT': case 'EAI_FAIL': short.push(['network', er.message]) detail.push([ diff --git a/lib/utils/exit-handler.js b/lib/utils/exit-handler.js new file mode 100644 index 0000000000000..7be138d2c361f --- /dev/null +++ b/lib/utils/exit-handler.js @@ -0,0 +1,227 @@ +const os = require('os') +const path = require('path') +const writeFileAtomic = require('write-file-atomic') +const mkdirp = require('mkdirp-infer-owner') +const fs = require('graceful-fs') + +const errorMessage = require('./error-message.js') +const replaceInfo = require('./replace-info.js') + +let exitHandlerCalled = false +let logFileName +let npm // set by the cli +let wroteLogFile = false + +const getLogFile = () => { + // we call this multiple times, so we need to treat it as a singleton because + // the date is part of the name + if (!logFileName) + logFileName = path.resolve(npm.config.get('cache'), '_logs', (new Date()).toISOString().replace(/[.:]/g, '_') + '-debug.log') + + return logFileName +} + +process.on('exit', code => { + // process.emit is synchronous, so the timeEnd handler will run before the + // unfinished timer check below + process.emit('timeEnd', 'npm') + npm.log.disableProgress() + for (const [name, timers] of npm.timers) + npm.log.verbose('unfinished npm timer', name, timers) + + if (npm.config.loaded && npm.config.get('timing')) { + try { + const file = path.resolve(npm.config.get('cache'), '_timing.json') + const dir = path.dirname(npm.config.get('cache')) + mkdirp.sync(dir) + + fs.appendFileSync(file, JSON.stringify({ + command: process.argv.slice(2), + logfile: getLogFile(), + version: npm.version, + ...npm.timings, + }) + '\n') + + const st = fs.lstatSync(path.dirname(npm.config.get('cache'))) + fs.chownSync(dir, st.uid, st.gid) + fs.chownSync(file, st.uid, st.gid) + } catch (ex) { + // 
ignore + } + } + + if (!code) + npm.log.info('ok') + else + npm.log.verbose('code', code) + + if (!exitHandlerCalled) { + process.exitCode = code || 1 + npm.log.error('', 'Exit handler never called!') + console.error('') + npm.log.error('', 'This is an error with npm itself. Please report this error at:') + npm.log.error('', ' <https://github.com/npm/cli/issues>') + // TODO this doesn't have an npm.config.loaded guard + writeLogFile() + } + // In timing mode we always write the log file + if (npm.config.loaded && npm.config.get('timing') && !wroteLogFile) + writeLogFile() + if (wroteLogFile) { + // just a line break + if (npm.log.levels[npm.log.level] <= npm.log.levels.error) + console.error('') + + npm.log.error( + '', + [ + 'A complete log of this run can be found in:', + ' ' + getLogFile(), + ].join('\n') + ) + } + + // these are needed for the tests to have a clean slate in each test case + exitHandlerCalled = false + wroteLogFile = false +}) + +const exitHandler = (err) => { + npm.log.disableProgress() + if (!npm.config.loaded) { + err = err || new Error('Exit prior to config file resolving.') + console.error(err.stack || err.message) + } + + // only show the notification if it finished. + if (typeof npm.updateNotification === 'string') { + const { level } = npm.log + npm.log.level = 'notice' + npm.log.notice('', npm.updateNotification) + npm.log.level = level + } + + exitHandlerCalled = true + + let exitCode + let noLog + + if (err) { + exitCode = 1 + // if we got a command that just shells out to something else, then it + // will presumably print its own errors and exit with a proper status + // code if there's a problem. If we got an error with a code=0, then... + // something else went wrong along the way, so maybe an npm problem? + const isShellout = npm.shelloutCommands.includes(npm.command) + const quietShellout = isShellout && typeof err.code === 'number' && err.code + if (quietShellout) { + exitCode = err.code + noLog = true + } else if (typeof err === 'string') { + noLog = true + npm.log.error('', err) + } else if (!(err instanceof Error)) { + noLog = true + npm.log.error('weird error', err) + } else { + if (!err.code) { + const matchErrorCode = err.message.match(/^(?:Error: )?(E[A-Z]+)/) + err.code = matchErrorCode && matchErrorCode[1] + } + + for (const k of ['type', 'stack', 'statusCode', 'pkgid']) { + const v = err[k] + if (v) + npm.log.verbose(k, replaceInfo(v)) + } + + npm.log.verbose('cwd', process.cwd()) + + const args = replaceInfo(process.argv) + npm.log.verbose('', os.type() + ' ' + os.release()) + npm.log.verbose('argv', args.map(JSON.stringify).join(' ')) + npm.log.verbose('node', process.version) + npm.log.verbose('npm ', 'v' + npm.version) + + for (const k of ['code', 'syscall', 'file', 'path', 'dest', 'errno']) { + const v = err[k] + if (v) + npm.log.error(k, v) + } + + const msg = errorMessage(err, npm) + for (const errline of [...msg.summary, ...msg.detail]) + npm.log.error(...errline) + + if (npm.config.loaded && npm.config.get('json')) { + const error = { + error: { + code: err.code, + summary: messageText(msg.summary), + detail: messageText(msg.detail), + }, + } + console.error(JSON.stringify(error, null, 2)) + } + + if (typeof err.errno === 'number') + exitCode = err.errno + else if (typeof err.code === 'number') + exitCode = err.code + } + } + npm.log.verbose('exit', exitCode || 0) + + if (npm.log.level === 'silent') + noLog = true + + // noLog is true if there was an error, including if config wasn't loaded, so + // this doesn't need a config.loaded 
guard + if (exitCode && !noLog) + writeLogFile() + + // explicitly call process.exit now so we don't hang on things like the + // update notifier, also flush stdout beforehand because process.exit doesn't + // wait for that to happen. + process.stdout.write('', () => process.exit(exitCode)) +} + +const messageText = msg => msg.map(line => line.slice(1).join(' ')).join('\n') + +const writeLogFile = () => { + try { + let logOutput = '' + npm.log.record.forEach(m => { + const p = [m.id, m.level] + if (m.prefix) + p.push(m.prefix) + const pref = p.join(' ') + + m.message.trim().split(/\r?\n/) + .map(line => (pref + ' ' + line).trim()) + .forEach(line => { + logOutput += line + os.EOL + }) + }) + + const file = getLogFile() + const dir = path.dirname(file) + mkdirp.sync(dir) + writeFileAtomic.sync(file, logOutput) + + const st = fs.lstatSync(path.dirname(npm.config.get('cache'))) + fs.chownSync(dir, st.uid, st.gid) + fs.chownSync(file, st.uid, st.gid) + + // truncate once it's been written. + npm.log.record.length = 0 + wroteLogFile = true + } catch (ex) { + + } +} + +module.exports = exitHandler +module.exports.setNpm = (n) => { + npm = n +} diff --git a/lib/utils/explain-dep.js b/lib/utils/explain-dep.js index c01bc780bfb47..944b4be62bacf 100644 --- a/lib/utils/explain-dep.js +++ b/lib/utils/explain-dep.js @@ -7,19 +7,24 @@ const nocolor = { cyan: s => s, magenta: s => s, blue: s => s, + green: s => s, } +const { relative } = require('path') + const explainNode = (node, depth, color) => printNode(node, color) + - explainDependents(node, depth, color) + explainDependents(node, depth, color) + + explainLinksIn(node, depth, color) const colorType = (type, color) => { - const { red, yellow, cyan, magenta, blue } = color ? chalk : nocolor + const { red, yellow, cyan, magenta, blue, green } = color ? chalk : nocolor const style = type === 'extraneous' ? red : type === 'dev' ? yellow : type === 'optional' ? cyan : type === 'peer' ? magenta : type === 'bundled' ? blue + : type === 'workspace' ? green : /* istanbul ignore next */ s => s return style(type) } @@ -34,8 +39,9 @@ const printNode = (node, color) => { optional, peer, bundled, + isWorkspace, } = node - const { bold, dim } = color ? chalk : nocolor + const { bold, dim, green } = color ? chalk : nocolor const extra = [] if (extraneous) extra.push(' ' + bold(colorType('extraneous', color))) @@ -52,10 +58,23 @@ const printNode = (node, color) => { if (bundled) extra.push(' ' + bold(colorType('bundled', color))) - return `${bold(name)}@${bold(version)}${extra.join('')}` + + const pkgid = isWorkspace + ? green(`${name}@${version}`) + : `${bold(name)}@${bold(version)}` + + return `${pkgid}${extra.join('')}` + (location ? dim(`\n${location}`) : '') } +const explainLinksIn = ({ linksIn }, depth, color) => { + if (!linksIn || !linksIn.length || depth <= 0) + return '' + + const messages = linksIn.map(link => explainNode(link, depth - 1, color)) + const str = '\n' + messages.join('\n') + return str.split('\n').join('\n ') +} + const explainDependents = ({ name, dependents }, depth, color) => { if (!dependents || !dependents.length || depth <= 0) return '' @@ -88,10 +107,14 @@ const explainDependents = ({ name, dependents }, depth, color) => { const explainEdge = ({ name, type, bundled, from, spec }, depth, color) => { const { bold } = color ? chalk : nocolor + const dep = type === 'workspace' + ? 
bold(relative(from.location, spec.slice('file:'.length))) + : `${bold(name)}@"${bold(spec)}"` + const fromMsg = ` from ${explainFrom(from, depth, color)}` + return (type === 'prod' ? '' : `${colorType(type, color)} `) + (bundled ? `${colorType('bundled', color)} ` : '') + - `${bold(name)}@"${bold(spec)}" from ` + - explainFrom(from, depth, color) + `${dep}${fromMsg}` } const explainFrom = (from, depth, color) => { @@ -99,7 +122,8 @@ const explainFrom = (from, depth, color) => { return 'the root project' return printNode(from, color) + - explainDependents(from, depth - 1, color) + explainDependents(from, depth - 1, color) + + explainLinksIn(from, depth - 1, color) } module.exports = { explainNode, printNode, explainEdge } diff --git a/lib/utils/explain-eresolve.js b/lib/utils/explain-eresolve.js index 69789ec9a1c2d..b25e3e4a9ccd0 100644 --- a/lib/utils/explain-eresolve.js +++ b/lib/utils/explain-eresolve.js @@ -1,33 +1,41 @@ -// this is called when an ERESOLVE error is caught in the error-handler, +// this is called when an ERESOLVE error is caught in the exit-handler, // or when there's a log.warn('eresolve', msg, explanation), to turn it // into a human-intelligible explanation of what's wrong and how to fix. -// -// TODO: abstract out the explainNode methods into a separate util for -// use by a future `npm explain <path || spec>` command. - -const npm = require('../npm.js') const { writeFileSync } = require('fs') -const { resolve } = require('path') const { explainEdge, explainNode, printNode } = require('./explain-dep.js') // expl is an explanation object that comes from Arborist. It looks like: // Depth is how far we want to want to descend into the object making a report. // The full report (ie, depth=Infinity) is always written to the cache folder // at ${cache}/eresolve-report.txt along with full json. -const explainEresolve = (expl, color, depth) => { - const { edge, current, peerConflict } = expl +const explain = (expl, color, depth) => { + const { edge, dep, current, peerConflict, currentEdge } = expl const out = [] - if (edge.from && edge.from.whileInstalling) - out.push('While resolving: ' + printNode(edge.from.whileInstalling, color)) + const whileInstalling = dep && dep.whileInstalling || + current && current.whileInstalling || + edge && edge.from && edge.from.whileInstalling + if (whileInstalling) + out.push('While resolving: ' + printNode(whileInstalling, color)) + + // it "should" be impossible for an ERESOLVE explanation to lack both + // current and currentEdge, but better to have a less helpful error + // than a crashing failure. 
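For orientation, the explanation object Arborist hands to explain() can be sketched roughly as below; the field names match the destructuring above, but the values are invented and real objects carry much more detail. The "Found:" line prefers current, then the conflicting peer's own current, then currentEdge, and finally the failing edge itself.

    // A minimal, made-up ERESOLVE explanation object (illustrative only).
    const expl = {
      current: { name: 'eslint', version: '7.22.0' },   // what is already installed
      currentEdge: null,                                 // edge that placed it there, if known
      edge: {                                            // the dependency that could not resolve
        type: 'peer',
        name: 'eslint',
        spec: '^6.0.0',
        from: { name: 'eslint-plugin-legacy', version: '1.0.0' },
      },
      dep: null,
      peerConflict: null,
    }

    // Same fallback order as the code above when picking what to report as "Found:"
    const found = expl.current ||
      (expl.peerConflict && expl.peerConflict.current) ||
      expl.currentEdge ||
      expl.edge
    console.log('Found:', found)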
+ if (current) + out.push('Found: ' + explainNode(current, depth, color)) + else if (peerConflict && peerConflict.current) + out.push('Found: ' + explainNode(peerConflict.current, depth, color)) + else if (currentEdge) + out.push('Found: ' + explainEdge(currentEdge, depth, color)) + else /* istanbul ignore else - should always have one */ if (edge) + out.push('Found: ' + explainEdge(edge, depth, color)) - out.push('Found: ' + explainNode(current, depth, color)) out.push('\nCould not resolve dependency:\n' + explainEdge(edge, depth, color)) if (peerConflict) { const heading = '\nConflicting peer dependency:' - const pc = explainNode(peerConflict, depth, color) + const pc = explainNode(peerConflict.peer, depth, color) out.push(heading + ' ' + pc) } @@ -35,9 +43,7 @@ const explainEresolve = (expl, color, depth) => { } // generate a full verbose report and tell the user how to fix it -const report = (expl, depth = 4) => { - const fullReport = resolve(npm.cache, 'eresolve-report.txt') - +const report = (expl, color, fullReport) => { const orNoStrict = expl.strictPeerDeps ? '--no-strict-peer-deps, ' : '' const fix = `Fix the upstream dependency conflict, or retry this command with ${orNoStrict}--force, or --legacy-peer-deps @@ -47,7 +53,7 @@ to accept an incorrect (and potentially broken) dependency resolution.` ${new Date().toISOString()} -${explainEresolve(expl, false, Infinity)} +${explain(expl, false, Infinity)} ${fix} @@ -56,13 +62,10 @@ Raw JSON explanation object: ${JSON.stringify(expl, null, 2)} `, 'utf8') - return explainEresolve(expl, npm.color, depth) + + return explain(expl, color, 4) + `\n\n${fix}\n\nSee ${fullReport} for a full report.` } -// the terser explain method for the warning when using --force -const explain = (expl, depth = 2) => explainEresolve(expl, npm.color, depth) - module.exports = { explain, report, diff --git a/lib/utils/flat-options.js b/lib/utils/flat-options.js deleted file mode 100644 index c082e4137ab21..0000000000000 --- a/lib/utils/flat-options.js +++ /dev/null @@ -1,254 +0,0 @@ -// return a flattened config object with canonical names suitable for -// passing to dependencies like arborist, pacote, npm-registry-fetch, etc. - -const log = require('npmlog') -const crypto = require('crypto') -const querystring = require('querystring') -const npmSession = crypto.randomBytes(8).toString('hex') -log.verbose('npm-session', npmSession) -const { join } = require('path') - -const buildOmitList = obj => { - const include = obj.include || [] - const omit = new Set((obj.omit || []) - .filter(type => !include.includes(type))) - const only = obj.only - - if (/^prod(uction)?$/.test(only) || obj.production) - omit.add('dev') - - if (/dev/.test(obj.also)) - omit.delete('dev') - - if (obj.dev) - omit.delete('dev') - - if (obj.optional === false) - omit.add('optional') - - obj.omit = [...omit] - - // it would perhaps make more sense to put this in @npmcli/config, but - // since we can set dev to be omitted in multiple various legacy ways, - // it's better to set it here once it's all resolved. - if (obj.omit.includes('dev')) - process.env.NODE_ENV = 'production' - - return [...omit] -} - -// turn an object with npm-config style keys into an options object -// with camelCase values. This doesn't account for the stuff that is -// not pulled from the config keys, that's all handled only for the -// main function which acts on the npm object itself. Used by the -// flatOptions generator, and by the publishConfig handling logic. 
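As a rough illustration of what the removed flatten() did: npm-config style kebab-case keys go in, camelCase options for arborist, pacote and npm-registry-fetch come out. The sketch below keeps only a handful of the many mappings, all taken from the body that follows.

    const { join } = require('path')

    // Cut-down sketch of the removed helper; the real version maps dozens of keys.
    const flattenSketch = obj => ({
      ignoreScripts: obj['ignore-scripts'],
      preferDedupe: obj['prefer-dedupe'],
      dryRun: obj['dry-run'],
      cache: join(obj.cache, '_cacache'),
    })

    console.log(flattenSketch({
      'ignore-scripts': false,
      'prefer-dedupe': true,
      'dry-run': false,
      cache: '/home/user/.npm',
    }))
    // -> (POSIX) { ignoreScripts: false, preferDedupe: true, dryRun: false,
    //              cache: '/home/user/.npm/_cacache' }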
-const flatten = obj => ({ - includeStaged: obj['include-staged'], - preferDedupe: obj['prefer-dedupe'], - ignoreScripts: obj['ignore-scripts'], - nodeVersion: obj['node-version'], - cache: join(obj.cache, '_cacache'), - global: obj.global, - - registry: obj.registry, - scope: obj.scope, - access: obj.access, - alwaysAuth: obj['always-auth'], - audit: obj.audit, - auditLevel: obj['audit-level'], - _auth: obj._auth, - authType: obj['auth-type'], - ssoType: obj['sso-type'], - ssoPollFrequency: obj['sso-poll-frequency'], - before: obj.before, - browser: obj.browser, - ca: obj.ca, - cafile: obj.cafile, - cert: obj.cert, - key: obj.key, - - // token creation options - cidr: obj.cidr, - readOnly: obj['read-only'], - - // npm version options - preid: obj.preid, - tagVersionPrefix: obj['tag-version-prefix'], - allowSameVersion: obj['allow-same-version'], - - // npm version git options - message: obj.message, - commitHooks: obj['commit-hooks'], - gitTagVersion: obj['git-tag-version'], - signGitCommit: obj['sign-git-commit'], - signGitTag: obj['sign-git-tag'], - - // only used for npm ls in v7, not update - depth: obj.depth, - all: obj.all, - - // Output configs - unicode: obj.unicode, - json: obj.json, - long: obj.long, - parseable: obj.parseable, - - // options for npm search - search: { - description: obj.description, - exclude: obj.searchexclude, - limit: obj.searchlimit || 20, - opts: querystring.parse(obj.searchopts), - staleness: obj.searchstaleness, - }, - - diff: obj.diff, - diffUnified: obj['diff-unified'], - diffIgnoreAllSpace: obj['diff-ignore-all-space'], - diffNameOnly: obj['diff-name-only'], - diffNoPrefix: obj['diff-no-prefix'], - diffSrcPrefix: obj['diff-src-prefix'], - diffDstPrefix: obj['diff-dst-prefix'], - diffText: obj['diff-text'], - - dryRun: obj['dry-run'], - engineStrict: obj['engine-strict'], - - retry: { - retries: obj['fetch-retries'], - factor: obj['fetch-retry-factor'], - maxTimeout: obj['fetch-retry-maxtimeout'], - minTimeout: obj['fetch-retry-mintimeout'], - }, - - timeout: obj['fetch-timeout'], - - force: obj.force, - - formatPackageLock: obj['format-package-lock'], - fund: obj.fund, - - // binary locators - git: obj.git, - viewer: obj.viewer, - editor: obj.editor, - - // configs that affect how we build trees - binLinks: obj['bin-links'], - rebuildBundle: obj['rebuild-bundle'], - // --no-shrinkwrap is the same as --no-package-lock - packageLock: !(obj['package-lock'] === false || - obj.shrinkwrap === false), - packageLockOnly: obj['package-lock-only'], - globalStyle: obj['global-style'], - legacyBundling: obj['legacy-bundling'], - foregroundScripts: !!obj['foreground-scripts'], - scriptShell: obj['script-shell'] || undefined, - shell: obj.shell, - omit: buildOmitList(obj), - legacyPeerDeps: obj['legacy-peer-deps'], - strictPeerDeps: obj['strict-peer-deps'], - - // npx stuff - call: obj.call, - package: obj.package, - - // used to build up the appropriate {add:{...}} options to Arborist.reify - save: obj.save, - saveBundle: obj['save-bundle'] && !obj['save-peer'], - saveType: obj['save-optional'] && obj['save-peer'] - ? 'peerOptional' - : obj['save-optional'] ? 'optional' - : obj['save-dev'] ? 'dev' - : obj['save-peer'] ? 'peer' - : obj['save-prod'] ? 'prod' - : null, - savePrefix: obj['save-exact'] ? 
'' - : obj['save-prefix'], - - // configs for npm-registry-fetch - otp: obj.otp, - offline: obj.offline, - preferOffline: getPreferOffline(obj), - preferOnline: getPreferOnline(obj), - strictSSL: obj['strict-ssl'], - defaultTag: obj.tag, - userAgent: obj['user-agent'], - - // yes, it's fine, just do it, jeez, stop asking - yes: obj.yes, - - ...getScopesAndAuths(obj), - - // npm fund exclusive option to select an item from a funding list - which: obj.which, - - // socks proxy can be configured in https-proxy or proxy field - // note that the various (HTTPS_|HTTP_|]PROXY environs will be - // respected if this is not set. - proxy: obj['https-proxy'] || obj.proxy, - noProxy: obj.noproxy, -}) - -const flatOptions = npm => npm.flatOptions || Object.freeze({ - // flatten the config object - ...flatten(npm.config.list[0]), - - // Note that many of these do not come from configs or cli flags - // per se, though they may be implied or defined by them. - log, - npmSession, - dmode: npm.modes.exec, - fmode: npm.modes.file, - umask: npm.modes.umask, - hashAlgorithm: 'sha1', // XXX should this be sha512? - color: !!npm.color, - projectScope: npm.projectScope, - npmVersion: npm.version, - - // npm.command is not set until AFTER flatOptions are defined - // so we need to make this a getter. - get npmCommand () { - return npm.command - }, - - tmp: npm.tmp, - prefix: npm.prefix, - globalPrefix: npm.globalPrefix, - localPrefix: npm.localPrefix, - npmBin: require.main && require.main.filename, - nodeBin: process.env.NODE || process.execPath, - get tag () { - return npm.config.get('tag') - }, -}) - -const getPreferOnline = obj => { - const po = obj['prefer-online'] - if (po !== undefined) - return po - - return obj['cache-max'] <= 0 -} - -const getPreferOffline = obj => { - const po = obj['prefer-offline'] - if (po !== undefined) - return po - - return obj['cache-min'] >= 9999 -} - -// pull out all the @scope:<key> and //host:key config fields -// these are used by npm-registry-fetch for authing against registries -const getScopesAndAuths = obj => { - const scopesAndAuths = {} - // pull out all the @scope:... configs into a flat object. 
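Concretely, the loop below keeps config keys like the following so npm-registry-fetch can find per-scope registries and registry-host credentials; the values here are placeholders.

    // Placeholder config illustrating which keys survive the filter below.
    const config = {
      registry: 'https://registry.npmjs.org/',
      '@myorg:registry': 'https://npm.example.com/',
      '//npm.example.com/:_authToken': 'abc123',
      'save-exact': true,
    }

    const scopesAndAuths = {}
    for (const key in config) {
      if (/@.*:registry$/i.test(key) || /^\/\//.test(key))
        scopesAndAuths[key] = config[key]
    }
    console.log(scopesAndAuths)
    // -> { '@myorg:registry': 'https://npm.example.com/',
    //      '//npm.example.com/:_authToken': 'abc123' }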
- for (const key in obj) { - if (/@.*:registry$/i.test(key) || /^\/\//.test(key)) - scopesAndAuths[key] = obj[key] - } - return scopesAndAuths -} - -module.exports = Object.assign(flatOptions, { flatten }) diff --git a/lib/utils/lifecycle-cmd.js b/lib/utils/lifecycle-cmd.js index 8be9b5a12f9a3..2c5b89dfcdd04 100644 --- a/lib/utils/lifecycle-cmd.js +++ b/lib/utils/lifecycle-cmd.js @@ -1,19 +1,18 @@ // The implementation of commands that are just "run a script" // restart, start, stop, test -const usageUtil = require('./usage.js') -class LifecycleCmd { - constructor (npm, stage) { - this.npm = npm - this.stage = stage +const BaseCommand = require('../base-command.js') +class LifecycleCmd extends BaseCommand { + static get usage () { + return ['[-- <args>]'] } - get usage () { - return usageUtil(this.stage, `npm ${this.stage} [-- <args>]`) + exec (args, cb) { + this.npm.commands['run-script']([this.constructor.name, ...args], cb) } - exec (args, cb) { - this.npm.commands['run-script']([this.stage, ...args], cb) + execWorkspaces (args, filters, cb) { + this.npm.commands['run-script']([this.constructor.name, ...args], cb) } } module.exports = LifecycleCmd diff --git a/lib/utils/npm-usage.js b/lib/utils/npm-usage.js index 220f8037f164d..ddb0bab0bc9a2 100644 --- a/lib/utils/npm-usage.js +++ b/lib/utils/npm-usage.js @@ -1,15 +1,12 @@ -const didYouMean = require('./did-you-mean.js') const { dirname } = require('path') -const output = require('./output.js') const { cmdList } = require('./cmd-list') -module.exports = (npm, valid = true) => { - npm.config.set('loglevel', 'silent') +module.exports = (npm) => { const usesBrowser = npm.config.get('viewer') === 'browser' ? ' (in a browser)' : '' - npm.log.level = 'silent' - output(` -Usage: npm <command> + return `npm <command> + +Usage: npm install install all the dependencies in your project npm install <foo> add the <foo> dependency to your project @@ -21,7 +18,7 @@ npm help <term> search for help on <term>${usesBrowser} npm help npm more involved overview${usesBrowser} All commands: -${npm.config.get('long') ? 
usages(npm) : ('\n ' + wrap(cmdList))} +${allCommands(npm)} Specify configs in the ini-formatted file: ${npm.config.get('userconfig')} @@ -30,14 +27,13 @@ or on the command line via: npm <command> --key=value More configuration info: npm help config Configuration fields: npm help 7 config -npm@${npm.version} ${dirname(dirname(__dirname))} -`) - - if (npm.argv.length >= 1) - output(didYouMean(npm.argv[0], cmdList)) +npm@${npm.version} ${dirname(dirname(__dirname))}` +} - if (!valid) - process.exitCode = 1 +const allCommands = (npm) => { + if (npm.config.get('long')) + return usages(npm) + return ('\n ' + wrap(cmdList)) } const wrap = (arr) => { @@ -66,7 +62,7 @@ const usages = (npm) => { maxLen = Math.max(maxLen, c.length) return set }, []) - .sort((a, b) => a[0].localeCompare(b[0])) + .sort((a, b) => a[0].localeCompare(b[0], 'en')) .map(([c, usage]) => `\n ${c}${' '.repeat(maxLen - c.length + 1)}${ (usage.split('\n').join('\n' + ' '.repeat(maxLen + 5)))}`) .join('\n') diff --git a/lib/utils/open-url.js b/lib/utils/open-url.js index 1fe456bd050be..41fac33ec66e9 100644 --- a/lib/utils/open-url.js +++ b/lib/utils/open-url.js @@ -1,4 +1,3 @@ -const output = require('./output.js') const opener = require('opener') const { URL } = require('url') @@ -16,7 +15,7 @@ const open = async (npm, url, errMsg) => { }, null, 2) : `${errMsg}:\n ${url}\n` - output(alternateMsg) + npm.output(alternateMsg) } if (browser === false) { diff --git a/lib/utils/output.js b/lib/utils/output.js deleted file mode 100644 index 2d1549859ac0d..0000000000000 --- a/lib/utils/output.js +++ /dev/null @@ -1,7 +0,0 @@ -const log = require('npmlog') -// output to stdout in a progress bar compatible way -module.exports = (...msg) => { - log.clearProgress() - console.log(...msg) - log.showProgress() -} diff --git a/lib/utils/perf.js b/lib/utils/perf.js deleted file mode 100644 index 3f81ee4b049e4..0000000000000 --- a/lib/utils/perf.js +++ /dev/null @@ -1,16 +0,0 @@ -const log = require('npmlog') -const timings = new Map() - -process.on('time', (name) => { - timings.set(name, Date.now()) -}) - -process.on('timeEnd', (name) => { - if (timings.has(name)) { - const ms = Date.now() - timings.get(name) - process.emit('timing', name, ms) - log.timing(name, `Completed in ${ms}ms`) - timings.delete(name) - } else - log.silly('timing', "Tried to end timer that doesn't exist:", name) -}) diff --git a/lib/utils/ping.js b/lib/utils/ping.js index f5f7fcc6a6258..00956d0c1630c 100644 --- a/lib/utils/ping.js +++ b/lib/utils/ping.js @@ -1,7 +1,7 @@ // ping the npm registry // used by the ping and doctor commands const fetch = require('npm-registry-fetch') -module.exports = async (opts) => { - const res = await fetch('/-/ping?write=true', opts) +module.exports = async (flatOptions) => { + const res = await fetch('/-/ping?write=true', flatOptions) return res.json().catch(() => ({})) } diff --git a/lib/utils/proc-log-listener.js b/lib/utils/proc-log-listener.js index 1dc4b4399eaea..2cfe94ecb0cf2 100644 --- a/lib/utils/proc-log-listener.js +++ b/lib/utils/proc-log-listener.js @@ -14,3 +14,9 @@ module.exports = () => { } }) } + +// for tests +/* istanbul ignore next */ +module.exports.reset = () => { + process.removeAllListeners('log') +} diff --git a/lib/utils/queryable.js b/lib/utils/queryable.js new file mode 100644 index 0000000000000..e10eba3b5f092 --- /dev/null +++ b/lib/utils/queryable.js @@ -0,0 +1,314 @@ +const util = require('util') +const _data = Symbol('data') +const _delete = Symbol('delete') +const _append = Symbol('append') + +const 
sqBracketsMatcher = str => str.match(/(.+)\[([^\]]+)\]\.?(.*)$/) + +// replaces any occurence of an empty-brackets (e.g: []) with a special +// Symbol(append) to represent it, this is going to be useful for the setter +// method that will push values to the end of the array when finding these +const replaceAppendSymbols = str => { + const matchEmptyBracket = str.match(/^(.*)\[\]\.?(.*)$/) + + if (matchEmptyBracket) { + const [, pre, post] = matchEmptyBracket + return [...replaceAppendSymbols(pre), _append, post].filter(Boolean) + } + + return [str] +} + +const parseKeys = (key) => { + const sqBracketItems = new Set() + sqBracketItems.add(_append) + const parseSqBrackets = (str) => { + const index = sqBracketsMatcher(str) + + // once we find square brackets, we recursively parse all these + if (index) { + const preSqBracketPortion = index[1] + + // we want to have a `new String` wrapper here in order to differentiate + // between multiple occurences of the same string, e.g: + // foo.bar[foo.bar] should split into { foo: { bar: { 'foo.bar': {} } } + /* eslint-disable-next-line no-new-wrappers */ + const foundKey = new String(index[2]) + const postSqBracketPortion = index[3] + + // we keep track of items found during this step to make sure + // we don't try to split-separate keys that were defined within + // square brackets, since the key name itself might contain dots + sqBracketItems.add(foundKey) + + // returns an array that contains either dot-separate items (that will + // be splitted appart during the next step OR the fully parsed keys + // read from square brackets, e.g: + // foo.bar[1.0.0].a.b -> ['foo.bar', '1.0.0', 'a.b'] + return [ + ...parseSqBrackets(preSqBracketPortion), + foundKey, + ...( + postSqBracketPortion + ? parseSqBrackets(postSqBracketPortion) + : [] + ), + ] + } + + // at the end of parsing, any usage of the special empty-bracket syntax + // (e.g: foo.array[]) has not yet been parsed, here we'll take care + // of parsing it and adding a special symbol to represent it in + // the resulting list of keys + return replaceAppendSymbols(str) + } + + const res = [] + // starts by parsing items defined as square brackets, those might be + // representing properties that have a dot in the name or just array + // indexes, e.g: foo[1.0.0] or list[0] + const sqBracketKeys = parseSqBrackets(key.trim()) + + for (const k of sqBracketKeys) { + // keys parsed from square brackets should just be added to list of + // resulting keys as they might have dots as part of the key + if (sqBracketItems.has(k)) + res.push(k) + else { + // splits the dot-sep property names and add them to the list of keys + for (const splitKey of k.split('.')) + /* eslint-disable-next-line no-new-wrappers */ + res.push(new String(splitKey)) + } + } + + // returns an ordered list of strings in which each entry + // represents a key in an object defined by the previous entry + return res +} + +const getter = ({ data, key }) => { + // keys are a list in which each entry represents the name of + // a property that should be walked through the object in order to + // return the final found value + const keys = parseKeys(key) + let _data = data + let label = '' + + for (const k of keys) { + // empty-bracket-shortcut-syntax is not supported on getter + if (k === _append) { + throw Object.assign( + new Error('Empty brackets are not valid syntax for retrieving values.'), + { code: 'EINVALIDSYNTAX' } + ) + } + + // extra logic to take into account printing array, along with its + // special syntax in which using a 
dot-sep property name after an + // arry will expand it's results, e.g: + // arr.name -> arr[0].name=value, arr[1].name=value, ... + const maybeIndex = Number(k) + if (Array.isArray(_data) && !Number.isInteger(maybeIndex)) { + _data = _data.reduce((acc, i, index) => { + acc[`${label}[${index}].${k}`] = i[k] + return acc + }, {}) + return _data + } else { + // if can't find any more values, it means it's just over + // and there's nothing to return + if (!_data[k]) + return undefined + + // otherwise sets the next value + _data = _data[k] + } + + label += k + } + + // these are some legacy expectations from + // the old API consumed by lib/view.js + if (Array.isArray(_data) && _data.length <= 1) + _data = _data[0] + + return { + [key]: _data, + } +} + +const setter = ({ data, key, value, force }) => { + // setter goes to recursively transform the provided data obj, + // setting properties from the list of parsed keys, e.g: + // ['foo', 'bar', 'baz'] -> { foo: { bar: { baz: {} } } + const keys = parseKeys(key) + const setKeys = (_data, _key) => { + // handles array indexes, converting valid integers to numbers, + // note that occurences of Symbol(append) will throw, + // so we just ignore these for now + let maybeIndex = Number.NaN + try { + maybeIndex = Number(_key) + } catch (err) {} + if (!Number.isNaN(maybeIndex)) + _key = maybeIndex + + // creates new array in case key is an index + // and the array obj is not yet defined + const keyIsAnArrayIndex = _key === maybeIndex || _key === _append + const dataHasNoItems = !Object.keys(_data).length + if (keyIsAnArrayIndex && dataHasNoItems && !Array.isArray(_data)) + _data = [] + + // converting from array to an object is also possible, in case the + // user is using force mode, we should also convert existing arrays + // to an empty object if the current _data is an array + if (force && Array.isArray(_data) && !keyIsAnArrayIndex) + _data = { ..._data } + + // the _append key is a special key that is used to represent + // the empty-bracket notation, e.g: arr[] -> arr[arr.length] + if (_key === _append) { + if (!Array.isArray(_data)) { + throw Object.assign( + new Error(`Can't use append syntax in non-Array element`), + { code: 'ENOAPPEND' } + ) + } + _key = _data.length + } + + // retrieves the next data object to recursively iterate on, + // throws if trying to override a literal value or add props to an array + const next = () => { + const haveContents = + !force && + _data[_key] != null && + value !== _delete + const shouldNotOverrideLiteralValue = + !(typeof _data[_key] === 'object') + // if the next obj to recurse is an array and the next key to be + // appended to the resulting obj is not an array index, then it + // should throw since we can't append arbitrary props to arrays + const shouldNotAddPropsToArrays = + typeof keys[0] !== 'symbol' && + Array.isArray(_data[_key]) && + Number.isNaN(Number(keys[0])) + + const overrideError = + haveContents && + shouldNotOverrideLiteralValue + if (overrideError) { + throw Object.assign( + new Error(`Property ${_key} already exists and is not an Array or Object.`), + { code: 'EOVERRIDEVALUE' } + ) + } + + const addPropsToArrayError = + haveContents && + shouldNotAddPropsToArrays + if (addPropsToArrayError) { + throw Object.assign( + new Error(`Can't add property ${key} to an Array.`), + { code: 'ENOADDPROP' } + ) + } + + return typeof _data[_key] === 'object' ? 
_data[_key] || {} : {} + } + + // sets items from the parsed array of keys as objects, recurses to + // setKeys in case there are still items to be handled, otherwise it + // just sets the original value set by the user + if (keys.length) + _data[_key] = setKeys(next(), keys.shift()) + else { + // handles special deletion cases for obj props / array items + if (value === _delete) { + if (Array.isArray(_data)) + _data.splice(_key, 1) + else + delete _data[_key] + } else + // finally, sets the value in its right place + _data[_key] = value + } + + return _data + } + + setKeys(data, keys.shift()) +} + +class Queryable { + constructor (obj) { + if (!obj || typeof obj !== 'object') { + throw Object.assign( + new Error('Queryable needs an object to query properties from.'), + { code: 'ENOQUERYABLEOBJ' } + ) + } + + this[_data] = obj + } + + query (queries) { + // this ugly interface here is meant to be a compatibility layer + // with the legacy API lib/view.js is consuming, if at some point + // we refactor that command then we can revisit making this nicer + if (queries === '') + return { '': this[_data] } + + const q = query => getter({ + data: this[_data], + key: query, + }) + + if (Array.isArray(queries)) { + let res = {} + for (const query of queries) + res = { ...res, ...q(query) } + return res + } else + return q(queries) + } + + // return the value for a single query if found, otherwise returns undefined + get (query) { + const obj = this.query(query) + if (obj) + return obj[query] + } + + // creates objects along the way for the provided `query` parameter + // and assigns `value` to the last property of the query chain + set (query, value, { force } = {}) { + setter({ + data: this[_data], + key: query, + value, + force, + }) + } + + // deletes the value of the property found at `query` + delete (query) { + setter({ + data: this[_data], + key: query, + value: _delete, + }) + } + + toJSON () { + return this[_data] + } + + [util.inspect.custom] () { + return this.toJSON() + } +} + +module.exports = Queryable diff --git a/lib/utils/read-local-package.js b/lib/utils/read-local-package.js deleted file mode 100644 index c31bca994704c..0000000000000 --- a/lib/utils/read-local-package.js +++ /dev/null @@ -1,11 +0,0 @@ -const { resolve } = require('path') -const readJson = require('read-package-json-fast') -async function readLocalPackageName (npm) { - if (npm.flatOptions.global) - return - - const filepath = resolve(npm.flatOptions.prefix, 'package.json') - return (await readJson(filepath)).name -} - -module.exports = readLocalPackageName diff --git a/lib/utils/read-package-name.js b/lib/utils/read-package-name.js new file mode 100644 index 0000000000000..7ed15987767bb --- /dev/null +++ b/lib/utils/read-package-name.js @@ -0,0 +1,9 @@ +const { resolve } = require('path') +const readJson = require('read-package-json-fast') +async function readLocalPackageName (prefix) { + const filepath = resolve(prefix, 'package.json') + const json = await readJson(filepath) + return json.name +} + +module.exports = readLocalPackageName diff --git a/lib/utils/reify-output.js b/lib/utils/reify-output.js index 216f0e902e90a..bf3fa7fb2e13d 100644 --- a/lib/utils/reify-output.js +++ b/lib/utils/reify-output.js @@ -10,7 +10,6 @@ // run `npm audit fix` to fix them, or `npm audit` for details const log = require('npmlog') -const output = require('./output.js') const { depth } = require('treeverse') const ms = require('ms') const auditReport = require('npm-audit-report') @@ -19,10 +18,6 @@ const auditError = 
require('./audit-error.js') // TODO: output JSON if flatOptions.json is true const reifyOutput = (npm, arb) => { - // don't print any info in --silent mode - if (log.levels[log.level] > log.levels.error) - return - const { diff, actualTree } = arb // note: fails and crashes if we're running audit fix and there was an error @@ -30,6 +25,13 @@ const reifyOutput = (npm, arb) => { // stuff in that case! const auditReport = auditError(npm, arb.auditReport) ? null : arb.auditReport + // don't print any info in --silent mode, but we still need to + // set the exitCode properly from the audit report, if we have one. + if (log.levels[log.level] > log.levels.error) { + getAuditReport(npm, auditReport) + return + } + const summary = { added: 0, removed: 0, @@ -69,13 +71,15 @@ const reifyOutput = (npm, arb) => { if (npm.flatOptions.json) { if (auditReport) { + // call this to set the exit code properly + getAuditReport(npm, auditReport) summary.audit = npm.command === 'audit' ? auditReport : auditReport.toJSON().metadata } - output(JSON.stringify(summary, 0, 2)) + npm.output(JSON.stringify(summary, 0, 2)) } else { packagesChangedMessage(npm, summary) - packagesFundingMessage(summary) + packagesFundingMessage(npm, summary) printAuditReport(npm, auditReport) } } @@ -84,11 +88,25 @@ const reifyOutput = (npm, arb) => { // at the end if there's still stuff, because it's silly for `npm audit` // to tell you to run `npm audit` for details. otherwise, use the summary // report. if we get here, we know it's not quiet or json. +// If the loglevel is set higher than 'error', then we just run the report +// to get the exitCode set appropriately. const printAuditReport = (npm, report) => { + const res = getAuditReport(npm, report) + if (!res || !res.report) + return + npm.output(`\n${res.report}`) +} + +const getAuditReport = (npm, report) => { if (!report) return - const reporter = npm.command !== 'audit' ? 'install' : 'detail' + // when in silent mode, we print nothing. the JSON output is + // going to just JSON.stringify() the report object. + const reporter = log.levels[log.level] > log.levels.error ? 'quiet' + : npm.flatOptions.json ? 'quiet' + : npm.command !== 'audit' ? 'install' + : 'detail' const defaultAuditLevel = npm.command !== 'audit' ? 'none' : 'low' const auditLevel = npm.flatOptions.auditLevel || defaultAuditLevel @@ -97,8 +115,9 @@ const printAuditReport = (npm, report) => { ...npm.flatOptions, auditLevel, }) - process.exitCode = process.exitCode || res.exitCode - output('\n' + res.report) + if (npm.command === 'audit') + process.exitCode = process.exitCode || res.exitCode + return res } const packagesChangedMessage = (npm, { added, removed, changed, audited }) => { @@ -136,18 +155,18 @@ const packagesChangedMessage = (npm, { added, removed, changed, audited }) => { msg.push(`audited ${audited} package${audited === 1 ? '' : 's'}`) msg.push(` in ${ms(Date.now() - npm.started)}`) - output(msg.join('')) + npm.output(msg.join('')) } -const packagesFundingMessage = ({ funding }) => { +const packagesFundingMessage = (npm, { funding }) => { if (!funding) return - output('') + npm.output('') const pkg = funding === 1 ? 'package' : 'packages' const is = funding === 1 ? 
'is' : 'are' - output(`${funding} ${pkg} ${is} looking for funding`) - output(' run `npm fund` for details') + npm.output(`${funding} ${pkg} ${is} looking for funding`) + npm.output(' run `npm fund` for details') } module.exports = reifyOutput diff --git a/lib/utils/setup-log.js b/lib/utils/setup-log.js index 44e612d50dc9f..aaf7fa47e266d 100644 --- a/lib/utils/setup-log.js +++ b/lib/utils/setup-log.js @@ -14,10 +14,23 @@ module.exports = (config) => { const { warn } = log + const stdoutTTY = process.stdout.isTTY + const stderrTTY = process.stderr.isTTY + const dumbTerm = process.env.TERM === 'dumb' + const stderrNotDumb = stderrTTY && !dumbTerm + // this logic is duplicated in the config 'color' flattener + const enableColorStderr = color === 'always' ? true + : color === false ? false + : stderrTTY + + const enableColorStdout = color === 'always' ? true + : color === false ? false + : stdoutTTY + log.warn = (heading, ...args) => { if (heading === 'ERESOLVE' && args[1] && typeof args[1] === 'object') { warn(heading, args[0]) - return warn('', explain(args[1])) + return warn('', explain(args[1], enableColorStdout, 2)) } return warn(heading, ...args) } @@ -29,19 +42,6 @@ module.exports = (config) => { log.heading = config.get('heading') || 'npm' - const stdoutTTY = process.stdout.isTTY - const stderrTTY = process.stderr.isTTY - const dumbTerm = process.env.TERM === 'dumb' - const stderrNotDumb = stderrTTY && !dumbTerm - - const enableColorStderr = color === 'always' ? true - : color === false ? false - : stderrTTY - - const enableColorStdout = color === 'always' ? true - : color === false ? false - : stdoutTTY - if (enableColorStderr) log.enableColor() else @@ -59,6 +59,4 @@ module.exports = (config) => { log.enableProgress() else log.disableProgress() - - return enableColorStdout } diff --git a/lib/utils/tar.js b/lib/utils/tar.js index 887c40a0f6ebe..9e7c3329530ee 100644 --- a/lib/utils/tar.js +++ b/lib/utils/tar.js @@ -76,7 +76,7 @@ const getContents = async (manifest, tarball) => { }) const comparator = (a, b) => { - return a.path.localeCompare(b.path, undefined, { + return a.path.localeCompare(b.path, 'en', { sensitivity: 'case', numeric: true, }) @@ -84,7 +84,7 @@ const getContents = async (manifest, tarball) => { const isUpper = (str) => { const ch = str.charAt(0) - return ch >= 'A' && ch <= 'Z' + return ch === ch.toUpperCase() } const uppers = files.filter(file => isUpper(file.path)) diff --git a/lib/utils/update-notifier.js b/lib/utils/update-notifier.js index 0a19be94e62a4..14c4fac0d58b3 100644 --- a/lib/utils/update-notifier.js +++ b/lib/utils/update-notifier.js @@ -14,30 +14,26 @@ const { resolve } = require('path') const isGlobalNpmUpdate = npm => { return npm.flatOptions.global && ['install', 'update'].includes(npm.command) && - npm.argv.includes('npm') + npm.argv.some(arg => /^npm(@|$)/.test(arg)) } // update check frequency const DAILY = 1000 * 60 * 60 * 24 const WEEKLY = DAILY * 7 -const updateTimeout = async (npm, duration) => { +// don't put it in the _cacache folder, just in npm's cache +const lastCheckedFile = npm => + resolve(npm.flatOptions.cache, '../_update-notifier-last-checked') + +const checkTimeout = async (npm, duration) => { const t = new Date(Date.now() - duration) - // don't put it in the _cacache folder, just in npm's cache - const f = resolve(npm.flatOptions.cache, '../_update-notifier-last-checked') + const f = lastCheckedFile(npm) // if we don't have a file, then definitely check it. 
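In essence the renamed checkTimeout() is just an mtime comparison against the marker file; a condensed sketch (the marker file name and durations are the ones defined above, error handling simplified):

    const { stat } = require('fs').promises

    // Returns true when the marker file is older than `duration` ms (or missing),
    // i.e. when it is time to check the registry again.
    const isDue = async (file, duration) => {
      const cutoff = new Date(Date.now() - duration)
      const st = await stat(file).catch(() => ({ mtime: cutoff - 1 })) // no file: always due
      return cutoff > st.mtime
    }

    // isDue(lastCheckedFile(npm), WEEKLY).then(due => { /* run the check if due */ })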
const st = await stat(f).catch(() => ({ mtime: t - 1 })) - - if (t > st.mtime) { - // best effort, if this fails, it's ok. - // might be using /dev/null as the cache or something weird like that. - await writeFile(f, '').catch(() => {}) - return true - } else - return false + return t > st.mtime } -const updateNotifier = module.exports = async (npm, spec = 'latest') => { +const updateNotifier = async (npm, spec = 'latest') => { // never check for updates in CI, when updating npm already, or opted out if (!npm.config.get('update-notifier') || isGlobalNpmUpdate(npm) || @@ -57,7 +53,7 @@ const updateNotifier = module.exports = async (npm, spec = 'latest') => { const duration = spec !== 'latest' ? DAILY : WEEKLY // if we've already checked within the specified duration, don't check again - if (!(await updateTimeout(npm, duration))) + if (!(await checkTimeout(npm, duration))) return null // if they're currently using a prerelease, nudge to the next prerelease @@ -109,7 +105,16 @@ const updateNotifier = module.exports = async (npm, spec = 'latest') => { `${oldc} -> ${latestc}\n` + `Changelog: ${changelogc}\n` + `Run ${cmdc} to update!\n` - const messagec = !useColor ? message : chalk.bgBlack.white(message) - return messagec + return message +} + +// only update the notification timeout if we actually finished checking +module.exports = async npm => { + const notification = await updateNotifier(npm) + // intentional. do not await this. it's a best-effort update. if this + // fails, it's ok. might be using /dev/null as the cache or something weird + // like that. + writeFile(lastCheckedFile(npm), '').catch(() => {}) + npm.updateNotification = notification } diff --git a/lib/version.js b/lib/version.js index 1ba834f5d711b..f3680fe8b7a01 100644 --- a/lib/version.js +++ b/lib/version.js @@ -1,21 +1,37 @@ -const libversion = require('libnpmversion') -const output = require('./utils/output.js') -const usageUtil = require('./utils/usage.js') +const libnpmversion = require('libnpmversion') +const { resolve } = require('path') +const { promisify } = require('util') +const readFile = promisify(require('fs').readFile) -class Version { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') + +class Version extends BaseCommand { + static get description () { + return 'Bump a package version' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get name () { + return 'version' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'allow-same-version', + 'commit-hooks', + 'git-tag-version', + 'json', + 'preid', + 'sign-git-tag', + 'workspace', + 'workspaces', + ] } - get usage () { - return usageUtil( - 'version', - 'npm version [<newversion> | major | minor | patch | premajor | preminor | prepatch | prerelease [--preid=<prerelease-id>] | from-git]\n' + - '(run in package dir)\n\n' + - `'npm -v' or 'npm --version' to print npm version (${this.npm.version})\n` + - `'npm view <pkg> version' to view a package's published version\n` + - `'npm ls' to inspect current package/dependency versions\n` - ) + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[<newversion> | major | minor | patch | premajor | preminor | prepatch | prerelease | from-git]'] } async completion (opts) { @@ -39,6 +55,10 @@ class Version { return this.version(args).then(() => cb()).catch(cb) } + execWorkspaces (args, filters, cb) { + this.versionWorkspaces(args, filters).then(() => 
cb()).catch(cb) + } + async version (args) { switch (args.length) { case 0: @@ -50,20 +70,41 @@ class Version { } } + async versionWorkspaces (args, filters) { + switch (args.length) { + case 0: + return this.listWorkspaces(filters) + case 1: + return this.changeWorkspaces(args, filters) + default: + throw this.usage + } + } + async change (args) { - const prefix = this.npm.flatOptions.tagVersionPrefix - const version = await libversion(args[0], { + const prefix = this.npm.config.get('tag-version-prefix') + const version = await libnpmversion(args[0], { ...this.npm.flatOptions, path: this.npm.prefix, }) - return output(`${prefix}${version}`) + return this.npm.output(`${prefix}${version}`) } - async list () { - const results = {} - const { promisify } = require('util') - const { resolve } = require('path') - const readFile = promisify(require('fs').readFile) + async changeWorkspaces (args, filters) { + const prefix = this.npm.config.get('tag-version-prefix') + await this.setWorkspaces(filters) + for (const [name, path] of this.workspaces) { + this.npm.output(name) + const version = await libnpmversion(args[0], { + ...this.npm.flatOptions, + 'git-tag-version': false, + path, + }) + this.npm.output(`${prefix}${version}`) + } + } + + async list (results = {}) { const pj = resolve(this.npm.prefix, 'package.json') const pkg = await readFile(pj, 'utf8') @@ -77,10 +118,26 @@ class Version { for (const [key, version] of Object.entries(process.versions)) results[key] = version - if (this.npm.flatOptions.json) - output(JSON.stringify(results, null, 2)) + if (this.npm.config.get('json')) + this.npm.output(JSON.stringify(results, null, 2)) else - output(results) + this.npm.output(results) + } + + async listWorkspaces (filters) { + const results = {} + await this.setWorkspaces(filters) + for (const path of this.workspacePaths) { + const pj = resolve(path, 'package.json') + // setWorkspaces has already parsed package.json so we know it won't error + const pkg = await readFile(pj, 'utf8') + .then(data => JSON.parse(data)) + + if (pkg.name && pkg.version) + results[pkg.name] = pkg.version + } + return this.list(results) } } + module.exports = Version diff --git a/lib/view.js b/lib/view.js index d0d5fa59d462b..47e631f5565c0 100644 --- a/lib/view.js +++ b/lib/view.js @@ -7,29 +7,41 @@ const fs = require('fs') const jsonParse = require('json-parse-even-better-errors') const log = require('npmlog') const npa = require('npm-package-arg') -const path = require('path') +const { resolve } = require('path') const relativeDate = require('tiny-relative-date') const semver = require('semver') const style = require('ansistyles') const { inspect, promisify } = require('util') const { packument } = require('pacote') -const usageUtil = require('./utils/usage.js') - const readFile = promisify(fs.readFile) const readJson = async file => jsonParse(await readFile(file, 'utf8')) -class View { - constructor (npm) { - this.npm = npm +const Queryable = require('./utils/queryable.js') +const BaseCommand = require('./base-command.js') +class View extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'View registry info' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'view', - 'npm view [<@scope>/]<pkg>[@<version>] [<field>[.subfield]...]' - ) + static get params () { + return [ + 'json', + 'workspace', + 'workspaces', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get 
name () { + return 'view' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return ['[<@scope>/]<pkg>[@<version>] [<field>[.subfield]...]'] } async completion (opts) { @@ -44,9 +56,9 @@ class View { fullMetadata: true, preferOnline: true, } - const { defaultTag } = config const spec = npa(opts.conf.argv.remain[2]) const pckmnt = await packument(spec, config) + const defaultTag = this.npm.config.get('tag') const dv = pckmnt.versions[pckmnt['dist-tags'][defaultTag]] pckmnt.versions = Object.keys(pckmnt.versions).sort(semver.compareLoose) @@ -83,43 +95,118 @@ class View { this.view(args).then(() => cb()).catch(cb) } + execWorkspaces (args, filters, cb) { + this.viewWorkspaces(args, filters).then(() => cb()).catch(cb) + } + async view (args) { if (!args.length) args = ['.'] + let pkg = args.shift() + const local = /^\.@/.test(pkg) || pkg === '.' - const opts = { - ...this.npm.flatOptions, - preferOnline: true, - fullMetadata: true, + if (local) { + if (this.npm.config.get('global')) + throw new Error('Cannot use view command in global mode.') + const dir = this.npm.prefix + const manifest = await readJson(resolve(dir, 'package.json')) + if (!manifest.name) + throw new Error('Invalid package.json, no "name" field') + // put the version back if it existed + pkg = `${manifest.name}${pkg.slice(1)}` + } + let wholePackument = false + if (!args.length) { + args = [''] + wholePackument = true + } + const [pckmnt, data] = await this.getData(pkg, args) + + if (!this.npm.config.get('json') && wholePackument) { + // pretty view (entire packument) + data.map((v) => this.prettyView(pckmnt, v[Object.keys(v)[0]][''])) + } else { + // JSON formatted output (JSON or specific attributes from packument) + let reducedData = data.reduce(reducer, {}) + if (wholePackument) { + // No attributes + reducedData = cleanBlanks(reducedData) + log.silly('view', reducedData) + } + // disable the progress bar entirely, as we can't meaningfully update it + // if we may have partial lines printed. + log.disableProgress() + + const msg = await this.jsonData(reducedData, pckmnt._id) + if (msg !== '') + console.log(msg) } + } + + async viewWorkspaces (args, filters) { + if (!args.length) + args = ['.'] + const pkg = args.shift() - let nv - if (/^[.]@/.test(pkg)) - nv = npa.resolve(null, pkg.slice(2)) - else - nv = npa(pkg) - const name = nv.name - const local = (name === '.' || !name) + const local = /^\.@/.test(pkg) || pkg === '.' 
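For reference, the "local" test above treats these specs as referring to the current project, and everything else as a registry spec:

    // Mirrors the expression above: '.' or '.@<version>' means the local package.
    const isLocal = pkg => /^\.@/.test(pkg) || pkg === '.'

    console.log(isLocal('.'))        // true  - current project
    console.log(isLocal('.@1.0.1'))  // true  - current project at a specific version
    console.log(isLocal('lodash'))   // false - fetched from the registry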
+ if (!local) { + this.npm.log.warn('Ignoring workspaces for specified package(s)') + return this.view([pkg, ...args]) + } + let wholePackument = false + if (!args.length) { + wholePackument = true + args = [''] // getData relies on this + } + const results = {} + await this.setWorkspaces(filters) + for (const name of this.workspaceNames) { + const wsPkg = `${name}${pkg.slice(1)}` + const [pckmnt, data] = await this.getData(wsPkg, args) + + let reducedData = data.reduce(reducer, {}) + if (wholePackument) { + // No attributes + reducedData = cleanBlanks(reducedData) + log.silly('view', reducedData) + } - if (opts.global && local) - throw new Error('Cannot use view command in global mode.') + if (!this.npm.config.get('json')) { + if (wholePackument) + data.map((v) => this.prettyView(pckmnt, v[Object.keys(v)[0]][''])) + else { + console.log(`${name}:`) + const msg = await this.jsonData(reducedData, pckmnt._id) + if (msg !== '') + console.log(msg) + } + } else { + const msg = await this.jsonData(reducedData, pckmnt._id) + if (msg !== '') + results[name] = JSON.parse(msg) + } + } + if (Object.keys(results).length > 0) + console.log(JSON.stringify(results, null, 2)) + } - if (local) { - const dir = this.npm.prefix - const manifest = await readJson(path.resolve(dir, 'package.json')) - if (!manifest.name) - throw new Error('Invalid package.json, no "name" field') - const p = manifest.name - nv = npa(p) - if (pkg && ~pkg.indexOf('@')) - nv.rawSpec = pkg.split('@')[pkg.indexOf('@')] + async getData (pkg, args) { + const opts = { + ...this.npm.flatOptions, + preferOnline: true, + fullMetadata: true, } + const spec = npa(pkg) + // get the data about this package - let version = nv.rawSpec || this.npm.flatOptions.defaultTag + let version = this.npm.config.get('tag') + // rawSpec is the git url if this is from git + if (spec.type !== 'git' && spec.rawSpec) + version = spec.rawSpec - const pckmnt = await packument(nv, opts) + const pckmnt = await packument(spec, opts) if (pckmnt['dist-tags'] && pckmnt['dist-tags'][version]) version = pckmnt['dist-tags'][version] @@ -133,11 +220,9 @@ class View { throw er } - const results = [] + const data = [] const versions = pckmnt.versions || {} pckmnt.versions = Object.keys(versions).sort(semver.compareLoose) - if (!args.length) - args = [''] // remove readme unless we asked for it if (args.indexOf('readme') === -1) @@ -150,54 +235,41 @@ class View { if (args.indexOf('readme') !== -1) delete versions[v].readme - results.push(showFields(pckmnt, versions[v], arg)) + data.push(showFields(pckmnt, versions[v], arg)) }) } }) - let retval = results.reduce(reducer, {}) - - if (args.length === 1 && args[0] === '') { - retval = cleanBlanks(retval) - log.silly('view', retval) - } if ( - !opts.json && + !this.npm.config.get('json') && args.length === 1 && args[0] === '' - ) { - // general view + ) pckmnt.version = version - await Promise.all( - results.map((v) => this.prettyView(pckmnt, v[Object.keys(v)[0]][''], opts)) - ) - return retval - } else { - // view by field name - await this.printData(retval, pckmnt._id, opts) - return retval - } + + return [pckmnt, data] } - async printData (data, name, opts) { + async jsonData (data, name) { const versions = Object.keys(data) let msg = '' let msgJson = [] const includeVersions = versions.length > 1 let includeFields + const json = this.npm.config.get('json') versions.forEach((v) => { const fields = Object.keys(data[v]) includeFields = includeFields || (fields.length > 1) - if (opts.json) + if (json) msgJson.push({}) 
fields.forEach((f) => { let d = cleanup(data[v][f]) - if (fields.length === 1 && opts.json) + if (fields.length === 1 && json) msgJson[msgJson.length - 1][f] = d if (includeVersions || includeFields || typeof d !== 'string') { - if (opts.json) + if (json) msgJson[msgJson.length - 1][f] = d else { d = inspect(d, { @@ -207,10 +279,10 @@ class View { maxArrayLength: null, }) } - } else if (typeof d === 'string' && opts.json) + } else if (typeof d === 'string' && json) d = JSON.stringify(d) - if (!opts.json) { + if (!json) { if (f && includeFields) f += ' = ' msg += (includeVersions ? name + '@' + v + ' ' : '') + @@ -219,7 +291,7 @@ class View { }) }) - if (opts.json) { + if (json) { if (msgJson.length && Object.keys(msgJson[0]).length === 1) { const k = Object.keys(msgJson[0])[0] msgJson = msgJson.map(m => m[k]) @@ -230,18 +302,12 @@ class View { msg = JSON.stringify(msgJson, null, 2) + '\n' } - // disable the progress bar entirely, as we can't meaningfully update it if - // we may have partial lines printed. - log.disableProgress() - - // only log if there is something to log - if (msg !== '') - console.log(msg.trim()) + return msg.trim() } - async prettyView (packument, manifest, opts) { + prettyView (packument, manifest) { // More modern, pretty printing of default view - const unicode = opts.unicode + const unicode = this.npm.config.get('unicode') const tags = [] Object.keys(packument['dist-tags']).forEach((t) => { @@ -372,17 +438,18 @@ function cleanBlanks (obj) { return clean } -function reducer (l, r) { - if (r) { - Object.keys(r).forEach((v) => { - l[v] = l[v] || {} - Object.keys(r[v]).forEach((t) => { - l[v][t] = r[v][t] +// takes an array of objects and merges them into one object +function reducer (acc, cur) { + if (cur) { + Object.keys(cur).forEach((v) => { + acc[v] = acc[v] || {} + Object.keys(cur[v]).forEach((t) => { + acc[v][t] = cur[v][t] }) }) } - return l + return acc } // return whatever was printed @@ -393,56 +460,13 @@ function showFields (data, version, fields) { o[k] = s[k] }) }) - return search(o, fields.split('.'), version.version, fields) -} -function search (data, fields, version, title) { - let field - const tail = fields - while (!field && fields.length) - field = tail.shift() - fields = [field].concat(tail) - let o - if (!field && !tail.length) { - o = {} - o[version] = {} - o[version][title] = data - return o - } - let index = field.match(/(.+)\[([^\]]+)\]$/) - if (index) { - field = index[1] - index = index[2] - if (data[field] && data[field][index]) - return search(data[field][index], tail, version, title) - else - field = field + '[' + index + ']' - } - if (Array.isArray(data)) { - if (data.length === 1) - return search(data[0], fields, version, title) - - let results = [] - data.forEach((data, i) => { - const tl = title.length - const newt = title.substr(0, tl - fields.join('.').length - 1) + - '[' + i + ']' + [''].concat(fields).join('.') - results.push(search(data, fields.slice(), version, newt)) - }) - results = results.reduce(reducer, {}) - return results - } - if (!data[field]) - return undefined - data = data[field] - if (tail.length) { - // there are more fields to deal with. 
- return search(data, tail, version, title) - } - o = {} - o[version] = {} - o[version][title] = data - return o + const queryable = new Queryable(o) + const s = queryable.query(fields) + const res = { [version.version]: s } + + if (s) + return res } function cleanup (data) { diff --git a/lib/whoami.js b/lib/whoami.js index 39184ed9c581c..82c4520d9e883 100644 --- a/lib/whoami.js +++ b/lib/whoami.js @@ -1,19 +1,20 @@ -const output = require('./utils/output.js') const getIdentity = require('./utils/get-identity.js') -const usageUtil = require('./utils/usage.js') -class Whoami { - constructor (npm) { - this.npm = npm +const BaseCommand = require('./base-command.js') +class Whoami extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get description () { + return 'Display npm username' } /* istanbul ignore next - see test/lib/load-all-commands.js */ - get usage () { - return usageUtil( - 'whoami', - 'npm whoami [--registry <registry>]\n' + - '(just prints username according to given registry)' - ) + static get name () { + return 'whoami' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return ['registry'] } exec (args, cb) { @@ -21,9 +22,10 @@ class Whoami { } async whoami (args) { - const opts = this.npm.flatOptions - const username = await getIdentity(this.npm, opts) - output(opts.json ? JSON.stringify(username) : username) + const username = await getIdentity(this.npm, this.npm.flatOptions) + this.npm.output( + this.npm.config.get('json') ? JSON.stringify(username) : username + ) } } module.exports = Whoami diff --git a/lib/workspaces/arborist-cmd.js b/lib/workspaces/arborist-cmd.js new file mode 100644 index 0000000000000..cb6b66b8cb257 --- /dev/null +++ b/lib/workspaces/arborist-cmd.js @@ -0,0 +1,24 @@ +// This is the base for all commands whose execWorkspaces just gets +// a list of workspace names and passes it on to new Arborist() to +// be able to run a filtered Arborist.reify() at some point. + +const BaseCommand = require('../base-command.js') +class ArboristCmd extends BaseCommand { + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'workspace', + 'workspaces', + ] + } + + execWorkspaces (args, filters, cb) { + this.setWorkspaces(filters) + .then(() => { + this.exec(args, cb) + }) + .catch(er => cb(er)) + } +} + +module.exports = ArboristCmd diff --git a/lib/workspaces/get-workspaces.js b/lib/workspaces/get-workspaces.js new file mode 100644 index 0000000000000..91b0074556ae7 --- /dev/null +++ b/lib/workspaces/get-workspaces.js @@ -0,0 +1,36 @@ +const { resolve } = require('path') +const mapWorkspaces = require('@npmcli/map-workspaces') +const minimatch = require('minimatch') +const rpj = require('read-package-json-fast') + +// Returns an Map of paths to workspaces indexed by workspace name +// { foo => '/path/to/foo' } +const getWorkspaces = async (filters, { path }) => { + // TODO we need a better error to be bubbled up here if this rpj call fails + const pkg = await rpj(resolve(path, 'package.json')) + const workspaces = await mapWorkspaces({ cwd: path, pkg }) + const res = filters.length ? 
new Map() : workspaces + + for (const filterArg of filters) { + for (const [workspaceName, workspacePath] of workspaces.entries()) { + if (filterArg === workspaceName + || resolve(path, filterArg) === workspacePath + || minimatch(workspacePath, `${resolve(path, filterArg)}/*`)) + res.set(workspaceName, workspacePath) + } + } + + if (!res.size) { + let msg = '!' + if (filters.length) { + msg = `:\n ${filters.reduce( + (res, filterArg) => `${res} --workspace=${filterArg}`, '')}` + } + + throw new Error(`No workspaces found${msg}`) + } + + return res +} + +module.exports = getWorkspaces diff --git a/node_modules/.gitignore b/node_modules/.gitignore index df322e6a05cb4..2363ccfda45e8 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -1,6 +1,29 @@ # Automatically generated to ignore dev deps /.package-lock.json package-lock.json +CHANGELOG* +changelog* +README* +readme* +.editorconfig +.idea/ +.npmignore +.eslintrc* +.travis* +.github +.jscsrc +.nycrc +.istanbul* +.eslintignore +.jshintrc* +.prettierrc* +.jscs.json +.dir-locals* +.coveralls* +.babelrc* +.nyc_output +.gitkeep + /@babel/code-frame /@babel/core /@babel/generator @@ -27,6 +50,10 @@ package-lock.json /@babel/types /@blueoak/list /@eslint/eslintrc +/@humanwhocodes/config-array +/@humanwhocodes/object-schema +/@istanbuljs/load-nyc-config +/@istanbuljs/schema /@mdx-js/mdx /@mdx-js/util /@types/hast @@ -42,7 +69,6 @@ package-lock.json /ansi-colors /anymatch /append-transform -/arg /argparse /array-find-index /array-includes @@ -62,7 +88,6 @@ package-lock.json /buffer-from /caching-transform /call-bind -/caller /callsites /camelcase /camelcase-css @@ -74,14 +99,11 @@ package-lock.json /cliui /cmark-gfm /collapse-white-space -/color-support /comma-separated-tokens /commondir -/contains-path /convert-source-map /correct-license-metadata /coveralls -/cp-file /cross-spawn /cssom /cssstyle @@ -95,8 +117,8 @@ package-lock.json /define-properties /detab /detect-libc -/diff-frag /docopt +/docs /doctrine /domexception /end-of-stream @@ -118,7 +140,6 @@ package-lock.json /eslint-scope /eslint-utils /eslint-visitor-keys -/esm /espree /esprima /esquery @@ -136,9 +157,9 @@ package-lock.json /findit /flat-cache /flatted -/flow-parser -/flow-remove-types /foreground-child +/form-data +/fromentries /fs-access /fs-constants /fs-exists-cached @@ -148,9 +169,11 @@ package-lock.json /gensync /get-caller-file /get-intrinsic +/get-package-type /github-from-package /glob-parent /globals +/has-bigints /has-symbols /hasha /hast-to-hyperscript @@ -169,7 +192,9 @@ package-lock.json /is-alphabetical /is-alphanumerical /is-arrayish +/is-bigint /is-binary-path +/is-boolean-object /is-buffer /is-callable /is-date-object @@ -179,6 +204,7 @@ package-lock.json /is-hexadecimal /is-negative-zero /is-number +/is-number-object /is-plain-obj /is-potential-custom-element-name /is-regex @@ -186,6 +212,7 @@ package-lock.json /is-string /is-symbol /is-whitespace-character +/is-windows /is-word-character /istanbul-lib-coverage /istanbul-lib-hook @@ -205,17 +232,19 @@ package-lock.json /json5 /lcov-parse /levn +/libtap /licensee /load-json-file /locate-path /lodash +/lodash.clonedeep /lodash.flattendeep -/lodash.sortby +/lodash.merge +/lodash.truncate /lodash.uniq /log-driver /loose-envify /make-dir -/make-error /markdown-escapes /marked /marked-man @@ -223,17 +252,14 @@ package-lock.json /mdast-util-definitions /mdast-util-to-hast /mdurl -/merge-source-map /mimic-response /minimist /mkdirp-classic /napi-build-utils /natural-compare -/nested-error-stacks 
-/nice-try /node-abi /node-addon-api -/node-modules-regexp +/node-preload /noop-logger /normalize-path /npm-license-corrections @@ -246,7 +272,6 @@ package-lock.json /object.getownpropertydescriptors /object.values /optionator -/os-homedir /own-or /own-or-env /p-limit @@ -262,14 +287,14 @@ package-lock.json /path-type /picomatch /pify -/pirates /pkg-dir +/pkg-up /prebuild-install /prelude-ls +/process-on-spawn /progress /prop-types /property-information -/pseudomap /pump /queue-microtask /rc @@ -286,11 +311,8 @@ package-lock.json /remark-parse /remark-squeeze-paragraphs /repeat-string -/request-promise-core -/request-promise-native /require-directory /require-from-string -/require-inject /require-main-filename /resolve-from /run-parallel @@ -312,7 +334,6 @@ package-lock.json /sprintf-js /stack-utils /state-toggle -/stealthy-require /string.prototype.trimend /string.prototype.trimstart /strip-bom @@ -336,11 +357,10 @@ package-lock.json /trim-trailing-lines /trivial-deferred /trough -/ts-node /tsconfig-paths /type-check /type-fest -/typescript +/unbox-primitive /unherit /unicode-length /unified @@ -353,12 +373,12 @@ package-lock.json /unist-util-stringify-position /unist-util-visit /unist-util-visit-parents +/universalify /util-promisify /v8-compile-cache /vfile /vfile-location /vfile-message -/vlq /w3c-hr-time /w3c-xmlserializer /web-namespaces @@ -366,8 +386,8 @@ package-lock.json /whatwg-encoding /whatwg-mimetype /whatwg-url +/which-boxed-primitive /which-module -/which-pm-runs /word-wrap /wrap-ansi /ws @@ -379,5 +399,4 @@ package-lock.json /yapool /yargs /yargs-parser -/yn /zwitch diff --git a/node_modules/@npmcli/arborist/CHANGELOG.md b/node_modules/@npmcli/arborist/CHANGELOG.md deleted file mode 100644 index 3cd36d027b631..0000000000000 --- a/node_modules/@npmcli/arborist/CHANGELOG.md +++ /dev/null @@ -1,19 +0,0 @@ -# CHANGELOG - -## 2.0 - -* BREAKING CHANGE: root node is now included in inventory -* All parent/target/fsParent/etc. references set in `root` setter, rather - than the hodgepodge of setters that existed before. -* `treeCheck` function added, to enforce strict correctness guarantees when - `ARBORIST_DEBUG=1` in the environment (on by default in Arborist tests). - -## 1.0 - -* Release for npm v7 beta -* Fully functional - -## 0.0 - -* Proof of concept -* Before this, it was [`read-package-tree`](http://npm.im/read-package-tree) diff --git a/node_modules/@npmcli/arborist/README.md b/node_modules/@npmcli/arborist/README.md deleted file mode 100644 index cda5f8b9085be..0000000000000 --- a/node_modules/@npmcli/arborist/README.md +++ /dev/null @@ -1,335 +0,0 @@ -# @npmcli/arborist - -Inspect and manage `node_modules` trees. - -![a tree with the word ARBORIST superimposed on it](https://raw.githubusercontent.com/npm/arborist/main/logo.svg?sanitize=true) - -There's more documentation [in the notes -folder](https://github.com/npm/arborist/tree/main/notes). - -## USAGE - -```js -const Arborist = require('@npmcli/arborist') - -const arb = new Arborist({ - // options object - - // where we're doing stuff. defaults to cwd. - path: '/path/to/package/root', - - // url to the default registry. defaults to npm's default registry - registry: 'https://registry.npmjs.org', - - // scopes can be mapped to a different registry - '@foo:registry': 'https://registry.foo.com/', - - // Auth can be provided in a couple of different ways. If none are - // provided, then requests are anonymous, and private packages will 404. 
- // Arborist doesn't do anything with these, it just passes them down - // the chain to pacote and npm-registry-fetch. - - // Safest: a bearer token provided by a registry: - // 1. an npm auth token, used with the default registry - token: 'deadbeefcafebad', - // 2. an alias for the same thing: - _authToken: 'deadbeefcafebad', - - // insecure options: - // 3. basic auth, username:password, base64 encoded - auth: 'aXNhYWNzOm5vdCBteSByZWFsIHBhc3N3b3Jk', - // 4. username and base64 encoded password - username: 'isaacs', - password: 'bm90IG15IHJlYWwgcGFzc3dvcmQ=', - - // auth configs can also be scoped to a given registry with this - // rather unusual pattern: - '//registry.foo.com:token': 'blahblahblah', - '//basic.auth.only.foo.com:_auth': 'aXNhYWNzOm5vdCBteSByZWFsIHBhc3N3b3Jk', - '//registry.foo.com:always-auth': true, -}) - -// READING - -// returns a promise. reads the actual contents of node_modules -arb.loadActual().then(tree => { - // tree is also stored at arb.virtualTree -}) - -// read just what the package-lock.json/npm-shrinkwrap says -// This *also* loads the yarn.lock file, but that's only relevant -// when building the ideal tree. -arb.loadVirtual().then(tree => { - // tree is also stored at arb.virtualTree - // now arb.virtualTree is loaded - // this fails if there's no package-lock.json or package.json in the folder - // note that loading this way should only be done if there's no - // node_modules folder -}) - -// OPTIMIZING AND DESIGNING - -// build an ideal tree from the package.json and various lockfiles. -arb.buildIdealTree(options).then(() => { - // next step is to reify that ideal tree onto disk. - // options can be: - // rm: array of package names to remove at top level - // add: Array of package specifiers to add at the top level. Each of - // these will be resolved with pacote.manifest if the name can't be - // determined from the spec. (Eg, `github:foo/bar` vs `foo@somespec`.) - // The dep will be saved in the location where it already exists, - // (or pkg.dependencies) unless a different saveType is specified. - // saveType: Save added packages in a specific dependency set. - // - null (default) Wherever they exist already, or 'dependencies' - // - prod: definitely in 'dependencies' - // - optional: in 'optionalDependencies' - // - dev: devDependencies - // - peer: save in peerDependencies, and remove any optional flag from - // peerDependenciesMeta if one exists - // - peerOptional: save in peerDependencies, and add a - // peerDepsMeta[name].optional flag - // saveBundle: add newly added deps to the bundleDependencies list - // update: Either `true` to just go ahead and update everything, or an - // object with any or all of the following fields: - // - all: boolean. set to true to just update everything - // - names: names of packages update (like `npm update foo`) - // prune: boolean, default true. Prune extraneous nodes from the tree. - // preferDedupe: prefer to deduplicate packages if possible, rather than - // choosing a newer version of a dependency. Defaults to false, ie, - // always try to get the latest and greatest deps. - // legacyBundling: Nest every dep under the node requiring it, npm v2 style. - // No unnecessary deduplication. Default false. - - // At the end of this process, arb.idealTree is set. 
-}) - -// WRITING - -// Make the idealTree be the thing that's on disk -arb.reify({ - // write the lockfile(s) back to disk, and package.json with any updates - // defaults to 'true' - save: true, -}).then(() => { - // node modules has been written to match the idealTree -}) -``` - -## DATA STRUCTURES - -A `node_modules` tree is a logical graph of dependencies overlaid on a -physical tree of folders. - -A `Node` represents a package folder on disk, either at the root of the -package, or within a `node_modules` folder. The physical structure of the -folder tree is represented by the `node.parent` reference to the containing -folder, and `node.children` map of nodes within its `node_modules` -folder, where the key in the map is the name of the folder in -`node_modules`, and the value is the child node. - -A node without a parent is a top of tree. - -A `Link` represents a symbolic link to a package on disk. This can be a -symbolic link to a package folder within the current tree, or elsewhere on -disk. The `link.target` is a reference to the actual node. Links differ -from Nodes in that dependencies are resolved from the _target_ location, -rather than from the link location. - -An `Edge` represents a dependency relationship. Each node has an `edgesIn` -set, and an `edgesOut` map. Each edge has a `type` which specifies what -kind of dependency it represents: `'prod'` for regular dependencies, -`'peer'` for peerDependencies, `'dev'` for devDependencies, and -`'optional'` for optionalDependencies. `edge.from` is a reference to the -node that has the dependency, and `edge.to` is a reference to the node that -requires the dependency. - -As nodes are moved around in the tree, the graph edges are automatically -updated to point at the new module resolution targets. In other words, -`edge.from`, `edge.name`, and `edge.spec` are immutable; `edge.to` is -updated automatically when a node's parent changes. - -### class Node - -All arborist trees are `Node` objects. A `Node` refers -to a package folder, which may have children in `node_modules`. - -* `node.name` The name of this node's folder in `node_modules`. -* `node.parent` Physical parent node in the tree. The package in whose - `node_modules` folder this package lives. Null if node is top of tree. - - Setting `node.parent` will automatically update `node.location` and all - graph edges affected by the move. - -* `node.meta` A `Shrinkwrap` object which looks up `resolved` and - `integrity` values for all modules in this tree. Only relevant on `root` - nodes. - -* `node.children` Map of packages located in the node's `node_modules` - folder. -* `node.package` The contents of this node's `package.json` file. -* `node.path` File path to this package. If the node is a link, then this - is the path to the link, not to the link target. If the node is _not_ a - link, then this matches `node.realpath`. -* `node.realpath` The full real filepath on disk where this node lives. -* `node.location` A slash-normalized relative path from the root node to - this node's path. -* `node.isLink` Whether this represents a symlink. Always `false` for Node - objects, always `true` for Link objects. -* `node.isRoot` True if this node is a root node. (Ie, if `node.root === - node`.) -* `node.root` The root node where we are working. If not assigned to some - other value, resolves to the node itself. (Ie, the root node's `root` - property refers to itself.) -* `node.isTop` True if this node is the top of its tree (ie, has no - `parent`, false otherwise). 
-* `node.top` The top node in this node's tree. This will be equal to - `node.root` for simple trees, but link targets will frequently be outside - of (or nested somewhere within) a `node_modules` hierarchy, and so will - have a different `top`. -* `node.dev`, `node.optional`, `node.devOptional`, `node.peer`, Indicators - as to whether this node is a dev, optional, and/or peer dependency. - These flags are relevant when pruning dependencies out of the tree or - deciding what to reify. See **Package Dependency Flags** below for - explanations. -* `node.edgesOut` Edges in the dependency graph indicating nodes that this - node depends on, which resolve its dependencies. -* `node.edgesIn` Edges in the dependency graph indicating nodes that depend - on this node. - -* `extraneous` True if this package is not required by any other for any - reason. False for top of tree. - -* `node.resolve(name)` Identify the node that will be returned when code - in this package runs `require(name)` - -* `node.errors` Array of errors encountered while parsing package.json or - version specifiers. - -### class Link - -Link objects represent a symbolic link within the `node_modules` folder. -They have most of the same properties and methods as `Node` objects, with a -few differences. - -* `link.target` A Node object representing the package that the link - references. If this is a Node already present within the tree, then it - will be the same object. If it's outside of the tree, then it will be - treated as the top of its own tree. -* `link.isLink` Always true. -* `link.children` This is always an empty map, since links don't have their - own children directly. - -### class Edge - -Edge objects represent a dependency relationship a package node to the -point in the tree where the dependency will be loaded. As nodes are moved -within the tree, Edges automatically update to point to the appropriate -location. - -* `new Edge({ from, type, name, spec })` Creates a new edge with the - specified fields. After instantiation, none of the fields can be - changed directly. -* `edge.from` The node that has the dependency. -* `edge.type` The type of dependency. One of `'prod'`, `'dev'`, `'peer'`, - or `'optional'`. -* `edge.name` The name of the dependency. Ie, the key in the - relevant `package.json` dependencies object. -* `edge.spec` The specifier that is required. This can be a version, - range, tag name, git url, or tarball URL. Any specifier allowed by npm - is supported. -* `edge.to` Automatically set to the node in the tree that matches the - `name` field. -* `edge.valid` True if `edge.to` satisfies the specifier. -* `edge.error` A string indicating the type of error if there is a problem, - or `null` if it's valid. Values, in order of precedence: - * `DETACHED` Indicates that the edge has been detached from its - `edge.from` node, typically because a new edge was created when a - dependency specifier was modified. - * `MISSING` Indicates that the dependency is unmet. Note that this is - _not_ set for unmet dependencies of the `optional` type. - * `PEER LOCAL` Indicates that a `peerDependency` is found in the - node's local `node_modules` folder, and the node is not the top of - the tree. This violates the `peerDependency` contract, because it - means that the dependency is not a peer. - * `INVALID` Indicates that the dependency does not satisfy `edge.spec`. -* `edge.reload()` Re-resolve to find the appropriate value for `edge.to`. - Called automatically from the `Node` class when the tree is mutated. 
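A minimal sketch of how the `Node` and `Edge` properties described above fit together (assuming `@npmcli/arborist` is installed and this is run from a project root that has a `node_modules` folder): it loads the actual tree and lists every edge that fails to resolve, using only the documented API surface.

```js
// Sketch only: walk the on-disk tree and report unmet/invalid edges,
// using the documented properties node.edgesOut, edge.valid and edge.error.
const Arborist = require('@npmcli/arborist')

const arb = new Arborist({ path: process.cwd() })
arb.loadActual().then(tree => {
  for (const node of tree.inventory.values()) {
    for (const edge of node.edgesOut.values()) {
      if (!edge.valid)
        console.log(`${node.location || '(root)'}: ${edge.type} ${edge.name}@${edge.spec} -> ${edge.error}`)
    }
  }
})
```

The same loop can be run over `arb.loadVirtual()` instead, to inspect what the lockfile describes rather than what is physically present in `node_modules`.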
- -### Package Dependency Flags - -The dependency type of a node can be determined efficiently by looking at -the `dev`, `optional`, and `devOptional` flags on the node object. These -are updated by arborist when necessary whenever the tree is modified in -such a way that the dependency graph can change, and are relevant when -pruning nodes from the tree. - -``` -| extraneous | peer | dev | optional | devOptional | meaning | prune? | -|------------+------+-----+----------+-------------+---------------------+-------------------| -| | | | | | production dep | never | -|------------+------+-----+----------+-------------+---------------------+-------------------| -| X | N/A | N/A | N/A | N/A | nothing depends on | always | -| | | | | | this, it is trash | | -|------------+------+-----+----------+-------------+---------------------+-------------------| -| | | X | | X | devDependency, or | if pruning dev | -| | | | | not in lock | only depended upon | | -| | | | | | by devDependencies | | -|------------+------+-----+----------+-------------+---------------------+-------------------| -| | | | X | X | optionalDependency, | if pruning | -| | | | | not in lock | or only depended on | optional | -| | | | | | by optionalDeps | | -|------------+------+-----+----------+-------------+---------------------+-------------------| -| | | X | X | X | Optional dependency | if pruning EITHER | -| | | | | not in lock | of dep(s) in the | dev OR optional | -| | | | | | dev hierarchy | | -|------------+------+-----+----------+-------------+---------------------+-------------------| -| | | | | X | BOTH a non-optional | if pruning BOTH | -| | | | | in lock | dep within the dev | dev AND optional | -| | | | | | hierarchy, AND a | | -| | | | | | dep within the | | -| | | | | | optional hierarchy | | -|------------+------+-----+----------+-------------+---------------------+-------------------| -| | X | | | | peer dependency, or | if pruning peers | -| | | | | | only depended on by | | -| | | | | | peer dependencies | | -|------------+------+-----+----------+-------------+---------------------+-------------------| -| | X | X | | X | peer dependency of | if pruning peer | -| | | | | not in lock | dev node hierarchy | OR dev deps | -|------------+------+-----+----------+-------------+---------------------+-------------------| -| | X | | X | X | peer dependency of | if pruning peer | -| | | | | not in lock | optional nodes, or | OR optional deps | -| | | | | | peerOptional dep | | -|------------+------+-----+----------+-------------+---------------------+-------------------| -| | X | X | X | X | peer optional deps | if pruning peer | -| | | | | not in lock | of the dev dep | OR optional OR | -| | | | | | hierarchy | dev | -|------------+------+-----+----------+-------------+---------------------+-------------------| -| | X | | | X | BOTH a non-optional | if pruning peers | -| | | | | in lock | peer dep within the | OR: | -| | | | | | dev hierarchy, AND | BOTH optional | -| | | | | | a peer optional dep | AND dev deps | -+------------+------+-----+----------+-------------+---------------------+-------------------+ -``` - -* If none of these flags are set, then the node is required by the - dependency and/or peerDependency hierarchy. It should not be pruned. -* If _both_ `node.dev` and `node.optional` are set, then the node is an - optional dependency of one of the packages in the devDependency - hierarchy. It should be pruned if _either_ dev or optional deps are - being removed. 
-* If `node.dev` is set, but `node.optional` is not, then the node is - required in the devDependency hierarchy. It should be pruned if dev - dependencies are being removed. -* If `node.optional` is set, but `node.dev` is not, then the node is - required in the optionalDependency hierarchy. It should be pruned if - optional dependencies are being removed. -* If `node.devOptional` is set, then the node is a (non-optional) - dependency within the devDependency hierarchy, _and_ a dependency - within the `optionalDependency` hierarchy. It should be pruned if - _both_ dev and optional dependencies are being removed. -* If `node.peer` is set, then all the same semantics apply as above, except - that the dep is brought in by a peer dep at some point, rather than a - normal non-peer dependency. - -Note: `devOptional` is only set in the shrinkwrap/package-lock file if -_neither_ `dev` nor `optional` are set, as it would be redundant. diff --git a/node_modules/@npmcli/arborist/bin/index.js b/node_modules/@npmcli/arborist/bin/index.js index 3cedc91d73565..5449a50e67f62 100755 --- a/node_modules/@npmcli/arborist/bin/index.js +++ b/node_modules/@npmcli/arborist/bin/index.js @@ -11,6 +11,7 @@ Version: ${require('../package.json').version} # COMMANDS * reify: reify ideal tree to node_modules (install, update, rm, ...) +* prune: prune the ideal tree and reify (like npm prune) * ideal: generate and print the ideal tree * actual: read and print the actual tree in node_modules * virtual: read and print the virtual tree in the local shrinkwrap file @@ -50,6 +51,9 @@ switch (cmd) { case 'ideal': require('./ideal.js') break + case 'prune': + require('./prune.js') + break case 'reify': require('./reify.js') break diff --git a/node_modules/@npmcli/arborist/bin/lib/logging.js b/node_modules/@npmcli/arborist/bin/lib/logging.js index a7d20a1f53647..9420bca3c320c 100644 --- a/node_modules/@npmcli/arborist/bin/lib/logging.js +++ b/node_modules/@npmcli/arborist/bin/lib/logging.js @@ -20,13 +20,21 @@ const levelMap = new Map(levels.reduce((set, level, index) => { }, [])) const { inspect, format } = require('util') +const colors = process.stderr.isTTY +const magenta = colors ? msg => `\x1B[35m${msg}\x1B[39m` : m => m if (loglevel !== 'silent') { process.on('log', (level, ...args) => { if (levelMap.get(level) < levelMap.get(loglevel)) return - const pref = `${process.pid} ${level} ` + const pref = `${process.pid} ${magenta(level)} ` if (level === 'warn' && args[0] === 'ERESOLVE') - args[2] = inspect(args[2], { depth: 10 }) + args[2] = inspect(args[2], { depth: 10, colors }) + else { + args = args.map(a => { + return typeof a === 'string' ? 
a + : inspect(a, { depth: 10, colors }) + }) + } const msg = pref + format(...args).trim().split('\n').join(`\n${pref}`) console.error(msg) }) diff --git a/node_modules/@npmcli/arborist/bin/lib/options.js b/node_modules/@npmcli/arborist/bin/lib/options.js index bf8e08ec22a57..a1b6719627f50 100644 --- a/node_modules/@npmcli/arborist/bin/lib/options.js +++ b/node_modules/@npmcli/arborist/bin/lib/options.js @@ -33,7 +33,13 @@ for (const arg of process.argv.slice(2)) { options.omit.push(arg.substr('--omit='.length)) } else if (/^--before=/.test(arg)) options.before = new Date(arg.substr('--before='.length)) - else if (/^--[^=]+=/.test(arg)) { + else if (/^-w.+/.test(arg)) { + options.workspaces = options.workspaces || [] + options.workspaces.push(arg.replace(/^-w/, '')) + } else if (/^--workspace=/.test(arg)) { + options.workspaces = options.workspaces || [] + options.workspaces.push(arg.replace(/^--workspace=/, '')) + } else if (/^--[^=]+=/.test(arg)) { const [key, ...v] = arg.replace(/^--/, '').split('=') const val = v.join('=') options[key] = val === 'false' ? false : val === 'true' ? true : val diff --git a/node_modules/@npmcli/arborist/bin/lib/timers.js b/node_modules/@npmcli/arborist/bin/lib/timers.js index 3b73c0bf6ddd3..b516af92c5b57 100644 --- a/node_modules/@npmcli/arborist/bin/lib/timers.js +++ b/node_modules/@npmcli/arborist/bin/lib/timers.js @@ -1,4 +1,6 @@ const timers = Object.create(null) +const { format } = require('util') +const options = require('./options.js') process.on('time', name => { if (timers[name]) @@ -6,17 +8,21 @@ process.on('time', name => { timers[name] = process.hrtime() }) +const dim = process.stderr.isTTY ? msg => `\x1B[2m${msg}\x1B[22m` : m => m +const red = process.stderr.isTTY ? msg => `\x1B[31m${msg}\x1B[39m` : m => m process.on('timeEnd', name => { if (!timers[name]) throw new Error('timer not started! ' + name) const res = process.hrtime(timers[name]) delete timers[name] - console.error(`${process.pid} ${name}`, res[0] * 1e3 + res[1] / 1e6) + const msg = format(`${process.pid} ${name}`, res[0] * 1e3 + res[1] / 1e6) + if (options.timers !== false) + console.error(dim(msg)) }) process.on('exit', () => { for (const name of Object.keys(timers)) { - console.error('Dangling timer: ', name) + console.error(red('Dangling timer:'), name) process.exitCode = 1 } }) diff --git a/node_modules/@npmcli/arborist/bin/license.js b/node_modules/@npmcli/arborist/bin/license.js index 4083ddc695d46..89d0d879036b0 100644 --- a/node_modules/@npmcli/arborist/bin/license.js +++ b/node_modules/@npmcli/arborist/bin/license.js @@ -22,7 +22,7 @@ a.loadVirtual().then(tree => { set.push([tree.inventory.query('license', license).size, license]) for (const [count, license] of set.sort((a, b) => - a[1] && b[1] ? b[0] - a[0] || a[1].localeCompare(b[1]) + a[1] && b[1] ? b[0] - a[0] || a[1].localeCompare(b[1], 'en') : a[1] ? -1 : b[1] ? 
1 : 0)) diff --git a/node_modules/@npmcli/arborist/bin/prune.js b/node_modules/@npmcli/arborist/bin/prune.js new file mode 100644 index 0000000000000..357dbcaafa03f --- /dev/null +++ b/node_modules/@npmcli/arborist/bin/prune.js @@ -0,0 +1,46 @@ +const Arborist = require('../') + +const options = require('./lib/options.js') +const print = require('./lib/print-tree.js') +require('./lib/logging.js') +require('./lib/timers.js') + +const printDiff = diff => { + const {depth} = require('treeverse') + depth({ + tree: diff, + visit: d => { + if (d.location === '') + return + switch (d.action) { + case 'REMOVE': + console.error('REMOVE', d.actual.location) + break + case 'ADD': + console.error('ADD', d.ideal.location, d.ideal.resolved) + break + case 'CHANGE': + console.error('CHANGE', d.actual.location, { + from: d.actual.resolved, + to: d.ideal.resolved, + }) + break + } + }, + getChildren: d => d.children, + }) +} + +const start = process.hrtime() +process.emit('time', 'install') +const arb = new Arborist(options) +arb.prune(options).then(tree => { + process.emit('timeEnd', 'install') + const end = process.hrtime(start) + print(tree) + if (options.dryRun) + printDiff(arb.diff) + console.error(`resolved ${tree.inventory.size} deps in ${end[0] + end[1] / 1e9}s`) + if (tree.meta && options.save) + tree.meta.save() +}).catch(er => console.error(require('util').inspect(er, { depth: Infinity }))) diff --git a/node_modules/@npmcli/arborist/bin/virtual.js b/node_modules/@npmcli/arborist/bin/virtual.js index 7f90f20cf3817..3352802c2de87 100644 --- a/node_modules/@npmcli/arborist/bin/virtual.js +++ b/node_modules/@npmcli/arborist/bin/virtual.js @@ -8,7 +8,8 @@ require('./lib/timers.js') const start = process.hrtime() new Arborist(options).loadVirtual().then(tree => { const end = process.hrtime(start) - print(tree) + if (!options.quiet) + print(tree) if (options.save) tree.meta.save() console.error(`read ${tree.inventory.size} deps in ${end[0] * 1000 + end[1] / 1e6}ms`) diff --git a/node_modules/@npmcli/arborist/lib/add-rm-pkg-deps.js b/node_modules/@npmcli/arborist/lib/add-rm-pkg-deps.js index 9e4825c526451..f78a43319be8c 100644 --- a/node_modules/@npmcli/arborist/lib/add-rm-pkg-deps.js +++ b/node_modules/@npmcli/arborist/lib/add-rm-pkg-deps.js @@ -1,60 +1,60 @@ // add and remove dependency specs to/from pkg manifest -const removeFromOthers = (name, type, pkg) => { - const others = new Set([ - 'dependencies', - 'optionalDependencies', - 'devDependencies', - 'peerDependenciesMeta', - 'peerDependencies', - ]) - - switch (type) { - case 'prod': - others.delete('dependencies') - break - case 'dev': - others.delete('devDependencies') - others.delete('peerDependencies') - others.delete('peerDependenciesMeta') - break - case 'optional': - others.delete('optionalDependencies') - break - case 'peer': - case 'peerOptional': - others.delete('devDependencies') - others.delete('peerDependencies') - others.delete('peerDependenciesMeta') - break - } - - for (const other of others) - deleteSubKey(pkg, other, name) -} - -const add = ({pkg, add, saveBundle, saveType}) => { +const add = ({pkg, add, saveBundle, saveType, log}) => { for (const spec of add) - addSingle({pkg, spec, saveBundle, saveType}) + addSingle({pkg, spec, saveBundle, saveType, log}) return pkg } -const addSingle = ({pkg, spec, saveBundle, saveType}) => { - if (!saveType) - saveType = getSaveType(pkg, spec) +// Canonical source of both the map between saveType and where it correlates to +// in the package, and the names of all our dependencies attributes 
+const saveTypeMap = new Map([ + ['dev', 'devDependencies'], + ['optional', 'optionalDependencies'], + ['prod', 'dependencies'], + ['peerOptional', 'peerDependencies'], + ['peer', 'peerDependencies'], +]) +const addSingle = ({pkg, spec, saveBundle, saveType, log}) => { const { name, rawSpec } = spec - removeFromOthers(name, saveType, pkg) - const type = saveType === 'prod' ? 'dependencies' - : saveType === 'optional' ? 'optionalDependencies' - : saveType === 'peer' || saveType === 'peerOptional' ? 'peerDependencies' - : saveType === 'dev' ? 'devDependencies' - : /* istanbul ignore next */ null - pkg[type] = pkg[type] || {} - if (rawSpec !== '' || pkg[type][name] === undefined) - pkg[type][name] = rawSpec || '*' + // if the user does not give us a type, we infer which type(s) + // to keep based on the same order of priority we do when + // building the tree as defined in the _loadDeps method of + // the node class. + if (!saveType) + saveType = inferSaveType(pkg, spec.name) + + if (saveType === 'prod') { + // a production dependency can only exist as production (rpj ensures it + // doesn't coexist w/ optional) + deleteSubKey(pkg, 'devDependencies', name, 'dependencies', log) + deleteSubKey(pkg, 'peerDependencies', name, 'dependencies', log) + } else if (saveType === 'dev') { + // a dev dependency may co-exist as peer, or optional, but not production + deleteSubKey(pkg, 'dependencies', name, 'devDependencies', log) + } else if (saveType === 'optional') { + // an optional dependency may co-exist as dev (rpj ensures it doesn't + // coexist w/ prod) + deleteSubKey(pkg, 'peerDependencies', name, 'optionalDependencies', log) + } else { // peer or peerOptional is all that's left + // a peer dependency may coexist as dev + deleteSubKey(pkg, 'dependencies', name, 'peerDependencies', log) + deleteSubKey(pkg, 'optionalDependencies', name, 'peerDependencies', log) + } + + const depType = saveTypeMap.get(saveType) + + pkg[depType] = pkg[depType] || {} + if (rawSpec !== '' || pkg[depType][name] === undefined) + pkg[depType][name] = rawSpec || '*' + if (saveType === 'optional') { + // Affordance for previous npm versions that require this behaviour + pkg.dependencies = pkg.dependencies || {} + pkg.dependencies[name] = pkg.optionalDependencies[name] + } if (saveType === 'peer' || saveType === 'peerOptional') { const pdm = pkg.peerDependenciesMeta || {} @@ -71,55 +71,57 @@ const addSingle = ({pkg, spec, saveBundle, saveType}) => { pkg.devDependencies[name] = pkg.peerDependencies[name] } - if (saveBundle) { + if (saveBundle && saveType !== 'peer' && saveType !== 'peerOptional') { // keep it sorted, keep it unique const bd = new Set(pkg.bundleDependencies || []) bd.add(spec.name) - pkg.bundleDependencies = [...bd].sort((a, b) => a.localeCompare(b)) + pkg.bundleDependencies = [...bd].sort((a, b) => a.localeCompare(b, 'en')) } } -const getSaveType = (pkg, spec) => { - const {name} = spec - const { - // these names are so lonnnnngggg - devDependencies: devDeps, - optionalDependencies: optDeps, - peerDependencies: peerDeps, - peerDependenciesMeta: peerDepsMeta, - } = pkg - - if (peerDeps && peerDeps[name] !== undefined) { - if (peerDepsMeta && peerDepsMeta[name] && peerDepsMeta[name].optional) - return 'peerOptional' - else - return 'peer' - } else if (devDeps && devDeps[name] !== undefined) - return 'dev' - else if (optDeps && optDeps[name] !== undefined) - return 'optional' - else - return 'prod' +// Finds where the package is already in the spec and infers saveType from that +const inferSaveType = (pkg, name) 
=> { + for (const saveType of saveTypeMap.keys()) { + if (hasSubKey(pkg, saveTypeMap.get(saveType), name)) { + if ( + saveType === 'peerOptional' && + (!hasSubKey(pkg, 'peerDependenciesMeta', name) || + !pkg.peerDependenciesMeta[name].optional) + ) + return 'peer' + return saveType + } + } + return 'prod' } -const deleteSubKey = (obj, k, sk) => { - if (obj[k]) { - delete obj[k][sk] - if (!Object.keys(obj[k]).length) - delete obj[k] +const hasSubKey = (pkg, depType, name) => { + return pkg[depType] && Object.prototype.hasOwnProperty.call(pkg[depType], name) +} + +// Removes a subkey and warns about it if it's being replaced +const deleteSubKey = (pkg, depType, name, replacedBy, log) => { + if (hasSubKey(pkg, depType, name)) { + if (replacedBy && log) + log.warn('idealTree', `Removing ${depType}.${name} in favor of ${replacedBy}.${name}`) + delete pkg[depType][name] + + // clean up peerDependenciesMeta if we are removing something from peerDependencies + if (depType === 'peerDependencies' && pkg.peerDependenciesMeta) { + delete pkg.peerDependenciesMeta[name] + if (!Object.keys(pkg.peerDependenciesMeta).length) + delete pkg.peerDependenciesMeta + } + + if (!Object.keys(pkg[depType]).length) + delete pkg[depType] } } const rm = (pkg, rm) => { - for (const type of [ - 'dependencies', - 'optionalDependencies', - 'peerDependencies', - 'peerDependenciesMeta', - 'devDependencies', - ]) { + for (const depType of new Set(saveTypeMap.values())) { for (const name of rm) - deleteSubKey(pkg, type, name) + deleteSubKey(pkg, depType, name) } if (pkg.bundleDependencies) { pkg.bundleDependencies = pkg.bundleDependencies @@ -130,4 +132,4 @@ const rm = (pkg, rm) => { return pkg } -module.exports = { add, rm } +module.exports = { add, rm, saveTypeMap, hasSubKey } diff --git a/node_modules/@npmcli/arborist/lib/arborist/audit.js b/node_modules/@npmcli/arborist/lib/arborist/audit.js index aee7072d02ab0..bf1c335e75363 100644 --- a/node_modules/@npmcli/arborist/lib/arborist/audit.js +++ b/node_modules/@npmcli/arborist/lib/arborist/audit.js @@ -4,6 +4,7 @@ const AuditReport = require('../audit-report.js') // shared with reify const _global = Symbol.for('global') +const _workspaces = Symbol.for('workspaces') module.exports = cls => class Auditor extends cls { async audit (options = {}) { @@ -21,8 +22,10 @@ module.exports = cls => class Auditor extends cls { process.emit('time', 'audit') const tree = await this.loadVirtual() - this.auditReport = await AuditReport.load(tree, this.options) - const ret = options.fix ? this.reify() : this.auditReport + if (this[_workspaces] && this[_workspaces].length) + options.filterSet = this.workspaceDependencySet(tree, this[_workspaces]) + this.auditReport = await AuditReport.load(tree, options) + const ret = options.fix ? 
this.reify(options) : this.auditReport process.emit('timeEnd', 'audit') this.finishTracker('audit') return ret diff --git a/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js index 17b42f81bb39a..7ef42289d297b 100644 --- a/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js +++ b/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js @@ -3,14 +3,20 @@ const rpj = require('read-package-json-fast') const npa = require('npm-package-arg') const pacote = require('pacote') const cacache = require('cacache') -const semver = require('semver') const promiseCallLimit = require('promise-call-limit') -const getPeerSet = require('../peer-set.js') const realpath = require('../../lib/realpath.js') -const { resolve } = require('path') +const { resolve, dirname } = require('path') const { promisify } = require('util') const treeCheck = require('../tree-check.js') const readdir = promisify(require('readdir-scoped-modules')) +const { depth } = require('treeverse') + +const { + OK, + REPLACE, + CONFLICT, +} = require('../can-place-dep.js') +const PlaceDep = require('../place-dep.js') const debug = require('../debug.js') const fromPath = require('../from-path.js') @@ -19,20 +25,9 @@ const Shrinkwrap = require('../shrinkwrap.js') const Node = require('../node.js') const Link = require('../link.js') const addRmPkgDeps = require('../add-rm-pkg-deps.js') -const gatherDepSet = require('../gather-dep-set.js') const optionalSet = require('../optional-set.js') const {checkEngine, checkPlatform} = require('npm-install-checks') -// enum of return values for canPlaceDep. -// No, this is a conflict, you may not put that package here -const CONFLICT = Symbol('CONFLICT') -// Yes, this is fine, and should not be a problem -const OK = Symbol('OK') -// No need, because the package already here is fine -const KEEP = Symbol('KEEP') -// Yes, clobber the package that is already here -const REPLACE = Symbol('REPLACE') - const relpath = require('../relpath.js') // note: some of these symbols are shared so we can hit @@ -44,12 +39,14 @@ const _currentDep = Symbol('currentDep') const _updateAll = Symbol('updateAll') const _mutateTree = Symbol('mutateTree') const _flagsSuspect = Symbol.for('flagsSuspect') +const _workspaces = Symbol.for('workspaces') const _prune = Symbol('prune') const _preferDedupe = Symbol('preferDedupe') const _legacyBundling = Symbol('legacyBundling') const _parseSettings = Symbol('parseSettings') const _initTree = Symbol('initTree') const _applyUserRequests = Symbol('applyUserRequests') +const _applyUserRequestsToNode = Symbol('applyUserRequestsToNode') const _inflateAncientLockfile = Symbol('inflateAncientLockfile') const _buildDeps = Symbol('buildDeps') const _buildDepStep = Symbol('buildDepStep') @@ -62,10 +59,6 @@ const _loadWorkspaces = Symbol.for('loadWorkspaces') const _linkFromSpec = Symbol('linkFromSpec') const _loadPeerSet = Symbol('loadPeerSet') const _updateNames = Symbol.for('updateNames') -const _placeDep = Symbol.for('placeDep') -const _canPlaceDep = Symbol.for('canPlaceDep') -const _canPlacePeers = Symbol('canPlacePeers') -const _pruneForReplacement = Symbol('pruneForReplacement') const _fixDepFlags = Symbol('fixDepFlags') const _resolveLinks = Symbol('resolveLinks') const _rootNodeFromPackage = Symbol('rootNodeFromPackage') @@ -97,19 +90,15 @@ const _checkPlatform = Symbol('checkPlatform') const _virtualRoots = Symbol('virtualRoots') const _virtualRoot = Symbol('virtualRoot') -// used for the ERESOLVE 
error to show the last peer conflict encountered -const _peerConflict = Symbol('peerConflict') - const _failPeerConflict = Symbol('failPeerConflict') const _explainPeerConflict = Symbol('explainPeerConflict') -const _warnPeerConflict = Symbol('warnPeerConflict') const _edgesOverridden = Symbol('edgesOverridden') // exposed symbol for unit testing the placeDep method directly const _peerSetSource = Symbol.for('peerSetSource') // used by Reify mixin const _force = Symbol.for('force') -const _explicitRequests = Symbol.for('explicitRequests') +const _explicitRequests = Symbol('explicitRequests') const _global = Symbol.for('global') const _idealTreePrune = Symbol.for('idealTreePrune') @@ -130,8 +119,10 @@ module.exports = cls => class IdealTreeBuilder extends cls { force = false, packageLock = true, strictPeerDeps = false, + workspaces = [], } = options + this[_workspaces] = workspaces || [] this[_force] = !!force this[_strictPeerDeps] = !!strictPeerDeps @@ -143,6 +134,9 @@ module.exports = cls => class IdealTreeBuilder extends cls { this[_globalStyle] = this[_global] || globalStyle this[_follow] = !!follow + if (this[_workspaces].length && this[_global]) + throw new Error('Cannot operate on workspaces in global mode') + this[_explicitRequests] = new Set() this[_preferDedupe] = false this[_legacyBundling] = false @@ -155,8 +149,8 @@ module.exports = cls => class IdealTreeBuilder extends cls { this[_loadFailures] = new Set() this[_linkNodes] = new Set() this[_manifests] = new Map() - this[_peerConflict] = null this[_edgesOverridden] = new Set() + this[_resolvedAdd] = [] // a map of each module in a peer set to the thing that depended on // that set of peers in the first place. Use a WeakMap so that we @@ -204,8 +198,8 @@ module.exports = cls => class IdealTreeBuilder extends cls { try { await this[_initTree]() - await this[_applyUserRequests](options) await this[_inflateAncientLockfile]() + await this[_applyUserRequests](options) await this[_buildDeps]() await this[_fixDepFlags]() await this[_pruneFailedOptional]() @@ -218,17 +212,13 @@ module.exports = cls => class IdealTreeBuilder extends cls { return treeCheck(this.idealTree) } - [_checkEngineAndPlatform] () { - // engine/platform checks throw, so start the promise chain off first - return Promise.resolve() - .then(() => { - for (const node of this.idealTree.inventory.values()) { - if (!node.optional) { - this[_checkEngine](node) - this[_checkPlatform](node) - } - } - }) + async [_checkEngineAndPlatform] () { + for (const node of this.idealTree.inventory.values()) { + if (!node.optional) { + this[_checkEngine](node) + this[_checkPlatform](node) + } + } } [_checkPlatform] (node) { @@ -266,6 +256,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { this[_preferDedupe] = !!options.preferDedupe this[_legacyBundling] = !!options.legacyBundling this[_updateNames] = update.names + this[_updateAll] = update.all // we prune by default unless explicitly set to boolean false this[_prune] = options.prune !== false @@ -314,7 +305,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { .then(async root => { if (!this[_updateAll] && !this[_global] && !root.meta.loadedFromDisk) { await new this.constructor(this.options).loadActual({ root }) - const tree = root.target || root + const tree = root.target // even though we didn't load it from a package-lock.json FILE, // we still loaded it "from disk", meaning we have to reset // dep flags before assuming that any mutations were reflected. 
@@ -386,7 +377,19 @@ module.exports = cls => class IdealTreeBuilder extends cls { // update.names request by queueing nodes dependent on those named. async [_applyUserRequests] (options) { process.emit('time', 'idealTree:userRequests') - const tree = this.idealTree.target || this.idealTree + const tree = this.idealTree.target + + if (!this[_workspaces].length) + await this[_applyUserRequestsToNode](tree, options) + else { + await Promise.all(this.workspaceNodes(tree, this[_workspaces]) + .map(node => this[_applyUserRequestsToNode](node, options))) + } + + process.emit('timeEnd', 'idealTree:userRequests') + } + + async [_applyUserRequestsToNode] (tree, options) { // If we have a list of package names to update, and we know it's // going to update them wherever they are, add any paths into those // named nodes to the buildIdealTree queue. @@ -395,60 +398,77 @@ module.exports = cls => class IdealTreeBuilder extends cls { // global updates only update the globalTop nodes, but we need to know // that they're there, and not reinstall the world unnecessarily. + const globalExplicitUpdateNames = [] if (this[_global] && (this[_updateAll] || this[_updateNames].length)) { const nm = resolve(this.path, 'node_modules') for (const name of await readdir(nm).catch(() => [])) { - if (this[_updateNames].includes(name)) - this[_explicitRequests].add(name) tree.package.dependencies = tree.package.dependencies || {} - if (this[_updateAll] || this[_updateNames].includes(name)) + const updateName = this[_updateNames].includes(name) + if (this[_updateAll] || updateName) { + if (updateName) + globalExplicitUpdateNames.push(name) tree.package.dependencies[name] = '*' + } } } if (this.auditReport && this.auditReport.size > 0) - this[_queueVulnDependents](options) + await this[_queueVulnDependents](options) - if (options.rm && options.rm.length) { - addRmPkgDeps.rm(tree.package, options.rm) - for (const name of options.rm) - this[_explicitRequests].add(name) + const { add, rm } = options + + if (rm && rm.length) { + addRmPkgDeps.rm(tree.package, rm) + for (const name of rm) + this[_explicitRequests].add({ from: tree, name, action: 'DELETE' }) } - if (options.add) - await this[_add](options) + if (add && add.length) + await this[_add](tree, options) - // triggers a refresh of all edgesOut - if (options.add && options.add.length || options.rm && options.rm.length || this[_global]) + // triggers a refresh of all edgesOut. this has to be done BEFORE + // adding the edges to explicitRequests, because the package setter + // resets all edgesOut. + if (add && add.length || rm && rm.length || this[_global]) tree.package = tree.package - process.emit('timeEnd', 'idealTree:userRequests') + + for (const spec of this[_resolvedAdd]) { + if (spec.tree === tree) + this[_explicitRequests].add(tree.edgesOut.get(spec.name)) + } + for (const name of globalExplicitUpdateNames) + this[_explicitRequests].add(tree.edgesOut.get(name)) + + this[_depsQueue].push(tree) } // This returns a promise because we might not have the name yet, // and need to call pacote.manifest to find the name. - [_add] ({add, saveType = null, saveBundle = false}) { + [_add] (tree, {add, saveType = null, saveBundle = false}) { // get the name for each of the specs in the list. // ie, doing `foo@bar` we just return foo // but if it's a url or git, we don't know the name until we // fetch it and look in its manifest. 
- return Promise.all(add.map(rawSpec => - this[_retrieveSpecName](npa(rawSpec)) - .then(add => this[_updateFilePath](add)) - .then(add => this[_followSymlinkPath](add)) - )).then(add => { - this[_resolvedAdd] = add + return Promise.all(add.map(async rawSpec => { + // We do NOT provide the path to npa here, because user-additions + // need to be resolved relative to the CWD the user is in. + const spec = await this[_retrieveSpecName](npa(rawSpec)) + .then(spec => this[_updateFilePath](spec)) + .then(spec => this[_followSymlinkPath](spec)) + spec.tree = tree + return spec + })).then(add => { + this[_resolvedAdd].push(...add) // now add is a list of spec objects with names. // find a home for each of them! - const tree = this.idealTree.target || this.idealTree addRmPkgDeps.add({ pkg: tree.package, add, saveBundle, saveType, path: this.path, + log: this.log, }) - for (const spec of add) - this[_explicitRequests].add(spec.name) }) } @@ -471,7 +491,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { async [_updateFilePath] (spec) { if (spec.type === 'file') - spec = this[_getRelpathSpec](spec, spec.fetchSpec) + return this[_getRelpathSpec](spec, spec.fetchSpec) return spec } @@ -484,7 +504,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { .catch(/* istanbul ignore next */() => null) ) - spec = this[_getRelpathSpec](spec, real) + return this[_getRelpathSpec](spec, real) } return spec } @@ -493,7 +513,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { /* istanbul ignore else - should also be covered by realpath failure */ if (filepath) { const { name } = spec - const tree = this.idealTree.target || this.idealTree + const tree = this.idealTree.target spec = npa(`file:${relpath(tree.path, filepath)}`, tree.path) spec.name = name } @@ -504,9 +524,9 @@ module.exports = cls => class IdealTreeBuilder extends cls { // what's in the bundle at each published manifest. Without that, we // can't possibly fix bundled deps without breaking a ton of other stuff, // and leaving the user subject to getting it overwritten later anyway. - [_queueVulnDependents] (options) { - for (const {nodes} of this.auditReport.values()) { - for (const node of nodes) { + async [_queueVulnDependents] (options) { + for (const vuln of this.auditReport.values()) { + for (const node of vuln.nodes) { const bundler = node.getBundler() // XXX this belongs in the audit report itself, not here. @@ -538,6 +558,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { if (this[_force] && this.auditReport && this.auditReport.topVulns.size) { options.add = options.add || [] options.rm = options.rm || [] + const nodesTouched = new Set() for (const [name, topVuln] of this.auditReport.topVulns.entries()) { const { simpleRange, @@ -545,7 +566,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { fixAvailable, } = topVuln for (const node of topNodes) { - if (node !== this.idealTree && node !== this.idealTree.target) { + if (!node.isProjectRoot && !node.isWorkspace) { // not something we're going to fix, sorry. have to cd into // that directory and fix it yourself. 
this.log.warn('audit', 'Manual fix required in linked project ' + @@ -565,9 +586,13 @@ module.exports = cls => class IdealTreeBuilder extends cls { : 'outside your stated dependency range' this.log.warn('audit', `Updating ${name} to ${version},` + `which is ${breakingMessage}.`) - options.add.push(`${name}@${version}`) + + await this[_add](node, { add: [`${name}@${version}`] }) + nodesTouched.add(node) } } + for (const node of nodesTouched) + node.package = node.package } } @@ -589,7 +614,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { // probably have their own project associated with them. // for every node with one of the names on the list, we add its - // dependents to the queue to be evaluated. in buildDepStem, + // dependents to the queue to be evaluated. in buildDepStep, // anything on the update names list will get refreshed, even if // it isn't a problem. @@ -617,7 +642,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { const ancient = meta.ancientLockfile const old = meta.loadedFromDisk && !(meta.originalLockfileVersion >= 2) - if (inventory.size === 0 || !ancient && !(old && this[_complete])) + if (inventory.size === 0 || !ancient && !old) return // if the lockfile is from node v5 or earlier, then we'll have to reload @@ -644,10 +669,12 @@ This is a one-time fix-up, please be patient... this.log.silly('inflate', node.location) const { resolved, version, path, name, location, integrity } = node // don't try to hit the registry for linked deps - const useResolved = !version || - resolved && resolved.startsWith('file:') - const id = useResolved ? resolved : version - const spec = npa.resolve(name, id, path) + const useResolved = resolved && ( + !version || resolved.startsWith('file:') + ) + const id = useResolved ? resolved + : version || `file:${node.path}` + const spec = npa.resolve(name, id, dirname(path)) const sloc = location.substr('node_modules/'.length) const t = `idealTree:inflate:${sloc}` this.addTracker(t) @@ -684,7 +711,7 @@ This is a one-time fix-up, please be patient... // or extraneous. [_buildDeps] () { process.emit('time', 'idealTree:buildDeps') - const tree = this.idealTree.target || this.idealTree + const tree = this.idealTree.target this[_depsQueue].push(tree) this.log.silly('idealTree', 'buildDeps') this.addTracker('idealTree', tree.name, '') @@ -707,7 +734,7 @@ This is a one-time fix-up, please be patient... // sort physically shallower deps up to the front of the queue, // because they'll affect things deeper in, then alphabetical this[_depsQueue].sort((a, b) => - (a.depth - b.depth) || a.path.localeCompare(b.path)) + (a.depth - b.depth) || a.path.localeCompare(b.path, 'en')) const node = this[_depsQueue].shift() const bd = node.package.bundleDependencies @@ -742,7 +769,11 @@ This is a one-time fix-up, please be patient... const Arborist = this.constructor const opt = { ...this.options } await cacache.tmp.withTmp(this.cache, opt, async path => { - await pacote.extract(node.resolved, path, opt) + await pacote.extract(node.resolved, path, { + ...opt, + resolved: node.resolved, + integrity: node.integrity, + }) if (hasShrinkwrap) { await new Arborist({ ...this.options, path }) @@ -800,7 +831,7 @@ This is a one-time fix-up, please be patient... 
const tasks = [] const peerSource = this[_peerSetSource].get(node) || node for (const edge of this[_problemEdges](node)) { - if (this[_edgesOverridden].has(edge)) + if (edge.overridden) continue // peerSetSource is only relevant when we have a peerEntryEdge @@ -844,34 +875,101 @@ This is a one-time fix-up, please be patient... tasks.push({edge, dep}) } - const placed = tasks - .sort((a, b) => a.edge.name.localeCompare(b.edge.name)) - .map(({ edge, dep }) => this[_placeDep](dep, node, edge)) + const placeDeps = tasks + .sort((a, b) => a.edge.name.localeCompare(b.edge.name, 'en')) + .map(({ edge, dep }) => new PlaceDep({ + edge, + dep, + + explicitRequest: this[_explicitRequests].has(edge), + updateNames: this[_updateNames], + auditReport: this.auditReport, + force: this[_force], + preferDedupe: this[_preferDedupe], + legacyBundling: this[_legacyBundling], + strictPeerDeps: this[_strictPeerDeps], + legacyPeerDeps: this.legacyPeerDeps, + globalStyle: this[_globalStyle], + })) const promises = [] - for (const set of placed) { - for (const node of set) { - this[_mutateTree] = true - this.addTracker('idealTree', node.name, node.location) - this[_depsQueue].push(node) - - // we're certainly going to need these soon, fetch them asap - // if it fails at this point, though, dont' worry because it - // may well be an optional dep that has gone missing. it'll - // fail later anyway. - const from = fromPath(node) - promises.push(...this[_problemEdges](node).map(e => - this[_fetchManifest](npa.resolve(e.name, e.spec, from)) - .catch(er => null))) - } + for (const pd of placeDeps) { + // placing a dep is actually a tree of placing the dep itself + // and all of its peer group that aren't already met by the tree + depth({ + tree: pd, + getChildren: pd => pd.children, + visit: pd => { + const { placed, edge, canPlace: cpd } = pd + // if we didn't place anything, nothing to do here + if (!placed) + return + + // we placed something, that means we changed the tree + if (placed.errors.length) + this[_loadFailures].add(placed) + this[_mutateTree] = true + if (cpd.canPlaceSelf === OK) { + for (const edgeIn of placed.edgesIn) { + if (edgeIn === edge) + continue + const { from, valid, overridden } = edgeIn + if (!overridden && !valid && !this[_depsSeen].has(from)) { + this.addTracker('idealTree', from.name, from.location) + this[_depsQueue].push(edgeIn.from) + } + } + } else { + /* istanbul ignore else - should be only OK or REPLACE here */ + if (cpd.canPlaceSelf === REPLACE) { + // this may also create some invalid edges, for example if we're + // intentionally causing something to get nested which was + // previously placed in this location. + for (const edgeIn of placed.edgesIn) { + if (edgeIn === edge) + continue + + const { valid, overridden } = edgeIn + if (!valid && !overridden) { + // if it's already been visited, we have to re-visit + // otherwise, just enqueue normally. + this[_depsSeen].delete(edgeIn.from) + this[_depsQueue].push(edgeIn.from) + } + } + } + } + + /* istanbul ignore if - should be impossible */ + if (cpd.canPlaceSelf === CONFLICT) { + debug(() => { + const er = new Error('placed with canPlaceSelf=CONFLICT') + throw Object.assign(er, { placeDep: pd }) + }) + return + } + + // lastly, also check for the missing deps of the node we placed + this[_depsQueue].push(placed) + + // pre-fetch any problem edges, since we'll need these soon + // if it fails at this point, though, dont' worry because it + // may well be an optional dep that has gone missing. it'll + // fail later anyway. 
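For reference, depth here is treeverse's depth-first walker (reify.js in this same diff imports it as dfwalk). A toy sketch of how it visits a PlaceDep-like tree, using plain objects in place of the real class:

const { depth } = require('treeverse')

// stand-in for a placed dep plus the peers placed along with it
const placed = {
  name: 'a',
  children: [
    { name: 'peer-b', children: [] },
    { name: 'peer-c', children: [{ name: 'nested-peer', children: [] }] },
  ],
}

const order = []
depth({
  tree: placed,
  getChildren: node => node.children,
  visit: node => { order.push(node.name) },
})
// order: [ 'a', 'peer-b', 'peer-c', 'nested-peer' ]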
+ const from = fromPath(placed) + promises.push(...this[_problemEdges](placed).map(e => + this[_fetchManifest](npa.resolve(e.name, e.spec, from)) + .catch(er => null))) + }, + }) } - await Promise.all(promises) for (const { to } of node.edgesOut.values()) { - if (to && to.isLink) + if (to && to.isLink && to.target) this[_linkNodes].add(to) } + await Promise.all(promises) return this[_buildDepStep]() } @@ -881,6 +979,8 @@ This is a one-time fix-up, please be patient... // create a virtual root node with the same deps as the node that // is requesting this one, so that we can get all the peer deps in // a context where they're likely to be resolvable. + // Note that the virtual root will also have virtual copies of the + // targets of any child Links, so that they resolve appropriately. const parent = parent_ || this[_virtualRoot](edge.from) const realParent = edge.peer ? edge.from.resolveParent : edge.from @@ -934,11 +1034,23 @@ This is a one-time fix-up, please be patient... return this[_virtualRoots].get(node) const vr = new Node({ - path: '/virtual-root', + path: node.realpath, sourceReference: node, legacyPeerDeps: this.legacyPeerDeps, }) + // also need to set up any targets from any link deps, so that + // they are properly reflected in the virtual environment + for (const child of node.children.values()) { + if (child.isLink) { + new Node({ + path: child.realpath, + sourceReference: child.target, + root: vr, + }) + } + } + this[_virtualRoots].set(node, vr) return vr } @@ -954,7 +1066,8 @@ This is a one-time fix-up, please be patient... // also skip over any nodes in the tree that failed to load, since those // will crash the install later on anyway. - const bd = node.isProjectRoot ? null : node.package.bundleDependencies + const bd = node.isProjectRoot || node.isWorkspace ? null + : node.package.bundleDependencies const bundled = new Set(bd || []) return [...node.edgesOut.values()] @@ -975,7 +1088,7 @@ This is a one-time fix-up, please be patient... // if it's peerOptional and not explicitly requested. if (!edge.to) { return edge.type !== 'peerOptional' || - this[_explicitRequests].has(edge.name) + this[_explicitRequests].has(edge) } // If the edge has an error, there's a problem. @@ -990,8 +1103,8 @@ This is a one-time fix-up, please be patient... if (this[_isVulnerable](edge.to)) return true - // If the user has explicitly asked to install this package, it's a problem. - if (node.isProjectRoot && this[_explicitRequests].has(edge.name)) + // If the user has explicitly asked to install this package, it's a "problem". + if (this[_explicitRequests].has(edge)) return true // No problems! @@ -1076,7 +1189,7 @@ This is a one-time fix-up, please be patient... // we typically only install non-optional peers, but we have to // factor them into the peerSet so that we can avoid conflicts .filter(e => e.peer && !(e.valid && e.to)) - .sort(({name: a}, {name: b}) => a.localeCompare(b)) + .sort(({name: a}, {name: b}) => a.localeCompare(b, 'en')) for (const edge of peerEdges) { // already placed this one, and we're happy with it. @@ -1111,14 +1224,26 @@ This is a one-time fix-up, please be patient... // allow it. either we're overriding, or it's not something // that will be installed by default anyway, and we'll fail when // we get to the point where we need to, if we need to. 
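Earlier in this hunk the placement loop pre-fetches manifests for the new node's problem edges, resolving each spec with npa.resolve against the dependent's real on-disk location so file: and relative specs land in the right place, and swallowing failures because the edge may be an optional dep that is allowed to go missing. A sketch of that pattern with made-up values, assuming the fetch bottoms out in pacote.manifest:

const npa = require('npm-package-arg')
const pacote = require('pacote')

// hypothetical dependent location and edge: node_modules/a declares "b": "^2.0.0"
const from = '/home/user/project/node_modules/a'
const spec = npa.resolve('b', '^2.0.0', from)

// warm the manifest cache now; errors are ignored here and surface
// later only if the dep turns out to be required after all
const prefetch = pacote.manifest(spec).catch(() => null)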
- if (conflictOK || !required.has(dep)) + if (conflictOK || !required.has(dep)) { + edge.overridden = true continue + } // problem - this[_failPeerConflict](edge) + this[_failPeerConflict](edge, parentEdge) } } + // There is something present already, and we're not happy about it + // See if the thing we WOULD be happy with is also going to satisfy + // the other dependents on the current node. + const current = edge.to + const dep = await this[_nodeFromEdge](edge, null, null, required) + if (dep.canReplace(current)) { + await this[_nodeFromEdge](edge, node.parent, null, required) + continue + } + // at this point we know that there is a dep there, and // we don't like it. always fail strictly, always allow forcibly or // in non-strict mode if it's not our fault. don't warn here, because @@ -1131,529 +1256,33 @@ This is a one-time fix-up, please be patient... continue // ok, it's the root, or we're in unforced strict mode, so this is bad - this[_failPeerConflict](edge) + this[_failPeerConflict](edge, parentEdge) } return node } - [_failPeerConflict] (edge) { - const expl = this[_explainPeerConflict](edge) + [_failPeerConflict] (edge, currentEdge) { + const expl = this[_explainPeerConflict](edge, currentEdge) throw Object.assign(new Error('unable to resolve dependency tree'), expl) } - [_explainPeerConflict] (edge) { + [_explainPeerConflict] (edge, currentEdge) { const node = edge.from const curNode = node.resolve(edge.name) - const pc = this[_peerConflict] || { peer: null, current: null } - const current = curNode ? curNode.explain() : pc.current - const peerConflict = pc.peer + const current = curNode.explain() return { code: 'ERESOLVE', current, + // it SHOULD be impossible to get here without a current node in place, + // but this at least gives us something report on when bugs creep into + // the tree handling logic. + currentEdge: currentEdge ? currentEdge.explain() : null, edge: edge.explain(), - peerConflict, strictPeerDeps: this[_strictPeerDeps], force: this[_force], } } - [_warnPeerConflict] (edge) { - // track that we've overridden this edge, so that we don't keep trying - // to re-resolve it in an infinite loop. - this[_edgesOverridden].add(edge) - const expl = this[_explainPeerConflict](edge) - this.log.warn('ERESOLVE', 'overriding peer dependency', expl) - } - - // starting from either node, or in the case of non-root peer deps, - // the node's parent, walk up the tree until we find the first spot - // where this dep cannot be placed, and use the one right before that. - // place dep, requested by node, to satisfy edge - // XXX split this out into a separate method or mixin? It's quite a lot - // of functionality that ought to have its own unit tests more conveniently. - [_placeDep] (dep, node, edge, peerEntryEdge = null, peerPath = []) { - if (edge.to && - !edge.error && - !this[_explicitRequests].has(edge.name) && - !this[_updateNames].includes(edge.name) && - !this[_isVulnerable](edge.to)) - return [] - - // top nodes should still get peer deps from their fsParent if possible, - // and only install locally if there's no other option, eg for a link - // outside of the project root, or for a conflicted dep. - const start = edge.peer && !node.isProjectRoot ? 
node.resolveParent || node - : node - - let target - let canPlace = null - let isSource = false - const source = this[_peerSetSource].get(dep) - for (let check = start; check; check = check.resolveParent) { - // we always give the FIRST place we possibly *can* put this a little - // extra prioritization with peer dep overrides and deduping - if (check === source) - isSource = true - - // if the current location has a peerDep on it, then we can't place here - // this is pretty rare to hit, since we always prefer deduping peers. - const checkEdge = check.edgesOut.get(edge.name) - if (!check.isTop && checkEdge && checkEdge.peer) - continue - - const cp = this[_canPlaceDep](dep, check, edge, peerEntryEdge, peerPath, isSource) - isSource = false - - // anything other than a conflict is fine to proceed with - if (cp !== CONFLICT) { - canPlace = cp - target = check - } else - break - - // nest packages like npm v1 and v2 - // very disk-inefficient - if (this[_legacyBundling]) - break - - // when installing globally, or just in global style, we never place - // deps above the first level. - const tree = this.idealTree && this.idealTree.target || this.idealTree - if (this[_globalStyle] && check.resolveParent === tree) - break - } - - // if we can't find a target, that means that the last placed checked - // (and all the places before it) had a copy already. if we're in - // --force mode, then the user has explicitly said that they're ok - // with conflicts. This can only occur in --force mode in the case - // when a node was added to the tree with a peerOptional dep that we - // ignored, and then later, that edge became invalid, and we fail to - // resolve it. We will warn about it in a moment. - if (!target) { - if (this[_force]) { - // we know that there is a dep (not the root) which is the target - // of this edge, or else it wouldn't have been a conflict. - target = edge.to.resolveParent - canPlace = KEEP - } else - this[_failPeerConflict](edge) - } else { - // it worked, so we clearly have no peer conflicts at this point. - this[_peerConflict] = null - } - - this.log.silly( - 'placeDep', - target.location || 'ROOT', - `${dep.name}@${dep.version}`, - canPlace.description || /* istanbul ignore next */ canPlace, - `for: ${node.package._id || node.location}`, - `want: ${edge.spec || '*'}` - ) - - // Can only get KEEP here if the original edge was valid, - // and we're checking for an update but it's already up to date. - if (canPlace === KEEP) { - if (edge.peer && !target.children.get(edge.name).satisfies(edge)) { - // this is an overridden peer dep - this[_warnPeerConflict](edge) - } - return [] - } - - // figure out which of this node's peer deps will get placed as well - const virtualRoot = dep.parent - - const newDep = new dep.constructor({ - name: dep.name, - pkg: dep.package, - resolved: dep.resolved, - integrity: dep.integrity, - legacyPeerDeps: this.legacyPeerDeps, - error: dep.errors[0], - ...(dep.target ? { target: dep.target, realpath: dep.target.path } : {}), - }) - if (this[_loadFailures].has(dep)) - this[_loadFailures].add(newDep) - - const placed = [newDep] - const oldChild = target.children.get(edge.name) - if (oldChild) { - // if we're replacing, we should also remove any nodes for edges that - // are now invalid, and where this (or its deps) is the only dependent, - // and also recurse on that pruning. 
Otherwise leaving that dep node - // around can result in spurious conflicts pushing nodes deeper into - // the tree than needed in the case of cycles that will be removed - // later anyway. - const oldDeps = [] - for (const [name, edge] of oldChild.edgesOut.entries()) { - if (!newDep.edgesOut.has(name) && edge.to) - oldDeps.push(...gatherDepSet([edge.to], e => e.to !== edge.to)) - } - newDep.replace(oldChild) - this[_pruneForReplacement](newDep, oldDeps) - // this may also create some invalid edges, for example if we're - // intentionally causing something to get nested which was previously - // placed in this location. - for (const edgeIn of newDep.edgesIn) { - if (edgeIn.invalid && edgeIn !== edge) { - this[_depsQueue].push(edgeIn.from) - this[_depsSeen].delete(edgeIn.from) - } - } - } else - newDep.parent = target - - if (edge.peer && !newDep.satisfies(edge)) { - // this is an overridden peer dep - this[_warnPeerConflict](edge) - } - - // If the edge is not an error, then we're updating something, and - // MAY end up putting a better/identical node further up the tree in - // a way that causes an unnecessary duplication. If so, remove the - // now-unnecessary node. - if (edge.valid && edge.to.parent !== target && newDep.canReplace(edge.to)) - edge.to.parent = null - - // visit any dependents who are upset by this change - // if it's an angry overridden peer edge, however, make sure we - // skip over it! - for (const edgeIn of newDep.edgesIn) { - if (edgeIn !== edge && !edgeIn.valid && !this[_depsSeen].has(edge.from)) { - this.addTracker('idealTree', edgeIn.from.name, edgeIn.from.location) - this[_depsQueue].push(edgeIn.from) - } - } - - // in case we just made some duplicates that can be removed, - // prune anything deeper in the tree that can be replaced by this - if (this.idealTree) { - for (const node of this.idealTree.inventory.query('name', newDep.name)) { - if (node !== newDep && - node.isDescendantOf(target) && - !node.inShrinkwrap && - !node.inBundle && - node.canReplaceWith(newDep)) { - // don't prune if the dupe is necessary! - // root (a, d) - // +-- a (b, c2) - // | +-- b (c2) <-- place c2 for b, lands at root - // +-- d (e) - // +-- e (c1, d) - // +-- c1 - // +-- f (c2) - // +-- c2 <-- pruning this would be bad - - const mask = node.parent !== target && - node.parent && - node.parent.parent && - node.parent.parent !== target && - node.parent.parent.resolve(newDep.name) - - if (!mask || mask === newDep || node.canReplaceWith(mask)) - node.parent = null - } - } - } - - // also place its unmet or invalid peer deps at this location - // note that newDep has now been removed from the virtualRoot set - // by virtue of being placed in the target's node_modules. - // loop through any peer deps from the thing we just placed, and place - // those ones as well. it's safe to do this with the virtual nodes, - // because we're copying rather than moving them out of the virtual root, - // otherwise they'd be gone and the peer set would change throughout - // this loop. - for (const peerEdge of newDep.edgesOut.values()) { - const peer = virtualRoot.children.get(peerEdge.name) - - // Note: if the virtualRoot *doesn't* have the peer, then that means - // it's an optional peer dep. If it's not being properly met (ie, - // peerEdge.valid is false), that this is likely heading for an - // ERESOLVE error, unless it can walk further up the tree. 
- if (!peerEdge.peer || peerEdge.valid || !peer) - continue - - const peerPlaced = this[_placeDep]( - peer, newDep, peerEdge, peerEntryEdge || edge, peerPath) - placed.push(...peerPlaced) - } - - // we're done with this now, clean it up. - this[_virtualRoots].delete(virtualRoot.sourceReference) - - return placed - } - - [_pruneForReplacement] (node, oldDeps) { - // gather up all the invalid edgesOut, and any now-extraneous - // deps that the new node doesn't depend on but the old one did. - const invalidDeps = new Set([...node.edgesOut.values()] - .filter(e => e.to && !e.valid).map(e => e.to)) - for (const dep of oldDeps) { - const set = gatherDepSet([dep], e => e.to !== dep && e.valid) - for (const dep of set) - invalidDeps.add(dep) - } - - // ignore dependency edges from the node being replaced, but - // otherwise filter the set down to just the set with no - // dependencies from outside the set, except the node in question. - const deps = gatherDepSet(invalidDeps, edge => - edge.from !== node && edge.to !== node && edge.valid) - - // now just delete whatever's left, because it's junk - for (const dep of deps) - dep.parent = null - } - - // check if we can place DEP in TARGET to satisfy EDGE - // Need to verify: - // - no child by that name there already - // - target does not have a peer dep on name - // - no higher-level pkg by that name and incompatible spec is depended on - // by anything lower in the tree. - // - node's peer deps and meta-peer deps are siblings in a virtual root at - // this point. make sure that the whole family can come along, so apply - // the same checks to each of them. They may land higher up in the tree, - // but we need to know that they CAN live here. - // Responses: - // - OK - Yes, because there is nothing there and no conflicts caused - // - REPLACE - Yes, and you can clobber what's there - // - KEEP - No, but what's there is fine - // - CONFLICT - You may not put that there - // - // Check peers on OK or REPLACE. KEEP and CONFLICT do not require peer - // checking, because either we're leaving it alone, or it won't work anyway. - // When we check peers, we pass along the peerEntryEdge to track the - // original edge that caused us to load the family of peer dependencies. - [_canPlaceDep] (dep, target, edge, peerEntryEdge = null, peerPath = [], isSource = false) { - /* istanbul ignore next */ - debug(() => { - if (!dep) - throw new Error('no dep??') - }) - const entryEdge = peerEntryEdge || edge - const source = this[_peerSetSource].get(dep) - - isSource = isSource || target === source - // if we're overriding the source, then we care if the *target* is - // ours, even if it wasn't actually the original source, since we - // are depending on something that has a dep that can't go in its own - // folder. for example, a -> b, b -> PEER(a). Even though a is the - // source, b has to be installed up a level, and if the root package - // depends on a, and it has a conflict, it's our problem. So, the root - // (or whatever is bringing in a) becomes the "effective source" for - // the purposes of this calculation. - const { isProjectRoot, isWorkspace } = isSource ? target : source || {} - const isMine = isProjectRoot || isWorkspace - - // Useful testing thingie right here. - // peerEntryEdge should *always* be a non-peer dependency, or a peer - // dependency from the root node. When we get spurious ERESOLVE errors, - // or *don't* get ERESOLVE errors when we should, check to see if this - // fails, because it MAY mean we got off track somehow. 
- /* istanbul ignore next - debug check, should be impossible */ - debug(() => { - if (peerEntryEdge && peerEntryEdge.peer && !peerEntryEdge.from.isTop) - throw new Error('lost original peerEntryEdge somehow?') - }) - - if (target.children.has(edge.name)) { - const current = target.children.get(edge.name) - - // same thing = keep - if (dep.matches(current)) - return KEEP - - const { version: curVer } = current - const { version: newVer } = dep - const tryReplace = curVer && newVer && semver.gte(newVer, curVer) - if (tryReplace && dep.canReplace(current)) { - const res = this[_canPlacePeers](dep, target, edge, REPLACE, peerEntryEdge, peerPath, isSource) - /* istanbul ignore else - It's extremely rare that a replaceable - * node would be a conflict, if the current one wasn't a conflict, - * but it is theoretically possible if peer deps are pinned. In - * that case we treat it like any other conflict, and keep trying */ - if (res !== CONFLICT) - return res - } - - // ok, can't replace the current with new one, but maybe current is ok? - // no need to check if it's a peer that's valid to be here, because - // peers are always placed along with their entry source - if (edge.satisfiedBy(current)) - return KEEP - - // if we prefer deduping, then try replacing newer with older - // we always prefer to dedupe peers, because they are trying - // a bit harder to be singletons. - const preferDedupe = this[_preferDedupe] || edge.peer - if (preferDedupe && !tryReplace && dep.canReplace(current)) { - const res = this[_canPlacePeers](dep, target, edge, REPLACE, peerEntryEdge, peerPath, isSource) - /* istanbul ignore else - It's extremely rare that a replaceable - * node would be a conflict, if the current one wasn't a conflict, - * but it is theoretically possible if peer deps are pinned. In - * that case we treat it like any other conflict, and keep trying */ - if (res !== CONFLICT) - return res - } - - // check for conflict override cases. - // first: is this the only place this thing can go? If the target is - // the source, then one of these things are true. - // - // 1. the conflicted dep was deduped up to here from a lower dependency - // w -> (x,y) - // x -> (z) - // y -> PEER(p@1) - // z -> (q) - // q -> (p@2) - // - // When building, let's say that x is fully placed, with all of its - // deps, and we're _adding_ y. Since the peer on p@1 was not initially - // present, it's been deduped up to w, and now needs to be pushed out. - // Replace it, and potentially also replace its peer set (though that'll - // be accomplished by making the same determination when we call - // _canPlacePeers) - // - // 2. the dep we're TRYING to place here ought to be overridden by the - // one that's here now, because current is (a) a direct dep of the - // source, or (b) an already-placed peer in a conflicted peer set, or - // (c) an already-placed peer in a different peer set at the same level. - // If strict or ours, conflict. Otherwise, keep. - if (isSource) { - // check to see if the current module could go deeper in the tree - let canReplace = true - // only do this check when we're placing peers. when we're placing - // the original in the source, we know that the edge from the source - // is the thing we're trying to place, so its peer set will need to be - // placed here as well. the virtualRoot already has the appropriate - // overrides applied. 
- if (peerEntryEdge) { - const peerSet = getPeerSet(current) - OUTER: for (const p of peerSet) { - // if any have a non-peer dep from the target, or a peer dep if - // the target is root, then cannot safely replace and dupe deeper. - for (const edge of p.edgesIn) { - if (peerSet.has(edge.from)) - continue - - // only respect valid edges, however, since we're likely trying - // to fix the very one that's currently broken! If the virtual - // root's replacement is ok, and doesn't have any invalid edges - // indicating that it was an overridden peer, then ignore the - // conflict and continue. If it WAS an override, then we need - // to get the conflict here so that we can decide whether to - // accept the current dep node, clobber it, or fail the install. - if (edge.from === target && edge.valid) { - const rep = dep.parent.children.get(edge.name) - const override = rep && ([...rep.edgesIn].some(e => !e.valid)) - if (!rep || !rep.satisfies(edge) || override) { - canReplace = false - break OUTER - } - } - } - } - } - if (canReplace) { - const ret = this[_canPlacePeers](dep, target, edge, REPLACE, peerEntryEdge, peerPath, isSource) - /* istanbul ignore else - extremely rare that the peer set would - * conflict if we can replace the node in question, but theoretically - * possible, if peer deps are pinned aggressively. */ - if (ret !== CONFLICT) - return ret - } - - // so it's not a deeper dep that's been deduped. That means that the - // only way it could have ended up here is if it's a conflicted peer. - /* istanbul ignore else - would have already crashed if not forced, - * and either mine or strict, when creating the peerSet. Keeping this - * check so that we're not only relying on action at a distance. */ - if (!this[_strictPeerDeps] && !isMine || this[_force]) { - this[_warnPeerConflict](edge, dep) - return KEEP - } - } - - // no justification for overriding, and no agreement possible. - return CONFLICT - } - - // no existing node at this location! - // check to see if the target doesn't have a child by that name, - // but WANTS one, and won't be happy with this one. if this is the - // edge we're looking to resolve, then not relevant, of course. - if (target !== entryEdge.from && target.edgesOut.has(dep.name)) { - const targetEdge = target.edgesOut.get(dep.name) - // It might be that the dep would not be valid here, BUT some other - // version would. Could to try to resolve that, but that makes this no - // longer a pure synchronous function. ugh. - // This is a pretty unlikely scenario in a normal install, because we - // resolve the peer dep set against the parent dependencies, and - // presumably they all worked together SOMEWHERE to get published in the - // first place, and since we resolve shallower deps before deeper ones, - // this can only occur by a child having a peer dep that does not satisfy - // the parent. It can happen if we're doing a deep update limited by - // a specific name, however, or if a dep makes an incompatible change - // to its peer dep in a non-semver-major version bump, or if the parent - // is unbounded in its dependency list. - if (!targetEdge.satisfiedBy(dep)) - return CONFLICT - } - - // check to see what that name resolves to here, and who may depend on - // being able to reach it by crawling up past this parent. we know - // at this point that it's not the target's direct child node. if it's - // a direct dep of the target, we just make the invalid edge and - // resolve it later. 
- const current = target !== entryEdge.from && target.resolve(dep.name) - if (current) { - for (const edge of current.edgesIn.values()) { - if (edge.from.isDescendantOf(target) && edge.valid) { - if (!edge.satisfiedBy(dep)) - return CONFLICT - } - } - } - - // no objections! ok to place here - return this[_canPlacePeers](dep, target, edge, OK, peerEntryEdge, peerPath, isSource) - } - - // make sure the family of peer deps can live here alongside it. - // this doesn't guarantee that THIS solution will be the one we take, - // but it does establish that SOME solution exists at this level in - // the tree. - [_canPlacePeers] (dep, target, edge, ret, peerEntryEdge, peerPath, isSource) { - // do not go in cycles when we're resolving a peer group - if (!dep.parent || peerEntryEdge && peerPath.includes(dep)) - return ret - - const entryEdge = peerEntryEdge || edge - peerPath = [...peerPath, dep] - - for (const peerEdge of dep.edgesOut.values()) { - if (!peerEdge.peer || !peerEdge.to) - continue - const peer = peerEdge.to - const canPlacePeer = this[_canPlaceDep](peer, target, peerEdge, entryEdge, peerPath, isSource) - if (canPlacePeer !== CONFLICT) - continue - - const current = target.resolve(peer.name) - this[_peerConflict] = { - peer: peer.explain(peerEdge), - current: current && current.explain(), - } - return CONFLICT - } - return ret - } - // go through all the links in the this[_linkNodes] set // for each one: // - if outside the root, ignore it, assume it's fine, it's not our problem @@ -1670,7 +1299,8 @@ This is a one-time fix-up, please be patient... if (link.root !== this.idealTree) continue - const external = /^\.\.(\/|$)/.test(relpath(this.path, link.realpath)) + const tree = this.idealTree.target + const external = !link.target.isDescendantOf(tree) // outside the root, somebody else's problem, ignore it if (external && !this[_follow]) @@ -1734,6 +1364,7 @@ This is a one-time fix-up, please be patient... 
const needPrune = metaFromDisk && (mutateTree || flagsSuspect) if (this[_prune] && needPrune) this[_idealTreePrune]() + process.emit('timeEnd', 'idealTree:fixDepFlags') } diff --git a/node_modules/@npmcli/arborist/lib/arborist/index.js b/node_modules/@npmcli/arborist/lib/arborist/index.js index 09a6f700547f2..b26a26c2be2ab 100644 --- a/node_modules/@npmcli/arborist/lib/arborist/index.js +++ b/node_modules/@npmcli/arborist/lib/arborist/index.js @@ -28,7 +28,8 @@ const {resolve} = require('path') const {homedir} = require('os') -const procLog = require('../proc-log.js') +const procLog = require('proc-log') +const { saveTypeMap } = require('../add-rm-pkg-deps.js') const mixins = [ require('../tracker.js'), @@ -44,6 +45,7 @@ const mixins = [ ] const Base = mixins.reduce((a, b) => b(a), require('events')) +const getWorkspaceNodes = require('../get-workspace-nodes.js') class Arborist extends Base { constructor (options = {}) { @@ -54,13 +56,44 @@ class Arborist extends Base { ...options, path: options.path || '.', cache: options.cache || `${homedir()}/.npm/_cacache`, - packumentCache: new Map(), + packumentCache: options.packumentCache || new Map(), log: options.log || procLog, } + if (options.saveType && !saveTypeMap.get(options.saveType)) + throw new Error(`Invalid saveType ${options.saveType}`) this.cache = resolve(this.options.cache) this.path = resolve(this.options.path) process.emit('timeEnd', 'arborist:ctor') } + + // returns an array of the actual nodes for all the workspaces + workspaceNodes (tree, workspaces) { + return getWorkspaceNodes(tree, workspaces, this.log) + } + + // returns a set of workspace nodes and all their deps + workspaceDependencySet (tree, workspaces) { + const wsNodes = this.workspaceNodes(tree, workspaces) + const set = new Set(wsNodes) + const extraneous = new Set() + for (const node of set) { + for (const edge of node.edgesOut.values()) { + const dep = edge.to + if (dep) { + set.add(dep) + if (dep.isLink) + set.add(dep.target) + } + } + for (const child of node.children.values()) { + if (child.extraneous) + extraneous.add(child) + } + } + for (const extra of extraneous) + set.add(extra) + return set + } } module.exports = Arborist diff --git a/node_modules/@npmcli/arborist/lib/arborist/load-actual.js b/node_modules/@npmcli/arborist/lib/arborist/load-actual.js index 49e76e265b816..86856d868b426 100644 --- a/node_modules/@npmcli/arborist/lib/arborist/load-actual.js +++ b/node_modules/@npmcli/arborist/lib/arborist/load-actual.js @@ -22,6 +22,7 @@ const _loadFSTree = Symbol('loadFSTree') const _loadFSChildren = Symbol('loadFSChildren') const _findMissingEdges = Symbol('findMissingEdges') const _findFSParents = Symbol('findFSParents') +const _resetDepFlags = Symbol('resetDepFlags') const _actualTreeLoaded = Symbol('actualTreeLoaded') const _rpcache = Symbol.for('realpathCache') @@ -32,6 +33,7 @@ const _loadActual = Symbol('loadActual') const _loadActualVirtually = Symbol('loadActualVirtually') const _loadActualActually = Symbol('loadActualActually') const _loadWorkspaces = Symbol.for('loadWorkspaces') +const _loadWorkspaceTargets = Symbol('loadWorkspaceTargets') const _actualTreePromise = Symbol('actualTreePromise') const _actualTree = Symbol('actualTree') const _transplant = Symbol('transplant') @@ -73,6 +75,19 @@ module.exports = cls => class ActualLoader extends cls { this[_topNodes] = new Set() } + [_resetDepFlags] (tree, root) { + // reset all deps to extraneous prior to recalc + if (!root) { + for (const node of tree.inventory.values()) + node.extraneous = true 
+ } + + // only reset root flags if we're not re-rooting, + // otherwise leave as-is + calcDepFlags(tree, !root) + return tree + } + // public method async loadActual (options = {}) { // allow the user to set options on the ctor as well. @@ -87,6 +102,7 @@ module.exports = cls => class ActualLoader extends cls { return this.actualTree ? this.actualTree : this[_actualTreePromise] ? this[_actualTreePromise] : this[_actualTreePromise] = this[_loadActual](options) + .then(tree => this[_resetDepFlags](tree, options.root)) .then(tree => this.actualTree = treeCheck(tree)) } @@ -150,18 +166,21 @@ module.exports = cls => class ActualLoader extends cls { await new this.constructor({...this.options}).loadVirtual({ root: this[_actualTree], }) + await this[_loadWorkspaces](this[_actualTree]) + this[_transplant](root) return this[_actualTree] } async [_loadActualActually] ({ root, ignoreMissing, global }) { await this[_loadFSTree](this[_actualTree]) + await this[_loadWorkspaces](this[_actualTree]) + await this[_loadWorkspaceTargets](this[_actualTree]) if (!ignoreMissing) await this[_findMissingEdges]() this[_findFSParents]() this[_transplant](root) - await this[_loadWorkspaces](this[_actualTree]) if (global) { // need to depend on the children, or else all of them // will end up being flagged as extraneous, since the @@ -173,21 +192,40 @@ module.exports = cls => class ActualLoader extends cls { dependencies[name] = dependencies[name] || '*' actualRoot.package = { ...actualRoot.package, dependencies } } - // only reset root flags if we're not re-rooting, otherwise leave as-is - calcDepFlags(this[_actualTree], !root) return this[_actualTree] } + // if there are workspace targets without Link nodes created, load + // the targets, so that we know what they are. + async [_loadWorkspaceTargets] (tree) { + if (!tree.workspaces || !tree.workspaces.size) + return + + const promises = [] + for (const path of tree.workspaces.values()) { + if (!this[_cache].has(path)) { + const p = this[_loadFSNode]({ path, root: this[_actualTree] }) + .then(node => this[_loadFSTree](node)) + promises.push(p) + } + } + await Promise.all(promises) + } + [_transplant] (root) { if (!root || root === this[_actualTree]) return + this[_actualTree][_changePath](root.path) for (const node of this[_actualTree].children.values()) { if (!this[_transplantFilter](node)) - node.parent = null + node.root = null } root.replace(this[_actualTree]) + for (const node of this[_actualTree].fsChildren) + node.root = this[_transplantFilter](node) ? root : null + this[_actualTree] = root } @@ -277,7 +315,7 @@ module.exports = cls => class ActualLoader extends cls { [_loadFSTree] (node) { const did = this[_actualTreeLoaded] - node = node.target || node + node = node.target // if a Link target has started, but not completed, then // a Promise will be in the cache to indicate this. 
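The workspaceDependencySet helper added to lib/arborist/index.js above leans on the fact that a for...of loop over a Set also visits members added during the iteration, so the set doubles as its own work queue while it walks out from the workspace nodes. A stripped-down sketch of the same pattern over plain objects (names are made up):

// toy nodes shaped loosely like arborist nodes: edgesOut values have a `to`
const c = { name: 'c', edgesOut: new Map() }
const b = { name: 'b', edgesOut: new Map([['c', { to: c }]]) }
const a = { name: 'a', edgesOut: new Map([['b', { to: b }]]) }

function dependencySet (roots) {
  const set = new Set(roots)
  for (const node of set) {
    // anything added here is still visited later in this same loop
    for (const edge of node.edgesOut.values()) {
      if (edge.to)
        set.add(edge.to)
    }
  }
  return set
}

// dependencySet([a]) -> Set { a, b, c }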
diff --git a/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js b/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js index 3a38905b77433..d1edcaca01d7e 100644 --- a/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js +++ b/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js @@ -93,7 +93,8 @@ module.exports = cls => class VirtualLoader extends cls { this.virtualTree = root const {links, nodes} = this[resolveNodes](s, root) await this[resolveLinks](links, nodes) - this[assignBundles](nodes) + if (!(s.originalLockfileVersion >= 2)) + this[assignBundles](nodes) if (this[flagsSuspect]) this[reCalcDepFlags](nodes.values()) return root @@ -158,12 +159,12 @@ module.exports = cls => class VirtualLoader extends cls { ...depsToEdges('peerOptional', peerOptional), ...lockWS, ].sort(([atype, aname], [btype, bname]) => - atype.localeCompare(btype) || aname.localeCompare(bname)) + atype.localeCompare(btype, 'en') || aname.localeCompare(bname, 'en')) const rootEdges = [...root.edgesOut.values()] .map(e => [e.type, e.name, e.spec]) .sort(([atype, aname], [btype, bname]) => - atype.localeCompare(btype) || aname.localeCompare(bname)) + atype.localeCompare(btype, 'en') || aname.localeCompare(bname, 'en')) if (rootEdges.length !== lockEdges.length) { // something added or removed @@ -220,22 +221,24 @@ module.exports = cls => class VirtualLoader extends cls { [assignBundles] (nodes) { for (const [location, node] of nodes) { // Skip assignment of parentage for the root package - if (!location) + if (!location || node.isLink && !node.target.location) continue const { name, parent, package: { inBundle }} = node + if (!parent) continue // read inBundle from package because 'package' here is // actually a v2 lockfile metadata entry. - // If the *parent* is also bundled, though, then we assume - // that it's being pulled in just by virtue of that. + // If the *parent* is also bundled, though, or if the parent has + // no dependency on it, then we assume that it's being pulled in + // just by virtue of its parent or a transitive dep being bundled. const { package: ppkg } = parent const { inBundle: parentBundled } = ppkg - if (inBundle && !parentBundled) { + if (inBundle && !parentBundled && parent.edgesOut.has(node.name)) { if (!ppkg.bundleDependencies) ppkg.bundleDependencies = [name] - else if (!ppkg.bundleDependencies.includes(name)) + else ppkg.bundleDependencies.push(name) } } diff --git a/node_modules/@npmcli/arborist/lib/arborist/rebuild.js b/node_modules/@npmcli/arborist/lib/arborist/rebuild.js index 9c52d009d6fd8..8e447bb8f5ad1 100644 --- a/node_modules/@npmcli/arborist/lib/arborist/rebuild.js +++ b/node_modules/@npmcli/arborist/lib/arborist/rebuild.js @@ -14,8 +14,9 @@ const { } = require('@npmcli/node-gyp') const boolEnv = b => b ? '1' : '' -const sortNodes = (a, b) => (a.depth - b.depth) || a.path.localeCompare(b.path) +const sortNodes = (a, b) => (a.depth - b.depth) || a.path.localeCompare(b.path, 'en') +const _workspaces = Symbol.for('workspaces') const _build = Symbol('build') const _resetQueues = Symbol('resetQueues') const _rebuildBundle = Symbol('rebuildBundle') @@ -70,8 +71,14 @@ module.exports = cls => class Builder extends cls { // if we don't have a set of nodes, then just rebuild // the actual tree on disk. 
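Several sort callbacks in this diff change localeCompare(b) to localeCompare(b, 'en'). Without a locale argument the comparison uses the host's default locale, so queue and edge ordering could differ between machines; pinning 'en' keeps the ordering reproducible. A small illustration (the explicit 'sv' call is only there to show how collation can move an entry):

const names = ['äpple', 'apple', 'zebra']

names.slice().sort((a, b) => a.localeCompare(b, 'en'))
// -> [ 'apple', 'äpple', 'zebra' ]

names.slice().sort((a, b) => a.localeCompare(b, 'sv'))
// -> [ 'apple', 'zebra', 'äpple' ]   ('ä' collates after 'z' in Swedish)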
- if (!nodes) - nodes = (await this.loadActual()).inventory.values() + if (!nodes) { + const tree = await this.loadActual() + if (this[_workspaces] && this[_workspaces].length) { + const filterSet = this.workspaceDependencySet(tree, this[_workspaces]) + nodes = tree.inventory.filter(node => filterSet.has(node)) + } else + nodes = tree.inventory.values() + } // separates links nodes so that it can run // prepare scripts and link bins in the expected order @@ -115,10 +122,6 @@ module.exports = cls => class Builder extends cls { await this[_runScripts]('preinstall') if (this[_binLinks] && type !== 'links') await this[_linkAllBins]() - if (!this[_ignoreScripts]) { - await this[_runScripts]('install') - await this[_runScripts]('postinstall') - } // links should also run prepare scripts and only link bins after that if (type === 'links') { @@ -128,6 +131,11 @@ module.exports = cls => class Builder extends cls { await this[_linkAllBins]() } + if (!this[_ignoreScripts]) { + await this[_runScripts]('install') + await this[_runScripts]('postinstall') + } + process.emit('timeEnd', `build:${type}`) } @@ -161,7 +169,7 @@ module.exports = cls => class Builder extends cls { const queue = [...set].sort(sortNodes) for (const node of queue) { - const { package: { bin, scripts = {} } } = node + const { package: { bin, scripts = {} } } = node.target const { preinstall, install, postinstall, prepare } = scripts const tests = { bin, preinstall, install, postinstall, prepare } for (const [key, has] of Object.entries(tests)) { @@ -194,7 +202,7 @@ module.exports = cls => class Builder extends cls { !(meta.originalLockfileVersion >= 2) } - const { package: pkg, hasInstallScript } = node + const { package: pkg, hasInstallScript } = node.target const { gypfile, bin, scripts = {} } = pkg const { preinstall, install, postinstall, prepare } = scripts @@ -255,7 +263,7 @@ module.exports = cls => class Builder extends cls { devOptional, package: pkg, location, - } = node.target || node + } = node.target // skip any that we know we'll be deleting if (this[_trashList].has(path)) diff --git a/node_modules/@npmcli/arborist/lib/arborist/reify.js b/node_modules/@npmcli/arborist/lib/arborist/reify.js index c3ea1b43bbc76..1cfa6034eadb8 100644 --- a/node_modules/@npmcli/arborist/lib/arborist/reify.js +++ b/node_modules/@npmcli/arborist/lib/arborist/reify.js @@ -2,10 +2,10 @@ const onExit = require('../signal-handling.js') const pacote = require('pacote') -const rpj = require('read-package-json-fast') -const { updateDepSpec } = require('../dep-spec.js') const AuditReport = require('../audit-report.js') -const {subset} = require('semver') +const {subset, intersects} = require('semver') +const npa = require('npm-package-arg') +const debug = require('../debug.js') const {dirname, resolve, relative} = require('path') const {depth: dfwalk} = require('treeverse') @@ -13,8 +13,10 @@ const fs = require('fs') const {promisify} = require('util') const symlink = promisify(fs.symlink) const mkdirp = require('mkdirp-infer-owner') +const justMkdirp = require('mkdirp') const moveFile = require('@npmcli/move-file') const rimraf = promisify(require('rimraf')) +const PackageJson = require('@npmcli/package-json') const packageContents = require('@npmcli/installed-package-contents') const { checkEngine, checkPlatform } = require('npm-install-checks') @@ -24,7 +26,8 @@ const Diff = require('../diff.js') const retirePath = require('../retire-path.js') const promiseAllRejectLate = require('promise-all-reject-late') const optionalSet = 
require('../optional-set.js') -const updateRootPackageJson = require('../update-root-package-json.js') +const calcDepFlags = require('../calc-dep-flags.js') +const { saveTypeMap, hasSubKey } = require('../add-rm-pkg-deps.js') const _retiredPaths = Symbol('retiredPaths') const _retiredUnchanged = Symbol('retiredUnchanged') @@ -35,6 +38,8 @@ const _retireShallowNodes = Symbol.for('retireShallowNodes') const _getBundlesByDepth = Symbol('getBundlesByDepth') const _registryResolved = Symbol('registryResolved') const _addNodeToTrashList = Symbol('addNodeToTrashList') +const _workspaces = Symbol.for('workspaces') + // shared by rebuild mixin const _trashList = Symbol.for('trashList') const _handleOptionalFailure = Symbol.for('handleOptionalFailure') @@ -44,14 +49,15 @@ const _loadTrees = Symbol.for('loadTrees') const _diffTrees = Symbol.for('diffTrees') const _createSparseTree = Symbol.for('createSparseTree') const _loadShrinkwrapsAndUpdateTrees = Symbol.for('loadShrinkwrapsAndUpdateTrees') -const _shrinkwrapUnpacked = Symbol('shrinkwrapUnpacked') +const _shrinkwrapInflated = Symbol('shrinkwrapInflated') +const _bundleUnpacked = Symbol('bundleUnpacked') +const _bundleMissing = Symbol('bundleMissing') const _reifyNode = Symbol.for('reifyNode') const _extractOrLink = Symbol('extractOrLink') // defined by rebuild mixin const _checkBins = Symbol.for('checkBins') const _symlink = Symbol('symlink') const _warnDeprecated = Symbol('warnDeprecated') -const _loadAncientPackageDetails = Symbol('loadAncientPackageDetails') const _loadBundlesAndUpdateTrees = Symbol.for('loadBundlesAndUpdateTrees') const _submitQuickAudit = Symbol('submitQuickAudit') const _awaitQuickAudit = Symbol('awaitQuickAudit') @@ -79,9 +85,9 @@ const _omitPeer = Symbol('omitPeer') const _global = Symbol.for('global') +const _pruneBundledMetadeps = Symbol('pruneBundledMetadeps') + // defined by Ideal mixin -const _pruneBundledMetadeps = Symbol.for('pruneBundledMetadeps') -const _explicitRequests = Symbol.for('explicitRequests') const _resolvedAdd = Symbol.for('resolvedAdd') const _usePackageLock = Symbol.for('usePackageLock') const _formatPackageLock = Symbol.for('formatPackageLock') @@ -104,11 +110,15 @@ module.exports = cls => class Reifier extends cls { this.diff = null this[_retiredPaths] = {} - this[_shrinkwrapUnpacked] = new Set() + this[_shrinkwrapInflated] = new Set() this[_retiredUnchanged] = {} this[_sparseTreeDirs] = new Set() this[_sparseTreeRoots] = new Set() this[_trashList] = new Set() + // the nodes we unpack to read their bundles + this[_bundleUnpacked] = new Set() + // child nodes we'd EXPECT to be included in a bundle, but aren't + this[_bundleMissing] = new Set() } // public method @@ -128,12 +138,12 @@ module.exports = cls => class Reifier extends cls { this.addTracker('reify') process.emit('time', 'reify') await this[_validatePath]() - .then(() => this[_loadTrees](options)) - .then(() => this[_diffTrees]()) - .then(() => this[_reifyPackages]()) - .then(() => this[_saveIdealTree](options)) - .then(() => this[_copyIdealToActual]()) - .then(() => this[_awaitQuickAudit]()) + await this[_loadTrees](options) + await this[_diffTrees]() + await this[_reifyPackages]() + await this[_saveIdealTree](options) + await this[_copyIdealToActual]() + await this[_awaitQuickAudit]() this.finishTracker('reify') process.emit('timeEnd', 'reify') @@ -145,7 +155,10 @@ module.exports = cls => class Reifier extends cls { if (this[_packageLockOnly] || this[_dryRun]) return - await mkdirp(resolve(this.path)) + // we do NOT want to set ownership 
on this folder, especially + // recursively, because it can have other side effects to do that + // in a project directory. We just want to make it if it's missing. + await justMkdirp(resolve(this.path)) } async [_reifyPackages] () { @@ -236,9 +249,25 @@ module.exports = cls => class Reifier extends cls { const actualOpt = this[_global] ? { ignoreMissing: true, global: true, - filter: (node, kid) => - this[_explicitRequests].size === 0 || !node.isProjectRoot ? true - : (this.idealTree.edgesOut.has(kid) || this[_explicitRequests].has(kid)), + filter: (node, kid) => { + // if it's not the project root, and we have no explicit requests, + // then we're already into a nested dep, so we keep it + if (this.explicitRequests.size === 0 || !node.isProjectRoot) + return true + + // if we added it as an edgeOut, then we want it + if (this.idealTree.edgesOut.has(kid)) + return true + + // if it's an explicit request, then we want it + const hasExplicit = [...this.explicitRequests] + .some(edge => edge.name === kid) + if (hasExplicit) + return true + + // ignore the rest of the global install folder + return false + }, } : { ignoreMissing: true } if (!this[_global]) { @@ -265,9 +294,36 @@ module.exports = cls => class Reifier extends cls { // to just invalidate the parts that changed, but avoid walking the // whole tree again. + const filterNodes = [] + if (this[_global] && this.explicitRequests.size) { + const idealTree = this.idealTree.target + const actualTree = this.actualTree.target + // we ONLY are allowed to make changes in the global top-level + // children where there's an explicit request. + for (const { name } of this.explicitRequests) { + const ideal = idealTree.children.get(name) + if (ideal) + filterNodes.push(ideal) + const actual = actualTree.children.get(name) + if (actual) + filterNodes.push(actual) + } + } else { + for (const ws of this[_workspaces]) { + const ideal = this.idealTree.children.get(ws) + if (ideal) + filterNodes.push(ideal) + const actual = this.actualTree.children.get(ws) + if (actual) + filterNodes.push(actual) + } + } + // find all the nodes that need to change between the actual // and ideal trees. this.diff = Diff.calculate({ + shrinkwrapInflated: this[_shrinkwrapInflated], + filterNodes, actual: this.actualTree, ideal: this.idealTree, }) @@ -285,7 +341,7 @@ module.exports = cls => class Reifier extends cls { // removed later on in the process. optionally, also mark them // as a retired paths, so that we move them out of the way and // replace them when rolling back on failure. - [_addNodeToTrashList] (node, retire) { + [_addNodeToTrashList] (node, retire = false) { const paths = [node.path, ...node.binPaths] const moves = this[_retiredPaths] this.log.silly('reify', 'mark', retire ? 
'retired' : 'deleted', paths) @@ -355,11 +411,13 @@ module.exports = cls => class Reifier extends cls { return process.emit('time', 'reify:trashOmits') + const filter = node => - node.peer && this[_omitPeer] || - node.dev && this[_omitDev] || - node.optional && this[_omitOptional] || - node.devOptional && this[_omitOptional] && this[_omitDev] + node.top.isProjectRoot && + (node.peer && this[_omitPeer] || + node.dev && this[_omitDev] || + node.optional && this[_omitOptional] || + node.devOptional && this[_omitOptional] && this[_omitDev]) for (const node of this.idealTree.inventory.filter(filter)) this[_addNodeToTrashList](node) @@ -374,7 +432,8 @@ module.exports = cls => class Reifier extends cls { const dirs = this.diff.leaves .filter(diff => { return (diff.action === 'ADD' || diff.action === 'CHANGE') && - !this[_sparseTreeDirs].has(diff.ideal.path) + !this[_sparseTreeDirs].has(diff.ideal.path) && + !diff.ideal.isLink }) .map(diff => diff.ideal.path) @@ -408,9 +467,9 @@ module.exports = cls => class Reifier extends cls { // we need to unpack them, read that shrinkwrap file, and then update // the tree by calling loadVirtual with the node as the root. [_loadShrinkwrapsAndUpdateTrees] () { - const seen = this[_shrinkwrapUnpacked] + const seen = this[_shrinkwrapInflated] const shrinkwraps = this.diff.leaves - .filter(d => (d.action === 'CHANGE' || d.action === 'ADD') && + .filter(d => (d.action === 'CHANGE' || d.action === 'ADD' || !d.action) && d.ideal.hasShrinkwrap && !seen.has(d.ideal) && !this[_trashList].has(d.ideal.path)) @@ -423,7 +482,7 @@ module.exports = cls => class Reifier extends cls { return promiseAllRejectLate(shrinkwraps.map(diff => { const node = diff.ideal seen.add(node) - return this[_reifyNode](node) + return diff.action ? this[_reifyNode](node) : node })) .then(nodes => promiseAllRejectLate(nodes.map(node => new Arborist({ ...this.options, @@ -454,7 +513,7 @@ module.exports = cls => class Reifier extends cls { const { npmVersion, nodeVersion } = this.options const p = Promise.resolve() - .then(() => { + .then(async () => { // when we reify an optional node, check the engine and platform // first. be sure to ignore the --force and --engine-strict flags, // since we always want to skip any optional packages we can't install. @@ -464,11 +523,10 @@ module.exports = cls => class Reifier extends cls { checkEngine(node.package, npmVersion, nodeVersion, false) checkPlatform(node.package, false) } + await this[_checkBins](node) + await this[_extractOrLink](node) + await this[_warnDeprecated](node) }) - .then(() => this[_checkBins](node)) - .then(() => this[_extractOrLink](node)) - .then(() => this[_warnDeprecated](node)) - .then(() => this[_loadAncientPackageDetails](node)) return this[_handleOptionalFailure](node, p) .then(() => { @@ -487,8 +545,8 @@ module.exports = cls => class Reifier extends cls { // Do the best with what we have, or else remove it from the tree // entirely, since we can't possibly reify it. const res = node.resolved ? `${node.name}@${this[_registryResolved](node.resolved)}` - : node.package.name && node.version - ? `${node.package.name}@${node.version}` + : node.packageName && node.version + ? `${node.packageName}@${node.version}` : null // no idea what this thing is. remove it from the tree. 
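The optional-node check above always passes force = false to checkEngine and checkPlatform, so an optional dependency that cannot run here is skipped rather than force-installed and broken. A minimal sketch of those npm-install-checks calls with a made-up manifest:

const { checkEngine, checkPlatform } = require('npm-install-checks')

// hypothetical optional-dep manifest
const pkg = {
  _id: 'example-native@1.0.0',
  engines: { node: '>=14.0.0' },
  os: ['darwin', 'linux'],
  cpu: ['x64', 'arm64'],
}

try {
  // both throw on mismatch; force stays false for optional deps
  checkEngine(pkg, '7.0.0', process.versions.node, false)
  checkPlatform(pkg, false)
} catch (er) {
  // er.code is 'EBADENGINE' or 'EBADPLATFORM'; the caller treats this
  // like any other optional-dep failure and drops the node from the tree
}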
@@ -514,10 +572,11 @@ module.exports = cls => class Reifier extends cls { }) } - [_symlink] (node) { + async [_symlink] (node) { const dir = dirname(node.path) const target = node.realpath const rel = relative(dir, target) + await mkdirp(dir) return symlink(rel, node.path, 'junction') } @@ -527,32 +586,6 @@ module.exports = cls => class Reifier extends cls { this.log.warn('deprecated', `${_id}: ${deprecated}`) } - async [_loadAncientPackageDetails] (node, forceReload = false) { - // If we're loading from a v1 lockfile, load details from the package.json - // that weren't recorded in the old format. - const {meta} = this.idealTree - const ancient = meta.ancientLockfile - const old = meta.loadedFromDisk && !(meta.originalLockfileVersion >= 2) - - // already replaced with the manifest if it's truly ancient - if (node.path && (forceReload || (old && !ancient))) { - // XXX should have a shared location where package.json is read, - // so we don't ever read the same pj more than necessary. - let pkg - try { - pkg = await rpj(node.path + '/package.json') - } catch (err) {} - - if (pkg) { - node.package.bin = pkg.bin - node.package.os = pkg.os - node.package.cpu = pkg.cpu - node.package.engines = pkg.engines - meta.add(node) - } - } - } - // if the node is optional, then the failure of the promise is nonfatal // just add it and its optional set to the trash list. [_handleOptionalFailure] (node, p) { @@ -586,6 +619,7 @@ module.exports = cls => class Reifier extends cls { ) { if (depth === 0) process.emit('time', 'reify:loadBundles') + const maxBundleDepth = bundlesByDepth.get('maxBundleDepth') if (depth > maxBundleDepth) { // if we did something, then prune the tree and update the diffs @@ -601,23 +635,43 @@ module.exports = cls => class Reifier extends cls { // shallower bundle overwriting them with a bundled meta-dep. const set = (bundlesByDepth.get(depth) || []) .filter(node => node.root === this.idealTree && + node.target !== node.root && !this[_trashList].has(node.path)) if (!set.length) return this[_loadBundlesAndUpdateTrees](depth + 1, bundlesByDepth) // extract all the nodes with bundles - return promiseAllRejectLate(set.map(node => this[_reifyNode](node))) + return promiseAllRejectLate(set.map(node => { + this[_bundleUnpacked].add(node) + return this[_reifyNode](node) + })) // then load their unpacked children and move into the ideal tree .then(nodes => - promiseAllRejectLate(nodes.map(node => new this.constructor({ - ...this.options, - path: node.path, - }).loadActual({ - root: node, - // don't transplant any sparse folders we created - transplantFilter: node => node.package._id, - })))) + promiseAllRejectLate(nodes.map(async node => { + const arb = new this.constructor({ + ...this.options, + path: node.path, + }) + const notTransplanted = new Set(node.children.keys()) + await arb.loadActual({ + root: node, + // don't transplant any sparse folders we created + // loadActual will set node.package to {} for empty directories + // if by chance there are some empty folders in the node_modules + // tree for some other reason, then ok, ignore those too. 
+ transplantFilter: node => { + if (node.package._id) { + // it's actually in the bundle if it gets transplanted + notTransplanted.delete(node.name) + return true + } else + return false + }, + }) + for (const name of notTransplanted) + this[_bundleMissing].add(node.children.get(name)) + }))) // move onto the next level of bundled items .then(() => this[_loadBundlesAndUpdateTrees](depth + 1, bundlesByDepth)) } @@ -629,8 +683,13 @@ module.exports = cls => class Reifier extends cls { tree: this.diff, visit: diff => { const node = diff.ideal - if (node && !node.isProjectRoot && node.package.bundleDependencies && - node.package.bundleDependencies.length) { + if (!node) + return + if (node.isProjectRoot) + return + + const { bundleDependencies } = node.package + if (bundleDependencies && bundleDependencies.length) { maxBundleDepth = Math.max(maxBundleDepth, node.depth) if (!bundlesByDepth.has(node.depth)) bundlesByDepth.set(node.depth, [node]) @@ -648,6 +707,27 @@ module.exports = cls => class Reifier extends cls { // https://github.com/npm/cli/issues/1597#issuecomment-667639545 [_pruneBundledMetadeps] (bundlesByDepth) { const bundleShadowed = new Set() + + // Example dep graph: + // root -> (a, c) + // a -> BUNDLE(b) + // b -> c + // c -> b + // + // package tree: + // root + // +-- a + // | +-- b(1) + // | +-- c(1) + // +-- b(2) + // +-- c(2) + // 1. mark everything that's shadowed by anything in the bundle. This + // marks b(2) and c(2). + // 2. anything with edgesIn from outside the set, mark not-extraneous, + // remove from set. This unmarks c(2). + // 3. continue until no change + // 4. remove everything in the set from the tree. b(2) is pruned + // create the list of nodes shadowed by children of bundlers for (const bundles of bundlesByDepth.values()) { // skip the 'maxBundleDepth' item @@ -663,36 +743,50 @@ module.exports = cls => class Reifier extends cls { } } } - let changed = true - while (changed) { - changed = false - for (const shadow of bundleShadowed) { - if (!shadow.extraneous) { - bundleShadowed.delete(shadow) - continue + + // lib -> (a@1.x) BUNDLE(a@1.2.3 (b@1.2.3)) + // a@1.2.3 -> (b@1.2.3) + // a@1.3.0 -> (b@2) + // b@1.2.3 -> () + // b@2 -> (c@2) + // + // root + // +-- lib + // | +-- a@1.2.3 + // | +-- b@1.2.3 + // +-- b@2 <-- shadowed, now extraneous + // +-- c@2 <-- also shadowed, because only dependent is shadowed + for (const shadow of bundleShadowed) { + for (const shadDep of shadow.edgesOut.values()) { + /* istanbul ignore else - pretty unusual situation, just being + * defensive here. Would mean that a bundled dep has a dependency + * that is unmet. which, weird, but if you bundle it, we take + * whatever you put there and assume the publisher knows best. */ + if (shadDep.to) { + bundleShadowed.add(shadDep.to) + shadDep.to.extraneous = true } + } + } + let changed + do { + changed = false + for (const shadow of bundleShadowed) { for (const edge of shadow.edgesIn) { - if (!edge.from.extraneous) { + if (!bundleShadowed.has(edge.from)) { shadow.extraneous = false bundleShadowed.delete(shadow) changed = true - } else { - for (const shadDep of shadow.edgesOut.values()) { - /* istanbul ignore else - pretty unusual situation, just being - * defensive here. Would mean that a bundled dep has a dependency - * that is unmet. which, weird, but if you bundle it, we take - * whatever you put there and assume the publisher knows best. 
*/ - if (shadDep.to) - bundleShadowed.add(shadDep.to) - } + break } } } - } + } while (changed) + for (const shadow of bundleShadowed) { - shadow.parent = null this[_addNodeToTrashList](shadow) + shadow.root = null } } @@ -707,8 +801,14 @@ module.exports = cls => class Reifier extends cls { // NOT return the promise, as the intent is for this to run in parallel // with the reification, and be resolved at a later time. process.emit('time', 'reify:audit') + const options = { ...this.options } + const tree = this.idealTree - this.auditReport = AuditReport.load(this.idealTree, this.options) + // if we're operating on a workspace, only audit the workspace deps + if (this[_workspaces] && this[_workspaces].length) + options.filterSet = this.workspaceDependencySet(tree, this[_workspaces]) + + this.auditReport = AuditReport.load(tree, options) .then(res => { process.emit('timeEnd', 'reify:audit') this.auditReport = res @@ -735,16 +835,17 @@ module.exports = cls => class Reifier extends cls { return const node = diff.ideal - const bd = node.package.bundleDependencies - const sw = this[_shrinkwrapUnpacked].has(node) + const bd = this[_bundleUnpacked].has(node) + const sw = this[_shrinkwrapInflated].has(node) + const bundleMissing = this[_bundleMissing].has(node) // check whether we still need to unpack this one. // test the inDepBundle last, since that's potentially a tree walk. const doUnpack = node && // can't unpack if removed! !node.isRoot && // root node already exists - !(bd && bd.length) && // already unpacked to read bundle + !bd && // already unpacked to read bundle !sw && // already unpacked to read sw - !node.inDepBundle // already unpacked by another dep's bundle + (bundleMissing || !node.inDepBundle) // already unpacked by another dep's bundle if (doUnpack) unpacks.push(this[_reifyNode](node)) @@ -771,8 +872,26 @@ module.exports = cls => class Reifier extends cls { const moves = this[_retiredPaths] this[_retiredUnchanged] = {} return promiseAllRejectLate(this.diff.children.map(diff => { - const realFolder = (diff.actual || diff.ideal).path + // skip if nothing was retired + if (diff.action !== 'CHANGE' && diff.action !== 'REMOVE') + return + + const { path: realFolder } = diff.actual const retireFolder = moves[realFolder] + /* istanbul ignore next - should be impossible */ + debug(() => { + if (!retireFolder) { + const er = new Error('trying to un-retire but not retired') + throw Object.assign(er, { + realFolder, + retireFolder, + actual: diff.actual, + ideal: diff.ideal, + action: diff.action, + }) + } + }) + this[_retiredUnchanged][retireFolder] = [] return promiseAllRejectLate(diff.unchanged.map(node => { // no need to roll back links, since we'll just delete them anyway @@ -780,7 +899,7 @@ module.exports = cls => class Reifier extends cls { return mkdirp(dirname(node.path)).then(() => this[_reifyNode](node)) // will have been moved/unpacked along with bundler - if (node.inDepBundle) + if (node.inDepBundle && !this[_bundleMissing].has(node)) return this[_retiredUnchanged][retireFolder].push(node) @@ -843,6 +962,18 @@ module.exports = cls => class Reifier extends cls { filter: diff => diff.action === 'ADD' || diff.action === 'CHANGE', }) + // pick up link nodes from the unchanged list as we want to run their + // scripts in every install despite of having a diff status change + for (const node of this.diff.unchanged) { + const tree = node.root.target + + // skip links that only live within node_modules as they are most + // likely managed by packages we installed, we only want to 
rebuild + // unchanged links we directly manage + if (node.isLink && node.target.fsTop === tree) + nodes.push(node) + } + return this.rebuild({ nodes, handleOptionalFailure: true }) .then(() => process.emit('timeEnd', 'reify:build')) } @@ -869,7 +1000,7 @@ module.exports = cls => class Reifier extends cls { // last but not least, we save the ideal tree metadata to the package-lock // or shrinkwrap file, and any additions or removals to package.json - [_saveIdealTree] (options) { + async [_saveIdealTree] (options) { // the ideal tree is actualized now, hooray! // it still contains all the references to optional nodes that were removed // for install failures. Those still end up in the shrinkwrap, so we @@ -877,18 +1008,28 @@ module.exports = cls => class Reifier extends cls { // support save=false option if (options.save === false || this[_global] || this[_dryRun]) - return + return false process.emit('time', 'reify:save') - if (this[_resolvedAdd]) { - const root = this.idealTree - const pkg = root.package - for (const req of this[_resolvedAdd]) { - const {name, rawSpec, subSpec} = req + const updatedTrees = new Set() + + // resolvedAdd is the list of user add requests, but with names added + // to things like git repos and tarball file/urls. However, if the + // user requested 'foo@', and we have a foo@file:../foo, then we should + // end up saving the spec we actually used, not whatever they gave us. + if (this[_resolvedAdd].length) { + for (const { name, tree: addTree } of this[_resolvedAdd]) { + // addTree either the root, or a workspace + const edge = addTree.edgesOut.get(name) + const pkg = addTree.package + const req = npa.resolve(name, edge.spec, addTree.realpath) + const {rawSpec, subSpec} = req + const spec = subSpec ? subSpec.rawSpec : rawSpec - const child = root.children.get(name) + const child = edge.to + let newSpec if (req.registry) { const version = child.version const prefixRange = version ? this[_savePrefix] + version : '*' @@ -898,24 +1039,68 @@ module.exports = cls => class Reifier extends cls { // would allow versions outside the requested range. Tags and // specific versions save with the save-prefix. const isRange = (subSpec || req).type === 'range' - const range = !isRange || subset(prefixRange, spec, { loose: true }) - ? prefixRange : spec - const pname = child.package.name + + let range = spec + if ( + !isRange || + spec === '*' || + subset(prefixRange, spec, { loose: true }) + ) + range = prefixRange + + const pname = child.packageName const alias = name !== pname - updateDepSpec(pkg, name, (alias ? `npm:${pname}@` : '') + range) + newSpec = alias ? `npm:${pname}@${range}` : range } else if (req.hosted) { // save the git+https url if it has auth, otherwise shortcut const h = req.hosted const opt = { noCommittish: false } - const save = h.https && h.auth ? `git+${h.https(opt)}` - : h.shortcut(opt) - updateDepSpec(pkg, name, save) + if (h.https && h.auth) + newSpec = `git+${h.https(opt)}` + else + newSpec = h.shortcut(opt) + } else if (req.type === 'directory' || req.type === 'file') { + // save the relative path in package.json + // Normally saveSpec is updated with the proper relative + // path already, but it's possible to specify a full absolute + // path initially, in which case we can end up with the wrong + // thing, so just get the ultimate fetchSpec and relativize it. 
+ const p = req.fetchSpec.replace(/^file:/, '') + const rel = relpath(addTree.realpath, p) + newSpec = `file:${rel}` } else - updateDepSpec(pkg, name, req.saveSpec) - } + newSpec = req.saveSpec + + if (options.saveType) { + const depType = saveTypeMap.get(options.saveType) + pkg[depType][name] = newSpec + // rpj will have moved it here if it was in both + // if it is empty it will be deleted later + if (options.saveType === 'prod' && pkg.optionalDependencies) + delete pkg.optionalDependencies[name] + } else { + if (hasSubKey(pkg, 'dependencies', name)) + pkg.dependencies[name] = newSpec + + if (hasSubKey(pkg, 'devDependencies', name)) { + pkg.devDependencies[name] = newSpec + // don't update peer or optional if we don't have to + if (hasSubKey(pkg, 'peerDependencies', name) && !intersects(newSpec, pkg.peerDependencies[name])) + pkg.peerDependencies[name] = newSpec + + if (hasSubKey(pkg, 'optionalDependencies', name) && !intersects(newSpec, pkg.optionalDependencies[name])) + pkg.optionalDependencies[name] = newSpec + } else { + if (hasSubKey(pkg, 'peerDependencies', name)) + pkg.peerDependencies[name] = newSpec - // refresh the edges so they have the correct specs - this.idealTree.package = pkg + if (hasSubKey(pkg, 'optionalDependencies', name)) + pkg.optionalDependencies[name] = newSpec + } + } + + updatedTrees.add(addTree) + } } // preserve indentation, if possible @@ -929,10 +1114,40 @@ module.exports = cls => class Reifier extends cls { : this[_formatPackageLock], } - return Promise.all([ - this[_saveLockFile](saveOpt), - updateRootPackageJson(this.idealTree), - ]).then(() => process.emit('timeEnd', 'reify:save')) + const promises = [this[_saveLockFile](saveOpt)] + + const updatePackageJson = async (tree) => { + const pkgJson = await PackageJson.load(tree.path) + .catch(() => new PackageJson(tree.path)) + const { + dependencies = {}, + devDependencies = {}, + optionalDependencies = {}, + peerDependencies = {}, + } = tree.package + + pkgJson.update({ + dependencies, + devDependencies, + optionalDependencies, + peerDependencies, + }) + await pkgJson.save() + } + + // grab any from explicitRequests that had deps removed + for (const { from: tree } of this.explicitRequests) + updatedTrees.add(tree) + + for (const tree of updatedTrees) { + // refresh the edges so they have the correct specs + tree.package = tree.package + promises.push(updatePackageJson(tree)) + } + + await Promise.all(promises) + process.emit('timeEnd', 'reify:save') + return true } async [_saveLockFile] (saveOpt) { @@ -941,29 +1156,98 @@ module.exports = cls => class Reifier extends cls { const { meta } = this.idealTree - // might have to update metadata for bins and stuff that gets lost - if (meta.loadedFromDisk && !(meta.originalLockfileVersion >= 2)) { - for (const node of this.idealTree.inventory.values()) - await this[_loadAncientPackageDetails](node, true) - } - return meta.save(saveOpt) } - [_copyIdealToActual] () { + async [_copyIdealToActual] () { + // clean up any trash that is still in the tree + for (const path of this[_trashList]) { + const loc = relpath(this.idealTree.realpath, path) + const node = this.idealTree.inventory.get(loc) + if (node && node.root === this.idealTree) + node.parent = null + } + + // if we filtered to only certain nodes, then anything ELSE needs + // to be untouched in the resulting actual tree, even if it differs + // in the idealTree. Copy over anything that was in the actual and + // was not changed, delete anything in the ideal and not actual. 
+ // Then we move the entire idealTree over to this.actualTree, and + // save the hidden lockfile. + if (this.diff && this.diff.filterSet.size) { + const reroot = new Set() + + const { filterSet } = this.diff + const seen = new Set() + for (const [loc, ideal] of this.idealTree.inventory.entries()) { + seen.add(loc) + + // if it's an ideal node from the filter set, then skip it + // because we already made whatever changes were necessary + if (filterSet.has(ideal)) + continue + + // otherwise, if it's not in the actualTree, then it's not a thing + // that we actually added. And if it IS in the actualTree, then + // it's something that we left untouched, so we need to record + // that. + const actual = this.actualTree.inventory.get(loc) + if (!actual) + ideal.root = null + else { + if ([...actual.linksIn].some(link => filterSet.has(link))) { + seen.add(actual.location) + continue + } + const { realpath, isLink } = actual + if (isLink && ideal.isLink && ideal.realpath === realpath) + continue + else + reroot.add(actual) + } + } + + // now find any actual nodes that may not be present in the ideal + // tree, but were left behind by virtue of not being in the filter + for (const [loc, actual] of this.actualTree.inventory.entries()) { + if (seen.has(loc)) + continue + seen.add(loc) + + // we know that this is something that ISN'T in the idealTree, + // or else we will have addressed it in the previous loop. + // If it's in the filterSet, that means we intentionally removed + // it, so nothing to do here. + if (filterSet.has(actual)) + continue + + reroot.add(actual) + } + + // go through the rerooted actual nodes, and move them over. + for (const actual of reroot) + actual.root = this.idealTree + + // prune out any tops that lack a linkIn, they are no longer relevant. + for (const top of this.idealTree.tops) { + if (top.linksIn.size === 0) + top.root = null + } + + // need to calculate dep flags, since nodes may have been marked + // as extraneous or otherwise incorrect during transit. 
+ calcDepFlags(this.idealTree) + } + // save the ideal's meta as a hidden lockfile after we actualize it this.idealTree.meta.filename = - this.path + '/node_modules/.package-lock.json' + this.idealTree.realpath + '/node_modules/.package-lock.json' this.idealTree.meta.hiddenLockfile = true + this.actualTree = this.idealTree this.idealTree = null - for (const path of this[_trashList]) { - const loc = relpath(this.path, path) - const node = this.actualTree.inventory.get(loc) - if (node && node.root === this.actualTree) - node.parent = null - } - return !this[_global] && this.actualTree.meta.save() + if (!this[_global]) + await this.actualTree.meta.save() } } diff --git a/node_modules/@npmcli/arborist/lib/audit-report.js b/node_modules/@npmcli/arborist/lib/audit-report.js index 77cd6511aea3b..8f7d6546d64f4 100644 --- a/node_modules/@npmcli/arborist/lib/audit-report.js +++ b/node_modules/@npmcli/arborist/lib/audit-report.js @@ -12,7 +12,7 @@ const _fixAvailable = Symbol('fixAvailable') const _checkTopNode = Symbol('checkTopNode') const _init = Symbol('init') const _omit = Symbol('omit') -const procLog = require('./proc-log.js') +const procLog = require('proc-log') const fetch = require('npm-registry-fetch') @@ -78,7 +78,7 @@ class AuditReport extends Map { } obj.vulnerabilities = vulnerabilities - .sort(([a], [b]) => a.localeCompare(b)) + .sort(([a], [b]) => a.localeCompare(b, 'en')) .reduce((set, [name, vuln]) => { set[name] = vuln return set @@ -89,7 +89,8 @@ class AuditReport extends Map { constructor (tree, opts = {}) { super() - this[_omit] = new Set(opts.omit || []) + const { omit } = opts + this[_omit] = new Set(omit || []) this.topVulns = new Map() this.calculator = new Calculator(opts) @@ -97,17 +98,19 @@ class AuditReport extends Map { this.options = opts this.log = opts.log || procLog this.tree = tree + this.filterSet = opts.filterSet } async run () { this.report = await this[_getReport]() + this.log.silly('audit report', this.report) if (this.report) await this[_init]() return this } isVulnerable (node) { - const vuln = this.get(node.package.name) + const vuln = this.get(node.packageName) return !!(vuln && vuln.isVulnerable(node)) } @@ -144,8 +147,8 @@ class AuditReport extends Map { super.set(name, vuln) const p = [] - for (const node of this.tree.inventory.query('name', name)) { - if (shouldOmit(node, this[_omit])) + for (const node of this.tree.inventory.query('packageName', name)) { + if (!shouldAudit(node, this[_omit], this.filterSet)) continue // if not vulnerable by this advisory, keep searching @@ -167,7 +170,7 @@ class AuditReport extends Map { this[_checkTopNode](dep, vuln, spec) else { // calculate a metavuln, if necessary - p.push(this.calculator.calculate(dep.name, advisory).then(meta => { + p.push(this.calculator.calculate(dep.packageName, advisory).then(meta => { if (meta.testVersion(dep.version, spec)) advisories.add(meta) })) @@ -228,6 +231,9 @@ class AuditReport extends Map { if (!specObj.registry) return false + if (specObj.subSpec) + spec = specObj.subSpec.rawSpec + // We don't provide fixes for top nodes other than root, but we // still check to see if the node is fixable with a different version, // and if that is a semver major bump. 
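// Illustrative sketch (assumed behavior of npm-package-arg, not part of this
// patch): the subSpec unwrapping above matters for aliased dependencies, where
// the declared spec is "npm:<name>@<range>" and the real semver range to test
// for fix availability lives on the sub-spec.
const npa = require('npm-package-arg')
const specObj = npa.resolve('my-react', 'npm:react@^17.0.0') // hypothetical alias
const spec = specObj.subSpec ? specObj.subSpec.rawSpec : specObj.rawSpec
// spec === '^17.0.0', the range the fix-availability check compares against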
@@ -288,7 +294,8 @@ class AuditReport extends Map { try { try { // first try the super fast bulk advisory listing - const body = prepareBulkData(this.tree, this[_omit]) + const body = prepareBulkData(this.tree, this[_omit], this.filterSet) + this.log.silly('audit', 'bulk request', body) // no sense asking if we don't have anything to audit, // we know it'll be empty @@ -304,7 +311,8 @@ class AuditReport extends Map { }) return await res.json() - } catch (_) { + } catch (er) { + this.log.silly('audit', 'bulk request failed', String(er.body)) // that failed, try the quick audit endpoint const body = prepareData(this.tree, this.options) const res = await fetch('/-/npm/v1/security/audits/quick', { @@ -327,21 +335,25 @@ class AuditReport extends Map { } } -// return true if we should ignore this one -const shouldOmit = (node, omit) => - !node.version ? true - : omit.size === 0 ? false - : node.dev && omit.has('dev') || +// return true if we should audit this one +const shouldAudit = (node, omit, filterSet) => + !node.version ? false + : node.isRoot ? false + : filterSet && filterSet.size !== 0 && !filterSet.has(node) ? false + : omit.size === 0 ? true + : !( // otherwise, just ensure we're not omitting this one + node.dev && omit.has('dev') || node.optional && omit.has('optional') || node.devOptional && omit.has('dev') && omit.has('optional') || node.peer && omit.has('peer') + ) -const prepareBulkData = (tree, omit) => { +const prepareBulkData = (tree, omit, filterSet) => { const payload = {} - for (const name of tree.inventory.query('name')) { + for (const name of tree.inventory.query('packageName')) { const set = new Set() - for (const node of tree.inventory.query('name', name)) { - if (shouldOmit(node, omit)) + for (const node of tree.inventory.query('packageName', name)) { + if (!shouldAudit(node, omit, filterSet)) continue set.add(node.version) diff --git a/node_modules/@npmcli/arborist/lib/calc-dep-flags.js b/node_modules/@npmcli/arborist/lib/calc-dep-flags.js index d6ae266db3bb0..968fc83c5136c 100644 --- a/node_modules/@npmcli/arborist/lib/calc-dep-flags.js +++ b/node_modules/@npmcli/arborist/lib/calc-dep-flags.js @@ -22,15 +22,19 @@ const calcDepFlagsStep = (node) => { // Since we're only walking through deps that are not already flagged // as non-dev/non-optional, it's typically a very shallow traversal node.extraneous = false + resetParents(node, 'extraneous') + resetParents(node, 'dev') + resetParents(node, 'peer') + resetParents(node, 'devOptional') + resetParents(node, 'optional') // for links, map their hierarchy appropriately - if (node.target) { + if (node.isLink) { node.target.dev = node.dev node.target.optional = node.optional node.target.devOptional = node.devOptional node.target.peer = node.peer - node.target.extraneous = false - node = node.target + return calcDepFlagsStep(node.target) } node.edgesOut.forEach(({peer, optional, dev, to}) => { @@ -71,6 +75,14 @@ const calcDepFlagsStep = (node) => { return node } +const resetParents = (node, flag) => { + if (node[flag]) + return + + for (let p = node; p && (p === node || p[flag]); p = p.resolveParent) + p[flag] = false +} + // typically a short walk, since it only traverses deps that // have the flag set. 
const unsetFlag = (node, flag) => { @@ -80,10 +92,10 @@ const unsetFlag = (node, flag) => { tree: node, visit: node => { node.extraneous = node[flag] = false - if (node.target) + if (node.isLink) node.target.extraneous = node.target[flag] = false }, - getChildren: node => [...(node.target || node).edgesOut.values()] + getChildren: node => [...node.target.edgesOut.values()] .filter(edge => edge.to && edge.to[flag] && (flag !== 'peer' && edge.type === 'peer' || edge.type === 'prod')) .map(edge => edge.to), diff --git a/node_modules/@npmcli/arborist/lib/can-place-dep.js b/node_modules/@npmcli/arborist/lib/can-place-dep.js new file mode 100644 index 0000000000000..cf6b800c44ea2 --- /dev/null +++ b/node_modules/@npmcli/arborist/lib/can-place-dep.js @@ -0,0 +1,405 @@ +// Internal methods used by buildIdealTree. +// Answer the question: "can I put this dep here?" +// +// IMPORTANT: *nothing* in this class should *ever* modify or mutate the tree +// at all. The contract here is strictly limited to read operations. We call +// this in the process of walking through the ideal tree checking many +// different potential placement targets for a given node. If a change is made +// to the tree along the way, that can cause serious problems! +// +// In order to enforce this restriction, in debug mode, canPlaceDep() will +// snapshot the tree at the start of the process, and then at the end, will +// verify that it still matches the snapshot, and throw an error if any changes +// occurred. +// +// The algorithm is roughly like this: +// - check the node itself: +// - if there is no version present, and no conflicting edges from target, +// OK, provided all peers can be placed at or above the target. +// - if the current version matches, KEEP +// - if there is an older version present, which can be replaced, then +// - if satisfying and preferDedupe? KEEP +// - else: REPLACE +// - if there is a newer version present, and preferDedupe, REPLACE +// - if the version present satisfies the edge, KEEP +// - else: CONFLICT +// - if the node is not in conflict, check each of its peers: +// - if the peer can be placed in the target, continue +// - else if the peer can be placed in a parent, and there is no other +// conflicting version shadowing it, continue +// - else CONFLICT +// - If the peers are not in conflict, return the original node's value +// +// An exception to this logic is that if the target is the deepest location +// that a node can be placed, and the conflicting node can be placed deeper, +// then we will return REPLACE rather than CONFLICT, and Arborist will queue +// the replaced node for resolution elsewhere. + +const semver = require('semver') +const debug = require('./debug.js') +const peerEntrySets = require('./peer-entry-sets.js') +const deepestNestingTarget = require('./deepest-nesting-target.js') + +const CONFLICT = Symbol('CONFLICT') +const OK = Symbol('OK') +const REPLACE = Symbol('REPLACE') +const KEEP = Symbol('KEEP') + +class CanPlaceDep { + // dep is a dep that we're trying to place. it should already live in + // a virtual tree where its peer set is loaded as children of the root. + // target is the actual place where we're trying to place this dep + // in a node_modules folder. + // edge is the edge that we're trying to satisfy with this placement. + // parent is the CanPlaceDep object of the entry node when placing a peer. 
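// Illustrative usage sketch (not part of this patch; dep, target, and edge are
// assumed to be Arborist Node/Edge instances from an ideal tree under
// construction). The answer is one of the four symbols exposed as static
// getters on the class.
const cpd = new CanPlaceDep({ dep, target, edge })
switch (cpd.canPlace) {
  case CanPlaceDep.OK:       // place a fresh copy of dep in target
    break
  case CanPlaceDep.KEEP:     // an existing copy already satisfies edge
    break
  case CanPlaceDep.REPLACE:  // overwrite the existing copy in target
    break
  case CanPlaceDep.CONFLICT: // cannot place here without breaking an edge
    break
}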
+ constructor (options) { + const { + dep, + target, + edge, + preferDedupe, + parent = null, + peerPath = [], + explicitRequest = false, + } = options + + debug(() => { + if (!dep) + throw new Error('no dep provided to CanPlaceDep') + + if (!target) + throw new Error('no target provided to CanPlaceDep') + + if (!edge) + throw new Error('no edge provided to CanPlaceDep') + + this._nodeSnapshot = JSON.stringify(dep) + this._treeSnapshot = JSON.stringify(target.root) + }) + + // the result of whether we can place it or not + this.canPlace = null + // if peers conflict, but this one doesn't, then that is useful info + this.canPlaceSelf = null + + this.dep = dep + this.target = target + this.edge = edge + this.explicitRequest = explicitRequest + + // preventing cycles when we check peer sets + this.peerPath = peerPath + // we always prefer to dedupe peers, because they are trying + // a bit harder to be singletons. + this.preferDedupe = !!preferDedupe || edge.peer + this.parent = parent + this.children = [] + + this.isSource = target === this.peerSetSource + this.name = edge.name + this.current = target.children.get(this.name) + this.targetEdge = target.edgesOut.get(this.name) + this.conflicts = new Map() + + // check if this dep was already subject to a peerDep override while + // building the peerSet. + this.edgeOverride = !dep.satisfies(edge) + + this.canPlace = this.checkCanPlace() + if (!this.canPlaceSelf) + this.canPlaceSelf = this.canPlace + + debug(() => { + const nodeSnapshot = JSON.stringify(dep) + const treeSnapshot = JSON.stringify(target.root) + /* istanbul ignore if */ + if (this._nodeSnapshot !== nodeSnapshot) { + throw Object.assign(new Error('dep changed in CanPlaceDep'), { + expect: this._nodeSnapshot, + actual: nodeSnapshot, + }) + } + /* istanbul ignore if */ + if (this._treeSnapshot !== treeSnapshot) { + throw Object.assign(new Error('tree changed in CanPlaceDep'), { + expect: this._treeSnapshot, + actual: treeSnapshot, + }) + } + }) + } + + checkCanPlace () { + const { target, targetEdge, current, dep } = this + + // if the dep failed to load, we're going to fail the build or + // prune it out anyway, so just move forward placing/replacing it. + if (dep.errors.length) + return current ? REPLACE : OK + + // cannot place peers inside their dependents, except for tops + if (targetEdge && targetEdge.peer && !target.isTop) + return CONFLICT + + if (targetEdge && !dep.satisfies(targetEdge) && targetEdge !== this.edge) + return CONFLICT + + return current ? this.checkCanPlaceCurrent() : this.checkCanPlaceNoCurrent() + } + + // we know that the target has a dep by this name in its node_modules + // already. Can return KEEP, REPLACE, or CONFLICT. + checkCanPlaceCurrent () { + const { preferDedupe, explicitRequest, current, target, edge, dep } = this + + if (dep.matches(current)) { + if (current.satisfies(edge) || this.edgeOverride) + return explicitRequest ? REPLACE : KEEP + } + + const { version: curVer } = current + const { version: newVer } = dep + const tryReplace = curVer && newVer && semver.gte(newVer, curVer) + if (tryReplace && dep.canReplace(current)) { + /* XXX-istanbul ignore else - It's extremely rare that a replaceable + * node would be a conflict, if the current one wasn't a conflict, + * but it is theoretically possible if peer deps are pinned. 
In + * that case we treat it like any other conflict, and keep trying */ + const cpp = this.canPlacePeers(REPLACE) + if (cpp !== CONFLICT) + return cpp + } + + // ok, can't replace the current with new one, but maybe current is ok? + if (current.satisfies(edge) && (!explicitRequest || preferDedupe)) + return KEEP + + // if we prefer deduping, then try replacing newer with older + if (preferDedupe && !tryReplace && dep.canReplace(current)) { + const cpp = this.canPlacePeers(REPLACE) + if (cpp !== CONFLICT) + return cpp + } + + // Check for interesting cases! + // First, is this the deepest place that this thing can go, and NOT the + // deepest place where the conflicting dep can go? If so, replace it, + // and let it re-resolve deeper in the tree. + const myDeepest = this.deepestNestingTarget + + // ok, i COULD be placed deeper, so leave the current one alone. + if (target !== myDeepest) + return CONFLICT + + // if we are not checking a peerDep, then we MUST place it here, in the + // target that has a non-peer dep on it. + if (!edge.peer && target === edge.from) + return this.canPlacePeers(REPLACE) + + // if we aren't placing a peer in a set, then we're done here. + // This is ignored because it SHOULD be redundant, as far as I can tell, + // with the deepest target and target===edge.from tests. But until we + // can prove that isn't possible, this condition is here for safety. + /* istanbul ignore if - allegedly impossible */ + if (!this.parent && !edge.peer) + return CONFLICT + + // check the deps in the peer group for each edge into that peer group + // if ALL of them can be pushed deeper, or if it's ok to replace its + // members with the contents of the new peer group, then we're good. + let canReplace = true + for (const [entryEdge, currentPeers] of peerEntrySets(current)) { + if (entryEdge === this.edge || entryEdge === this.peerEntryEdge) + continue + + // First, see if it's ok to just replace the peerSet entirely. + // we do this by walking out from the entryEdge, because in a case like + // this: + // + // v -> PEER(a@1||2) + // a@1 -> PEER(b@1) + // a@2 -> PEER(b@2) + // b@1 -> PEER(a@1) + // b@2 -> PEER(a@2) + // + // root + // +-- v + // +-- a@2 + // +-- b@2 + // + // Trying to place a peer group of (a@1, b@1) would fail to note that + // they can be replaced, if we did it by looping 1 by 1. If we are + // replacing something, we don't have to check its peer deps, because + // the peerDeps in the placed peerSet will presumably satisfy. + const entryNode = entryEdge.to + const entryRep = dep.parent.children.get(entryNode.name) + if (entryRep) { + if (entryRep.canReplace(entryNode, dep.parent.children.keys())) + continue + } + + let canClobber = !entryRep + if (!entryRep) { + const peerReplacementWalk = new Set([entryNode]) + OUTER: for (const currentPeer of peerReplacementWalk) { + for (const edge of currentPeer.edgesOut.values()) { + if (!edge.peer || !edge.valid) + continue + const rep = dep.parent.children.get(edge.name) + if (!rep) { + if (edge.to) + peerReplacementWalk.add(edge.to) + continue + } + if (!rep.satisfies(edge)) { + canClobber = false + break OUTER + } + } + } + } + if (canClobber) + continue + + // ok, we can't replace, but maybe we can nest the current set deeper? 
+ let canNestCurrent = true + for (const currentPeer of currentPeers) { + if (!canNestCurrent) + break + + // still possible to nest this peerSet + const curDeep = deepestNestingTarget(entryEdge.from, currentPeer.name) + if (curDeep === target || target.isDescendantOf(curDeep)) { + canNestCurrent = false + canReplace = false + } + if (canNestCurrent) + continue + } + } + + // if we can nest or replace all the current peer groups, we can replace. + if (canReplace) + return this.canPlacePeers(REPLACE) + + return CONFLICT + } + + checkCanPlaceNoCurrent () { + const { target, peerEntryEdge, dep, name } = this + + // check to see what that name resolves to here, and who may depend on + // being able to reach it by crawling up past the parent. we know + // that it's not the target's direct child node, and if it was a direct + // dep of the target, we would have conflicted earlier. + const current = target !== peerEntryEdge.from && target.resolve(name) + if (current) { + for (const edge of current.edgesIn.values()) { + if (edge.from.isDescendantOf(target) && edge.valid) { + if (!dep.satisfies(edge)) + return CONFLICT + } + } + } + + // no objections, so this is fine as long as peers are ok here. + return this.canPlacePeers(OK) + } + + get deepestNestingTarget () { + const start = this.parent ? this.parent.deepestNestingTarget + : this.edge.from + return deepestNestingTarget(start, this.name) + } + + get conflictChildren () { + return this.allChildren.filter(c => c.canPlace === CONFLICT) + } + + get allChildren () { + const set = new Set(this.children) + for (const child of set) { + for (const grandchild of child.children) + set.add(grandchild) + } + return [...set] + } + + get top () { + return this.parent ? this.parent.top : this + } + + // check if peers can go here. returns state or CONFLICT + canPlacePeers (state) { + this.canPlaceSelf = state + if (this._canPlacePeers) + return this._canPlacePeers + + // TODO: represent peerPath in ERESOLVE error somehow? + const peerPath = [...this.peerPath, this.dep] + let sawConflict = false + for (const peerEdge of this.dep.edgesOut.values()) { + if (!peerEdge.peer || !peerEdge.to || peerPath.includes(peerEdge.to)) + continue + const peer = peerEdge.to + // it may be the case that the *initial* dep can be nested, but a peer + // of that dep needs to be placed shallower, because the target has + // a peer dep on the peer as well. + const target = deepestNestingTarget(this.target, peer.name) + const cpp = new CanPlaceDep({ + dep: peer, + target, + parent: this, + edge: peerEdge, + peerPath, + // always place peers in preferDedupe mode + preferDedupe: true, + }) + /* istanbul ignore next */ + debug(() => { + if (this.children.some(c => c.dep === cpp.dep)) + throw new Error('checking same dep repeatedly') + }) + this.children.push(cpp) + + if (cpp.canPlace === CONFLICT) + sawConflict = true + } + + this._canPlacePeers = sawConflict ? CONFLICT : state + return this._canPlacePeers + } + + // what is the node that is causing this peerSet to be placed? + get peerSetSource () { + return this.parent ? 
this.parent.peerSetSource : this.edge.from + } + + get peerEntryEdge () { + return this.top.edge + } + + static get CONFLICT () { + return CONFLICT + } + + static get OK () { + return OK + } + + static get REPLACE () { + return REPLACE + } + + static get KEEP () { + return KEEP + } + + get description () { + const { canPlace } = this + return canPlace && canPlace.description || + /* istanbul ignore next - old node affordance */ canPlace + } +} + +module.exports = CanPlaceDep diff --git a/node_modules/@npmcli/arborist/lib/debug.js b/node_modules/@npmcli/arborist/lib/debug.js index 5acacee69e223..aeda7229d5e8c 100644 --- a/node_modules/@npmcli/arborist/lib/debug.js +++ b/node_modules/@npmcli/arborist/lib/debug.js @@ -12,6 +12,7 @@ // run in debug mode if explicitly requested, running arborist tests, // or working in the arborist project directory. + const debug = process.env.ARBORIST_DEBUG !== '0' && ( process.env.ARBORIST_DEBUG === '1' || /\barborist\b/.test(process.env.NODE_DEBUG || '') || @@ -21,4 +22,10 @@ const debug = process.env.ARBORIST_DEBUG !== '0' && ( ) module.exports = debug ? fn => fn() : () => {} -module.exports.log = (...msg) => module.exports(() => console.error(...msg)) +const red = process.stderr.isTTY ? msg => `\x1B[31m${msg}\x1B[39m` : m => m +module.exports.log = (...msg) => module.exports(() => { + const { format } = require('util') + const prefix = `\n${process.pid} ${red(format(msg.shift()))} ` + msg = (prefix + format(...msg).trim().split('\n').join(prefix)).trim() + console.error(msg) +}) diff --git a/node_modules/@npmcli/arborist/lib/deepest-nesting-target.js b/node_modules/@npmcli/arborist/lib/deepest-nesting-target.js new file mode 100644 index 0000000000000..cbaa396f3f251 --- /dev/null +++ b/node_modules/@npmcli/arborist/lib/deepest-nesting-target.js @@ -0,0 +1,16 @@ +// given a starting node, what is the *deepest* target where name could go? +// This is not on the Node class for the simple reason that we sometimes +// need to check the deepest *potential* target for a Node that is not yet +// added to the tree where we are checking. +const deepestNestingTarget = (start, name) => { + for (const target of start.ancestry()) { + // note: this will skip past the first target if edge is peer + if (target.isProjectRoot || !target.resolveParent) + return target + const targetEdge = target.edgesOut.get(name) + if (!targetEdge || !targetEdge.peer) + return target + } +} + +module.exports = deepestNestingTarget diff --git a/node_modules/@npmcli/arborist/lib/dep-spec.js b/node_modules/@npmcli/arborist/lib/dep-spec.js deleted file mode 100644 index 92911543e1684..0000000000000 --- a/node_modules/@npmcli/arborist/lib/dep-spec.js +++ /dev/null @@ -1,43 +0,0 @@ -const types = [ - 'peerDependencies', - 'devDependencies', - 'optionalDependencies', - 'dependencies', -] - -const findType = (pkg, name) => { - for (const t of types) { - if (pkg[t] && typeof pkg[t] === 'object' && pkg[t][name] !== undefined) - return t - } - return 'dependencies' -} - -// given a dep name and spec, update it wherever it exists in -// the manifest, or add the spec to 'dependencies' if not found. 
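// Illustrative usage sketch (not part of this patch; `tree` is assumed to be a
// loaded root Node whose package.json declares workspaces "a" and "b"):
const wsNodes = getWorkspaceNodes(tree, ['a', 'b'], log)
// wsNodes is an array of workspace Nodes; names missing from tree.workspaces
// are skipped with a warning rather than causing a hard failure.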
-const updateDepSpec = (pkg, name, newSpec) => { - const type = findType(pkg, name) - pkg[type] = pkg[type] || {} - pkg[type][name] = newSpec - return pkg -} - -// sort alphabetically all types of deps for a given package -const orderDeps = (pkg) => { - for (const type of types) { - if (pkg && pkg[type]) { - pkg[type] = Object.keys(pkg[type]) - .sort((a, b) => a.localeCompare(b)) - .reduce((res, key) => { - res[key] = pkg[type][key] - return res - }, {}) - } - } - return pkg -} - -module.exports = { - orderDeps, - updateDepSpec, -} diff --git a/node_modules/@npmcli/arborist/lib/diff.js b/node_modules/@npmcli/arborist/lib/diff.js index ada67f8161d30..2008ef7a35bdd 100644 --- a/node_modules/@npmcli/arborist/lib/diff.js +++ b/node_modules/@npmcli/arborist/lib/diff.js @@ -11,7 +11,9 @@ const {existsSync} = require('fs') const ssri = require('ssri') class Diff { - constructor ({actual, ideal}) { + constructor ({actual, ideal, filterSet, shrinkwrapInflated}) { + this.filterSet = filterSet + this.shrinkwrapInflated = shrinkwrapInflated this.children = [] this.actual = actual this.ideal = ideal @@ -29,9 +31,62 @@ class Diff { this.removed = [] } - static calculate ({actual, ideal}) { + static calculate ({actual, ideal, filterNodes = [], shrinkwrapInflated = new Set()}) { + // if there's a filterNode, then: + // - get the path from the root to the filterNode. The root or + // root.target should have an edge either to the filterNode or + // a link to the filterNode. If not, abort. Add the path to the + // filterSet. + // - Add set of Nodes depended on by the filterNode to filterSet. + // - Anything outside of that set should be ignored by getChildren + const filterSet = new Set() + const extraneous = new Set() + for (const filterNode of filterNodes) { + const { root } = filterNode + if (root !== ideal && root !== actual) + throw new Error('invalid filterNode: outside idealTree/actualTree') + const rootTarget = root.target + const edge = [...rootTarget.edgesOut.values()].filter(e => { + return e.to && (e.to === filterNode || e.to.target === filterNode) + })[0] + filterSet.add(root) + filterSet.add(rootTarget) + filterSet.add(ideal) + filterSet.add(actual) + if (edge && edge.to) { + filterSet.add(edge.to) + filterSet.add(edge.to.target) + } + filterSet.add(filterNode) + + depth({ + tree: filterNode, + visit: node => filterSet.add(node), + getChildren: node => { + node = node.target + const loc = node.location + const idealNode = ideal.inventory.get(loc) + const ideals = !idealNode ? [] + : [...idealNode.edgesOut.values()].filter(e => e.to).map(e => e.to) + const actualNode = actual.inventory.get(loc) + const actuals = !actualNode ? 
[] + : [...actualNode.edgesOut.values()].filter(e => e.to).map(e => e.to) + if (actualNode) { + for (const child of actualNode.children.values()) { + if (child.extraneous) + extraneous.add(child) + } + } + + return ideals.concat(actuals) + }, + }) + } + for (const extra of extraneous) + filterSet.add(extra) + return depth({ - tree: new Diff({actual, ideal}), + tree: new Diff({actual, ideal, filterSet, shrinkwrapInflated}), getChildren, leave, }) @@ -53,16 +108,32 @@ const getAction = ({actual, ideal}) => { if (ideal.isRoot && actual.isRoot) return null + // if the versions don't match, it's a change no matter what + if (ideal.version !== actual.version) + return 'CHANGE' + const binsExist = ideal.binPaths.every((path) => existsSync(path)) // top nodes, links, and git deps won't have integrity, but do have resolved - if (!ideal.integrity && !actual.integrity && ideal.resolved === actual.resolved && binsExist) + // if neither node has integrity, the bins exist, and either (a) neither + // node has a resolved value or (b) they both do and match, then we can + // leave this one alone since we already know the versions match due to + // the condition above. The "neither has resolved" case (a) cannot be + // treated as a 'mark CHANGE and refetch', because shrinkwraps, bundles, + // and link deps may lack this information, and we don't want to try to + // go to the registry for something that isn't there. + const noIntegrity = !ideal.integrity && !actual.integrity + const noResolved = !ideal.resolved && !actual.resolved + const resolvedMatch = ideal.resolved && ideal.resolved === actual.resolved + if (noIntegrity && binsExist && (resolvedMatch || noResolved)) return null // otherwise, verify that it's the same bits // note that if ideal has integrity, and resolved doesn't, we treat // that as a 'change', so that it gets re-fetched and locked down. - if (!ideal.integrity || !actual.integrity || !ssri.parse(ideal.integrity).match(actual.integrity) || !binsExist) + const integrityMismatch = !ideal.integrity || !actual.integrity || + !ssri.parse(ideal.integrity).match(actual.integrity) + if (integrityMismatch || !binsExist) return 'CHANGE' return null @@ -72,9 +143,9 @@ const allChildren = node => { if (!node) return new Map() - // if the node is a global root, and also a link, then what we really + // if the node is root, and also a link, then what we really // want is to traverse the target's children - if (node.global && node.isRoot && node.isLink) + if (node.isRoot && node.isLink) return allChildren(node.target) const kids = new Map() @@ -89,20 +160,29 @@ const allChildren = node => { // to create the diff tree const getChildren = diff => { const children = [] - const {unchanged, removed} = diff + const {actual, ideal, unchanged, removed, filterSet, shrinkwrapInflated} = diff // Note: we DON'T diff fsChildren themselves, because they are either // included in the package contents, or part of some other project, and // will never appear in legacy shrinkwraps anyway. but we _do_ include the // child nodes of fsChildren, because those are nodes that we are typically // responsible for installing. - const actualKids = allChildren(diff.actual) - const idealKids = allChildren(diff.ideal) + const actualKids = allChildren(actual) + const idealKids = allChildren(ideal) + + if (ideal && ideal.hasShrinkwrap && !shrinkwrapInflated.has(ideal)) { + // Guaranteed to get a diff.leaves here, because we always + // be called with a proper Diff object when ideal has a shrinkwrap + // that has not been inflated. 
+ diff.leaves.push(diff) + return children + } + const paths = new Set([...actualKids.keys(), ...idealKids.keys()]) for (const path of paths) { const actual = actualKids.get(path) const ideal = idealKids.get(path) - diffNode(actual, ideal, children, unchanged, removed) + diffNode(actual, ideal, children, unchanged, removed, filterSet, shrinkwrapInflated) } if (diff.leaves && !children.length) @@ -111,15 +191,18 @@ const getChildren = diff => { return children } -const diffNode = (actual, ideal, children, unchanged, removed) => { +const diffNode = (actual, ideal, children, unchanged, removed, filterSet, shrinkwrapInflated) => { + if (filterSet.size && !(filterSet.has(ideal) || filterSet.has(actual))) + return + const action = getAction({actual, ideal}) // if it's a match, then get its children // otherwise, this is the child diff node - if (action) { + if (action || (!shrinkwrapInflated.has(ideal) && ideal.hasShrinkwrap)) { if (action === 'REMOVE') removed.push(actual) - children.push(new Diff({actual, ideal})) + children.push(new Diff({actual, ideal, filterSet, shrinkwrapInflated})) } else { unchanged.push(ideal) // !*! Weird dirty hack warning !*! @@ -150,7 +233,7 @@ const diffNode = (actual, ideal, children, unchanged, removed) => { for (const node of bundledChildren) node.parent = ideal } - children.push(...getChildren({actual, ideal, unchanged, removed})) + children.push(...getChildren({actual, ideal, unchanged, removed, filterSet, shrinkwrapInflated})) } } diff --git a/node_modules/@npmcli/arborist/lib/edge.js b/node_modules/@npmcli/arborist/lib/edge.js index c5f00faff2999..0bd9021d56a70 100644 --- a/node_modules/@npmcli/arborist/lib/edge.js +++ b/node_modules/@npmcli/arborist/lib/edge.js @@ -37,6 +37,7 @@ const printableEdge = (edge) => { ...(edgeFrom != null ? { from: edgeFrom } : {}), ...(edgeTo ? { to: edgeTo } : {}), ...(edge.error ? { error: edge.error } : {}), + ...(edge.overridden ? { overridden: true } : {}), }) } @@ -72,6 +73,7 @@ class Edge { throw new TypeError('must provide "from" node') this[_setFrom](from) this[_error] = this[_loadError]() + this.overridden = false } satisfiedBy (node) { @@ -87,16 +89,24 @@ class Edge { // return the edge data, and an explanation of how that edge came to be here [_explain] (seen) { - const { error, from } = this + const { error, from, bundled } = this return { type: this.type, name: this.name, spec: this.spec, + ...(bundled ? { bundled } : {}), ...(error ? { error } : {}), ...(from ? { from: from.explain(null, seen) } : {}), } } + get bundled () { + if (!this.from) + return false + const { package: { bundleDependencies = [] } } = this.from + return bundleDependencies.includes(this.name) + } + get workspace () { return this[_type] === 'workspace' } diff --git a/node_modules/@npmcli/arborist/lib/get-workspace-nodes.js b/node_modules/@npmcli/arborist/lib/get-workspace-nodes.js new file mode 100644 index 0000000000000..6db489f69c518 --- /dev/null +++ b/node_modules/@npmcli/arborist/lib/get-workspace-nodes.js @@ -0,0 +1,33 @@ +// Get the actual nodes corresponding to a root node's child workspaces, +// given a list of workspace names. 
+const relpath = require('./relpath.js') +const getWorkspaceNodes = (tree, workspaces, log) => { + const wsMap = tree.workspaces + if (!wsMap) { + log.warn('workspaces', 'filter set, but no workspaces present') + return [] + } + + const nodes = [] + for (const name of workspaces) { + const path = wsMap.get(name) + if (!path) { + log.warn('workspaces', `${name} in filter set, but not in workspaces`) + continue + } + + const loc = relpath(tree.realpath, path) + const node = tree.inventory.get(loc) + + if (!node) { + log.warn('workspaces', `${name} in filter set, but no workspace folder present`) + continue + } + + nodes.push(node) + } + + return nodes +} + +module.exports = getWorkspaceNodes diff --git a/node_modules/@npmcli/arborist/lib/index.js b/node_modules/@npmcli/arborist/lib/index.js index fd7d8817258ed..c7b07ce28e4df 100644 --- a/node_modules/@npmcli/arborist/lib/index.js +++ b/node_modules/@npmcli/arborist/lib/index.js @@ -3,5 +3,6 @@ module.exports.Arborist = module.exports module.exports.Node = require('./node.js') module.exports.Link = require('./link.js') module.exports.Edge = require('./edge.js') +module.exports.Shrinkwrap = require('./shrinkwrap.js') // XXX export the other classes, too. shrinkwrap, diff, etc. // they're handy! diff --git a/node_modules/@npmcli/arborist/lib/inventory.js b/node_modules/@npmcli/arborist/lib/inventory.js index cef0c4e265899..a4ae11c2ab41e 100644 --- a/node_modules/@npmcli/arborist/lib/inventory.js +++ b/node_modules/@npmcli/arborist/lib/inventory.js @@ -4,9 +4,23 @@ // keys is the set of fields to be able to query. const _primaryKey = Symbol('_primaryKey') const _index = Symbol('_index') -const defaultKeys = ['name', 'license', 'funding', 'realpath'] +const defaultKeys = ['name', 'license', 'funding', 'realpath', 'packageName'] const { hasOwnProperty } = Object.prototype const debug = require('./debug.js') + +// handling for the outdated "licenses" array, just pick the first one +// also support the alternative spelling "licence" +const getLicense = pkg => { + if (pkg) { + const lic = pkg.license || pkg.licence + if (lic) + return lic + const lics = pkg.licenses || pkg.licences + if (Array.isArray(lics)) + return lics[0] + } +} + class Inventory extends Map { constructor (opt = {}) { const { primary, keys } = opt @@ -56,7 +70,9 @@ class Inventory extends Map { for (const [key, map] of this[_index].entries()) { // if the node has the value, but it's false, then use that const val_ = hasOwnProperty.call(node, key) ? node[key] - : node[key] || (node.package && node.package[key]) + : key === 'license' ? getLicense(node.package) + : node[key] ? node[key] + : node.package && node.package[key] const val = typeof val_ === 'string' ? val_ : !val_ || typeof val_ !== 'object' ? val_ : key === 'license' ? 
val_.type diff --git a/node_modules/@npmcli/arborist/lib/link.js b/node_modules/@npmcli/arborist/lib/link.js index 2394c6e41173c..4d15428d87360 100644 --- a/node_modules/@npmcli/arborist/lib/link.js +++ b/node_modules/@npmcli/arborist/lib/link.js @@ -23,13 +23,19 @@ class Link extends Node { : null), }) - this.target = target || new Node({ - ...options, - path: realpath, - parent: null, - fsParent: null, - root: this.root, - }) + if (target) + this.target = target + else if (this.realpath === this.root.path) + this.target = this.root + else { + this.target = new Node({ + ...options, + path: realpath, + parent: null, + fsParent: null, + root: this.root, + }) + } } get version () { diff --git a/node_modules/@npmcli/arborist/lib/node.js b/node_modules/@npmcli/arborist/lib/node.js index fa39bed5ef9d4..d77b18355ff31 100644 --- a/node_modules/@npmcli/arborist/lib/node.js +++ b/node_modules/@npmcli/arborist/lib/node.js @@ -28,6 +28,7 @@ // where we need to quickly find all instances of a given package name within a // tree. +const semver = require('semver') const nameFromFolder = require('@npmcli/name-from-folder') const Edge = require('./edge.js') const Inventory = require('./inventory.js') @@ -290,6 +291,10 @@ class Node { return this[_package].version || '' } + get packageName () { + return this[_package].name || null + } + get pkgid () { const { name = '', version = '' } = this.package // root package will prefer package name over folder name, @@ -349,10 +354,10 @@ class Node { } const why = { - name: this.isProjectRoot ? this.package.name : this.name, + name: this.isProjectRoot || this.isTop ? this.packageName : this.name, version: this.package.version, } - if (this.errors.length || !this.package.name || !this.package.version) { + if (this.errors.length || !this.packageName || !this.package.version) { why.errors = this.errors.length ? this.errors : [ new Error('invalid package: lacks name and/or version'), ] @@ -375,6 +380,7 @@ class Node { return why why.location = this.location + why.isWorkspace = this.isWorkspace // make a new list each time. we can revisit, but not loop. 
seen = seen.concat(this) @@ -395,11 +401,15 @@ class Node { for (const edge of edges) why.dependents.push(edge.explain(seen)) } + + if (this.linksIn.size) + why.linksIn = [...this.linksIn].map(link => link[_explain](edge, seen)) + return why } isDescendantOf (node) { - for (let p = this; p; p = p.parent) { + for (let p = this; p; p = p.resolveParent) { if (p === node) return true } @@ -459,7 +469,7 @@ class Node { if (this.isProjectRoot) return false const { root } = this - const { type, to } = root.edgesOut.get(this.package.name) || {} + const { type, to } = root.edgesOut.get(this.packageName) || {} return type === 'workspace' && to && (to.target === this || to === this) } @@ -471,6 +481,11 @@ class Node { return this === this.root || this === this.root.target } + * ancestry () { + for (let anc = this; anc; anc = anc.resolveParent) + yield anc + } + set root (root) { // setting to null means this is the new root // should only ever be one step @@ -537,6 +552,8 @@ class Node { // try to find our parent/fsParent in the new root inventory for (const p of walkUp(dirname(this.path))) { + if (p === this.path) + continue const ploc = relpath(root.realpath, p) const parent = root.inventory.get(ploc) if (parent) { @@ -637,7 +654,7 @@ class Node { }) if (this.isLink) { - const target = node.target || node + const target = node.target this[_target] = target this[_package] = target.package target.linksIn.add(this) @@ -685,6 +702,7 @@ class Node { ...this.children.values(), ...this.inventory.values(), ].filter(n => n !== this)) + for (const child of family) { if (child.root !== root) { child[_delistFromMeta]() @@ -704,12 +722,14 @@ class Node { } // if we had a target, and didn't find one in the new root, then bring - // it over as well. - if (this.isLink && target && !this.target) + // it over as well, but only if we're setting the link into a new root, + // as we don't want to lose the target any time we remove a link. + if (this.isLink && target && !this.target && root !== this) target.root = root // tree should always be valid upon root setter completion. treeCheck(this) + treeCheck(root) } get root () { @@ -726,20 +746,14 @@ class Node { [_loadDeps] () { // Caveat! Order is relevant! - // packages in optionalDependencies and prod/peer/dev are - // optional. Packages in both deps and devDeps are required. + // Packages in optionalDependencies are optional. + // Packages in both deps and devDeps are required. // Note the subtle breaking change from v6: it is no longer possible // to have a different spec for a devDep than production dep. - this[_loadDepType](this.package.optionalDependencies, 'optional') // Linked targets that are disconnected from the tree are tops, // but don't have a 'path' field, only a 'realpath', because we // don't know their canonical location. We don't need their devDeps. 
- const { isTop, path, sourceReference } = this - const { isTop: srcTop, path: srcPath } = sourceReference || {} - if (isTop && path && (!sourceReference || srcTop && srcPath)) - this[_loadDepType](this.package.devDependencies, 'dev') - const pd = this.package.peerDependencies if (pd && typeof pd === 'object' && !this.legacyPeerDeps) { const pm = this.package.peerDependenciesMeta || {} @@ -756,24 +770,33 @@ class Node { } this[_loadDepType](this.package.dependencies, 'prod') + this[_loadDepType](this.package.optionalDependencies, 'optional') + + const { isTop, path, sourceReference } = this + const { isTop: srcTop, path: srcPath } = sourceReference || {} + if (isTop && path && (!sourceReference || srcTop && srcPath)) + this[_loadDepType](this.package.devDependencies, 'dev') } - [_loadDepType] (obj, type) { - const from = this + [_loadDepType] (deps, type) { const ad = this.package.acceptDependencies || {} - for (const [name, spec] of Object.entries(obj || {})) { - const accept = ad[name] - // if it's already set, then we keep the existing edge - // Prod deps should not be marked as dev, however. - // NB: the Edge ctor adds itself to from.edgesOut + // Because of the order in which _loadDeps runs, we always want to + // prioritize a new edge over an existing one + for (const [name, spec] of Object.entries(deps || {})) { const current = this.edgesOut.get(name) - if (!current || current.dev && type === 'prod') - new Edge({ from, name, spec, accept, type }) + if (!current || current.type !== 'workspace') + new Edge({ from: this, name, spec, accept: ad[name], type }) } } get fsParent () { - return this[_fsParent] + const parent = this[_fsParent] + /* istanbul ignore next - should be impossible */ + debug(() => { + if (parent === this) + throw new Error('node set to its own fsParent') + }) + return parent } set fsParent (fsParent) { @@ -860,16 +883,31 @@ class Node { // root dependency brings peer deps along with it. In that case, we // will go ahead and create the invalid state, and then try to resolve // it with more tree construction, because it's a user request. - canReplaceWith (node) { + canReplaceWith (node, ignorePeers = []) { if (node.name !== this.name) return false + if (node.packageName !== this.packageName) + return false + + ignorePeers = new Set(ignorePeers) + // gather up all the deps of this node and that are only depended // upon by deps of this node. those ones don't count, since // they'll be replaced if this node is replaced anyway. const depSet = gatherDepSet([this], e => e.to !== this && e.valid) for (const edge of this.edgesIn) { + // when replacing peer sets, we need to be able to replace the entire + // peer group, which means we ignore incoming edges from other peers + // within the replacement set. + const ignored = !this.isTop && + edge.from.parent === this.parent && + edge.peer && + ignorePeers.has(edge.from.name) + if (ignored) + continue + // only care about edges that don't originate from this node if (!depSet.has(edge.from) && !edge.satisfiedBy(node)) return false @@ -878,8 +916,45 @@ class Node { return true } - canReplace (node) { - return node.canReplaceWith(this) + canReplace (node, ignorePeers) { + return node.canReplaceWith(this, ignorePeers) + } + + // return true if it's safe to remove this node, because anything that + // is depending on it would be fine with the thing that they would resolve + // to if it was removed, or nothing is depending on it in the first place. 
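// Illustrative usage sketch (not part of this patch; `tree` and `preferDedupe`
// are assumed to come from an in-progress ideal tree build): nested copies
// that an ancestor already satisfies can simply be detached.
for (const node of tree.inventory.values()) {
  if (node.canDedupe(preferDedupe))
    node.root = null // detach; dependents resolve to the ancestor copy instead
}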
+ canDedupe (preferDedupe = false) { + // not allowed to mess with shrinkwraps or bundles + if (this.inDepBundle || this.inShrinkwrap) + return false + + // it's a top level pkg, or a dep of one + if (!this.resolveParent || !this.resolveParent.resolveParent) + return false + + // no one wants it, remove it + if (this.edgesIn.size === 0) + return true + + const other = this.resolveParent.resolveParent.resolve(this.name) + + // nothing else, need this one + if (!other) + return false + + // if it's the same thing, then always fine to remove + if (other.matches(this)) + return true + + // if the other thing can't replace this, then skip it + if (!other.canReplace(this)) + return false + + // if we prefer dedupe, or if the version is greater/equal, take the other + if (preferDedupe || semver.gte(other.version, this.version)) + return true + + return false } satisfies (requested) { @@ -924,8 +999,8 @@ class Node { // if no resolved, check both package name and version // otherwise, conclude that they are different things - return this.package.name && node.package.name && - this.package.name === node.package.name && + return this.packageName && node.packageName && + this.packageName === node.packageName && this.version && node.version && this.version === node.version } @@ -962,7 +1037,13 @@ class Node { } get parent () { - return this[_parent] + const parent = this[_parent] + /* istanbul ignore next - should be impossible */ + debug(() => { + if (parent === this) + throw new Error('node set to its own parent') + }) + return parent } // This setter keeps everything in order when we move a node from @@ -1113,7 +1194,7 @@ class Node { } get target () { - return null + return this } set target (n) { @@ -1136,11 +1217,25 @@ class Node { return this.isTop ? this : this.parent.top } + get isFsTop () { + return !this.fsParent + } + + get fsTop () { + return this.isFsTop ? this : this.fsParent.fsTop + } + get resolveParent () { return this.parent || this.fsParent } resolve (name) { + /* istanbul ignore next - should be impossible, + * but I keep doing this mistake in tests */ + debug(() => { + if (typeof name !== 'string' || !name) + throw new Error('non-string passed to Node.resolve') + }) const mine = this.children.get(name) if (mine) return mine diff --git a/node_modules/@npmcli/arborist/lib/peer-entry-sets.js b/node_modules/@npmcli/arborist/lib/peer-entry-sets.js new file mode 100644 index 0000000000000..11f9a431607ec --- /dev/null +++ b/node_modules/@npmcli/arborist/lib/peer-entry-sets.js @@ -0,0 +1,72 @@ +// Given a node in a tree, return all of the peer dependency sets that +// it is a part of, with the entry (top or non-peer) edges into the sets +// identified. +// +// With this information, we can determine whether it is appropriate to +// replace the entire peer set with another (and remove the old one), +// push the set deeper into the tree, and so on. +// +// Returns a Map of { edge => Set(peerNodes) }, + +const peerEntrySets = node => { + // this is the union of all peer groups that the node is a part of + // later, we identify all of the entry edges, and create a set of + // 1 or more overlapping sets that this node is a part of. 
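// Illustrative usage sketch (not part of this patch; `node` is assumed to be a
// Node that participates in a peer group): walk the Map documented above to
// see which entry edges pull in which peer sets.
const log = require('proc-log')
for (const [entryEdge, peers] of peerEntrySets(node)) {
  log.silly('peer set entered via', entryEdge.name, `(${peers.size} nodes)`)
}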
+ const unionSet = new Set([node]) + for (const node of unionSet) { + for (const edge of node.edgesOut.values()) { + if (edge.valid && edge.peer && edge.to) + unionSet.add(edge.to) + } + for (const edge of node.edgesIn) { + if (edge.valid && edge.peer) + unionSet.add(edge.from) + } + } + const entrySets = new Map() + for (const peer of unionSet) { + for (const edge of peer.edgesIn) { + // if not valid, it doesn't matter anyway. either it's been previously + // overridden, or it's the thing we're interested in replacing. + if (!edge.valid) + continue + // this is the entry point into the peer set + if (!edge.peer || edge.from.isTop) { + // get the subset of peer brought in by this peer entry edge + const sub = new Set([peer]) + for (const peer of sub) { + for (const edge of peer.edgesOut.values()) { + if (edge.valid && edge.peer && edge.to) + sub.add(edge.to) + } + } + // if this subset does not include the node we are focused on, + // then it is not relevant for our purposes. Example: + // + // a -> (b, c, d) + // b -> PEER(d) b -> d -> e -> f <-> g + // c -> PEER(f, h) c -> (f <-> g, h -> g) + // d -> PEER(e) d -> e -> f <-> g + // e -> PEER(f) + // f -> PEER(g) + // g -> PEER(f) + // h -> PEER(g) + // + // The unionSet(e) will include c, but we don't actually care about + // it. We only expanded to the edge of the peer nodes in order to + // find the entry edges that caused the inclusion of peer sets + // including (e), so we want: + // Map{ + // Edge(a->b) => Set(b, d, e, f, g) + // Edge(a->d) => Set(d, e, f, g) + // } + if (sub.has(node)) + entrySets.set(edge, sub) + } + } + } + + return entrySets +} + +module.exports = peerEntrySets diff --git a/node_modules/@npmcli/arborist/lib/peer-set.js b/node_modules/@npmcli/arborist/lib/peer-set.js deleted file mode 100644 index 727814e1de3f0..0000000000000 --- a/node_modules/@npmcli/arborist/lib/peer-set.js +++ /dev/null @@ -1,25 +0,0 @@ -// when we have to dupe a set of peer dependencies deeper into the tree in -// order to make room for a dep that would otherwise conflict, we use -// this to get the set of all deps that have to be checked to ensure -// nothing is locking them into the current location. -// -// this is different in its semantics from an "optional set" (ie, the nodes -// that should be removed if an optional dep fails), because in this case, -// we specifically intend to include deps in the peer set that have -// dependants outside the set. -const peerSet = node => { - const set = new Set([node]) - for (const node of set) { - for (const edge of node.edgesOut.values()) { - if (edge.valid && edge.peer && edge.to) - set.add(edge.to) - } - for (const edge of node.edgesIn) { - if (edge.valid && edge.peer) - set.add(edge.from) - } - } - return set -} - -module.exports = peerSet diff --git a/node_modules/@npmcli/arborist/lib/place-dep.js b/node_modules/@npmcli/arborist/lib/place-dep.js new file mode 100644 index 0000000000000..913b2ba6c2bc7 --- /dev/null +++ b/node_modules/@npmcli/arborist/lib/place-dep.js @@ -0,0 +1,536 @@ +// Given a dep, a node that depends on it, and the edge representing that +// dependency, place the dep somewhere in the node's tree, and all of its +// peer dependencies. +// +// Handles all of the tree updating needed to place the dep, including +// removing replaced nodes, pruning now-extraneous or invalidated nodes, +// and saves a set of what was placed and what needs re-evaluation as +// a result. 
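Before the implementation, a usage sketch may help orient the reader. The real call sites (in Arborist's ideal-tree building) are not part of this diff, so the helper name and option defaults below are inferred from the constructor and getters that follow and should be read as illustrative assumptions rather than the documented API:

```js
// Hypothetical usage sketch; `dep` is the Node to place (resolved into a
// virtual root) and `edge` is the unmet or invalid Edge that wants it.
const PlaceDep = require('./place-dep.js')

const placeOne = (dep, edge, opts = {}) => {
  const pd = new PlaceDep({
    dep,
    edge,
    explicitRequest: false,
    updateNames: [],
    preferDedupe: false,
    force: false,
    strictPeerDeps: false,
    legacyPeerDeps: false,
    legacyBundling: false,
    globalStyle: false,
    auditReport: null,
    ...opts,
  })
  // pd.placed is the node actually added to the tree (null if an existing
  // node was kept or the conflict was skipped), and pd.allChildren flattens
  // the peer placements that were created along with it.
  return [pd, ...pd.allChildren]
    .map(p => p.placed)
    .filter(Boolean)
}

module.exports = placeOne
```

Everything that follows in this file is the machinery behind that single constructor call.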
+ +const log = require('proc-log') +const deepestNestingTarget = require('./deepest-nesting-target.js') +const CanPlaceDep = require('./can-place-dep.js') +const { + KEEP, + CONFLICT, +} = CanPlaceDep +const debug = require('./debug.js') + +const gatherDepSet = require('./gather-dep-set.js') +const peerEntrySets = require('./peer-entry-sets.js') + +class PlaceDep { + constructor (options) { + const { + dep, + edge, + parent = null, + } = options + this.name = edge.name + this.dep = dep + this.edge = edge + this.canPlace = null + + this.target = null + this.placed = null + + // inherit all these fields from the parent to ensure consistency. + const { + preferDedupe, + force, + explicitRequest, + updateNames, + auditReport, + legacyBundling, + strictPeerDeps, + legacyPeerDeps, + globalStyle, + } = parent || options + Object.assign(this, { + preferDedupe, + force, + explicitRequest, + updateNames, + auditReport, + legacyBundling, + strictPeerDeps, + legacyPeerDeps, + globalStyle, + }) + + this.children = [] + this.parent = parent + this.peerConflict = null + + this.checks = new Map() + + this.place() + } + + place () { + const { + edge, + dep, + preferDedupe, + globalStyle, + legacyBundling, + explicitRequest, + updateNames, + checks, + } = this + + // nothing to do if the edge is fine as it is + if (edge.to && + !edge.error && + !explicitRequest && + !updateNames.includes(edge.name) && + !this.isVulnerable(edge.to)) + return + + // walk up the tree until we hit either a top/root node, or a place + // where the dep is not a peer dep. + const start = this.getStartNode() + + let canPlace = null + let canPlaceSelf = null + for (const target of start.ancestry()) { + // if the current location has a peerDep on it, then we can't place here + // this is pretty rare to hit, since we always prefer deduping peers, + // and the getStartNode will start us out above any peers from the + // thing that depends on it. but we could hit it with something like: + // + // a -> (b@1, c@1) + // +-- c@1 + // +-- b -> PEEROPTIONAL(v) (c@2) + // +-- c@2 -> (v) + // + // So we check if we can place v under c@2, that's fine. + // Then we check under b, and can't, because of the optional peer dep. + // but we CAN place it under a, so the correct thing to do is keep + // walking up the tree. + const targetEdge = target.edgesOut.get(edge.name) + if (!target.isTop && targetEdge && targetEdge.peer) + continue + + const cpd = new CanPlaceDep({ + dep, + edge, + // note: this sets the parent's canPlace as the parent of this + // canPlace, but it does NOT add this canPlace to the parent's + // children. This way, we can know that it's a peer dep, and + // get the top edge easily, while still maintaining the + // tree of checks that factored into the original decision. + parent: this.parent && this.parent.canPlace, + target, + preferDedupe, + explicitRequest: this.explicitRequest, + }) + checks.set(target, cpd) + + // It's possible that a "conflict" is a conflict among the *peers* of + // a given node we're trying to place, but there actually is no current + // node. Eg, + // root -> (a, b) + // a -> PEER(c) + // b -> PEER(d) + // d -> PEER(c@2) + // We place (a), and get a peer of (c) along with it. + // then we try to place (b), and get CONFLICT in the check, because + // of the conflicting peer from (b)->(d)->(c@2). 
In that case, we + // should treat (b) and (d) as OK, and place them in the last place + // where they did not themselves conflict, and skip c@2 if conflict + // is ok by virtue of being forced or not ours and not strict. + if (cpd.canPlaceSelf !== CONFLICT) + canPlaceSelf = cpd + + // we found a place this can go, along with all its peer friends. + // we break when we get the first conflict + if (cpd.canPlace !== CONFLICT) + canPlace = cpd + else + break + + // if it's a load failure, just plop it in the first place attempted, + // since we're going to crash the build or prune it out anyway. + // but, this will frequently NOT be a successful canPlace, because + // it'll have no version or other information. + if (dep.errors.length) + break + + // nest packages like npm v1 and v2 + // very disk-inefficient + if (legacyBundling) + break + + // when installing globally, or just in global style, we never place + // deps above the first level. + if (globalStyle) { + const rp = target.resolveParent + if (rp && rp.isProjectRoot) + break + } + } + + Object.assign(this, { + canPlace, + canPlaceSelf, + }) + this.current = edge.to + + // if we can't find a target, that means that the last place checked, + // and all the places before it, had a conflict. + if (!canPlace) { + // if not forced, or it's our dep, or strictPeerDeps is set, then + // this is an ERESOLVE error. + if (!this.conflictOk) + return this.failPeerConflict() + + // ok! we're gonna allow the conflict, but we should still warn + // if we have a current, then we treat CONFLICT as a KEEP. + // otherwise, we just skip it. Only warn on the one that actually + // could not be placed somewhere. + if (!canPlaceSelf) { + this.warnPeerConflict() + return + } + + this.canPlace = canPlaceSelf + } + + // now we have a target, a tree of CanPlaceDep results for the peer group, + // and we are ready to go + this.placeInTree() + } + + placeInTree () { + const { + dep, + canPlace, + edge, + } = this + + /* istanbul ignore next */ + if (!canPlace) { + debug(() => { + throw new Error('canPlace not set, but trying to place in tree') + }) + return + } + + const { target } = canPlace + + log.silly( + 'placeDep', + target.location || 'ROOT', + `${dep.name}@${dep.version}`, + canPlace.description, + `for: ${this.edge.from.package._id || this.edge.from.location}`, + `want: ${edge.spec || '*'}` + ) + + const placementType = canPlace.canPlace === CONFLICT + ? canPlace.canPlaceSelf + : canPlace.canPlace + + // if we're placing in the tree with --force, we can get here even though + // it's a conflict. Treat it as a KEEP, but warn and move on. + if (placementType === KEEP) { + // this was an overridden peer dep + if (edge.peer && !edge.valid) + this.warnPeerConflict() + + // if we get a KEEP in a update scenario, then we MAY have something + // already duplicating this unnecessarily! For example: + // ``` + // root (dep: y@1) + // +-- x (dep: y@1.1) + // | +-- y@1.1.0 (replacing with 1.1.2, got KEEP at the root) + // +-- y@1.1.2 (updated already from 1.0.0) + // ``` + // Now say we do `reify({update:['y']})`, and the latest version is + // 1.1.2, which we now have in the root. We'll try to place y@1.1.2 + // first in x, then in the root, ending with KEEP, because we already + // have it. In that case, we ought to REMOVE the nm/x/nm/y node, because + // it is an unnecessary duplicate. 
+ this.pruneDedupable(target) + return + } + + // XXX if we are replacing SOME of a peer entry group, we will need to + // remove any that are not being replaced and will now be invalid, and + // re-evaluate them deeper into the tree. + + const virtualRoot = dep.parent + this.placed = new dep.constructor({ + name: dep.name, + pkg: dep.package, + resolved: dep.resolved, + integrity: dep.integrity, + legacyPeerDeps: this.legacyPeerDeps, + error: dep.errors[0], + ...(dep.isLink ? { target: dep.target, realpath: dep.target.path } : {}), + }) + + this.oldDep = target.children.get(this.name) + if (this.oldDep) + this.replaceOldDep() + else + this.placed.parent = target + + // if it's an overridden peer dep, warn about it + if (edge.peer && !this.placed.satisfies(edge)) + this.warnPeerConflict() + + // If the edge is not an error, then we're updating something, and + // MAY end up putting a better/identical node further up the tree in + // a way that causes an unnecessary duplication. If so, remove the + // now-unnecessary node. + if (edge.valid && edge.to && edge.to !== this.placed) + this.pruneDedupable(edge.to, false) + + // in case we just made some duplicates that can be removed, + // prune anything deeper in the tree that can be replaced by this + for (const node of target.root.inventory.query('name', this.name)) { + if (node.isDescendantOf(target) && !node.isTop) { + this.pruneDedupable(node, false) + // only walk the direct children of the ones we kept + if (node.root === target.root) { + for (const kid of node.children.values()) + this.pruneDedupable(kid, false) + } + } + } + + // also place its unmet or invalid peer deps at this location + // loop through any peer deps from the thing we just placed, and place + // those ones as well. it's safe to do this with the virtual nodes, + // because we're copying rather than moving them out of the virtual root, + // otherwise they'd be gone and the peer set would change throughout + // this loop. + for (const peerEdge of this.placed.edgesOut.values()) { + if (peerEdge.valid || !peerEdge.peer || peerEdge.overridden) + continue + + const peer = virtualRoot.children.get(peerEdge.name) + + // Note: if the virtualRoot *doesn't* have the peer, then that means + // it's an optional peer dep. If it's not being properly met (ie, + // peerEdge.valid is false), then this is likely heading for an + // ERESOLVE error, unless it can walk further up the tree. + if (!peer) + continue + + // overridden peerEdge, just accept what's there already + if (!peer.satisfies(peerEdge)) + continue + + this.children.push(new PlaceDep({ + parent: this, + dep: peer, + node: this.placed, + edge: peerEdge, + })) + } + } + + replaceOldDep () { + // XXX handle replacing an entire peer group? + // what about cases where we need to push some other peer groups deeper + // into the tree? all the tree updating should be done here, and track + // all the things that we add and remove, so that we can know what + // to re-evaluate. + + // if we're replacing, we should also remove any nodes for edges that + // are now invalid, and where this (or its deps) is the only dependent, + // and also recurse on that pruning. Otherwise leaving that dep node + // around can result in spurious conflicts pushing nodes deeper into + // the tree than needed in the case of cycles that will be removed + // later anyway. 
+ const oldDeps = [] + for (const [name, edge] of this.oldDep.edgesOut.entries()) { + if (!this.placed.edgesOut.has(name) && edge.to) + oldDeps.push(...gatherDepSet([edge.to], e => e.to !== edge.to)) + } + this.placed.replace(this.oldDep) + this.pruneForReplacement(this.placed, oldDeps) + } + + pruneForReplacement (node, oldDeps) { + // gather up all the now-invalid/extraneous edgesOut, as long as they are + // only depended upon by the old node/deps + const invalidDeps = new Set([...node.edgesOut.values()] + .filter(e => e.to && !e.valid).map(e => e.to)) + for (const dep of oldDeps) { + const set = gatherDepSet([dep], e => e.to !== dep && e.valid) + for (const dep of set) + invalidDeps.add(dep) + } + + // ignore dependency edges from the node being replaced, but + // otherwise filter the set down to just the set with no + // dependencies from outside the set, except the node in question. + const deps = gatherDepSet(invalidDeps, edge => + edge.from !== node && edge.to !== node && edge.valid) + + // now just delete whatever's left, because it's junk + for (const dep of deps) + dep.root = null + } + + // prune all the nodes in a branch of the tree that can be safely removed + // This is only the most basic duplication detection; it finds if there + // is another satisfying node further up the tree, and if so, dedupes. + // Even in legacyBundling mode, we do this amount of deduplication. + pruneDedupable (node, descend = true) { + if (node.canDedupe(this.preferDedupe)) { + // gather up all deps that have no valid edges in from outside + // the dep set, except for this node we're deduping, so that we + // also prune deps that would be made extraneous. + const deps = gatherDepSet([node], e => e.to !== node && e.valid) + for (const node of deps) + node.root = null + return + } + if (descend) { + // sort these so that they're deterministically ordered + // otherwise, resulting tree shape is dependent on the order + // in which they happened to be resolved. + const nodeSort = (a, b) => a.location.localeCompare(b.location, 'en') + + const children = [...node.children.values()].sort(nodeSort) + for (const child of children) + this.pruneDedupable(child) + const fsChildren = [...node.fsChildren].sort(nodeSort) + for (const topNode of fsChildren) { + const children = [...topNode.children.values()].sort(nodeSort) + for (const child of children) + this.pruneDedupable(child) + } + } + } + + get conflictOk () { + return this.force || (!this.isMine && !this.strictPeerDeps) + } + + get isMine () { + const { edge } = this.top + const { from: node } = edge + + if (node.isWorkspace || node.isProjectRoot) + return true + + if (!edge.peer) + return false + + // re-entry case. check if any non-peer edges come from the project, + // or any entryEdges on peer groups are from the root. 
+ let hasPeerEdges = false + for (const edge of node.edgesIn) { + if (edge.peer) { + hasPeerEdges = true + continue + } + if (edge.from.isWorkspace || edge.from.isProjectRoot) + return true + } + if (hasPeerEdges) { + for (const edge of peerEntrySets(node).keys()) { + if (edge.from.isWorkspace || edge.from.isProjectRoot) + return true + } + } + + return false + } + + warnPeerConflict () { + this.edge.overridden = true + const expl = this.explainPeerConflict() + log.warn('ERESOLVE', 'overriding peer dependency', expl) + } + + failPeerConflict () { + const expl = this.explainPeerConflict() + throw Object.assign(new Error('could not resolve'), expl) + } + + explainPeerConflict () { + const { edge, dep } = this.top + const { from: node } = edge + const curNode = node.resolve(edge.name) + + const expl = { + code: 'ERESOLVE', + edge: edge.explain(), + dep: dep.explain(edge), + } + + if (this.parent) { + // this is the conflicted peer + expl.current = curNode && curNode.explain(edge) + expl.peerConflict = this.current && this.current.explain(this.edge) + } else { + expl.current = curNode && curNode.explain() + if (this.canPlaceSelf && this.canPlaceSelf.canPlaceSelf !== CONFLICT) { + // failed while checking for a child dep + const cps = this.canPlaceSelf + for (const peer of cps.conflictChildren) { + if (peer.current) { + expl.peerConflict = { + current: peer.current.explain(), + peer: peer.dep.explain(peer.edge), + } + break + } + } + } else { + expl.peerConflict = { + current: this.current && this.current.explain(), + peer: this.dep.explain(this.edge), + } + } + } + + const { + strictPeerDeps, + force, + isMine, + } = this + Object.assign(expl, { + strictPeerDeps, + force, + isMine, + }) + + // XXX decorate more with this.canPlace and this.canPlaceSelf, + // this.checks, this.children, walk over conflicted peers, etc. + return expl + } + + getStartNode () { + // if we are a peer, then we MUST be at least as shallow as the + // peer dependent + const from = this.parent ? this.parent.getStartNode() : this.edge.from + return deepestNestingTarget(from, this.name) + } + + get top () { + return this.parent ? 
this.parent.top : this + } + + isVulnerable (node) { + return this.auditReport && this.auditReport.isVulnerable(node) + } + + get allChildren () { + const set = new Set(this.children) + for (const child of set) { + for (const grandchild of child.children) + set.add(grandchild) + } + return [...set] + } +} + +module.exports = PlaceDep diff --git a/node_modules/@npmcli/arborist/lib/printable.js b/node_modules/@npmcli/arborist/lib/printable.js index fb73c7c2bc434..4aa2fffd104b4 100644 --- a/node_modules/@npmcli/arborist/lib/printable.js +++ b/node_modules/@npmcli/arborist/lib/printable.js @@ -2,12 +2,13 @@ // of the current node and its descendents const util = require('util') +const relpath = require('./relpath.js') class ArboristNode { constructor (tree, path) { this.name = tree.name - if (tree.package.name && tree.package.name !== this.name) - this.packageName = tree.package.name + if (tree.packageName && tree.packageName !== this.name) + this.packageName = tree.packageName if (tree.version) this.version = tree.version this.location = tree.location @@ -28,6 +29,19 @@ class ArboristNode { this.peer = true if (tree.inBundle) this.bundled = true + if (tree.inDepBundle) + this.bundler = tree.getBundler().location + if (tree.isProjectRoot) + this.isProjectRoot = true + if (tree.isWorkspace) + this.isWorkspace = true + const bd = tree.package && tree.package.bundleDependencies + if (bd && bd.length) + this.bundleDependencies = bd + if (tree.inShrinkwrap) + this.inShrinkwrap = true + else if (tree.hasShrinkwrap) + this.hasShrinkwrap = true if (tree.error) this.error = treeError(tree.error) if (tree.errors && tree.errors.length) @@ -36,33 +50,45 @@ class ArboristNode { // edgesOut sorted by name if (tree.edgesOut.size) { this.edgesOut = new Map([...tree.edgesOut.entries()] - .sort(([a], [b]) => a.localeCompare(b)) + .sort(([a], [b]) => a.localeCompare(b, 'en')) .map(([name, edge]) => [name, new EdgeOut(edge)])) } // edgesIn sorted by location if (tree.edgesIn.size) { this.edgesIn = new Set([...tree.edgesIn] - .sort((a, b) => a.from.location.localeCompare(b.from.location)) + .sort((a, b) => a.from.location.localeCompare(b.from.location, 'en')) .map(edge => new EdgeIn(edge))) } + if (tree.workspaces && tree.workspaces.size) { + this.workspaces = new Map([...tree.workspaces.entries()] + .map(([name, path]) => [name, relpath(tree.root.realpath, path)])) + } + // fsChildren sorted by path if (tree.fsChildren.size) { this.fsChildren = new Set([...tree.fsChildren] - .sort(({path: a}, {path: b}) => a.localeCompare(b)) + .sort(({path: a}, {path: b}) => a.localeCompare(b, 'en')) .map(tree => printableTree(tree, path))) } // children sorted by name if (tree.children.size) { this.children = new Map([...tree.children.entries()] - .sort(([a], [b]) => a.localeCompare(b)) + .sort(([a], [b]) => a.localeCompare(b, 'en')) .map(([name, tree]) => [name, printableTree(tree, path)])) } } } +class ArboristVirtualNode extends ArboristNode { + constructor (tree, path) { + super(tree, path) + this.sourceReference = printableTree(tree.sourceReference, path) + } +} + class ArboristLink extends ArboristNode { constructor (tree, path) { super(tree, path) @@ -85,6 +111,8 @@ class Edge { this.spec = edge.spec || '*' if (edge.error) this.error = edge.error + if (edge.overridden) + this.overridden = edge.overridden } } @@ -100,6 +128,8 @@ class EdgeOut extends Edge { this.to ? ' -> ' + this.to : '' }${ this.error ? ' ' + this.error : '' + }${ + this.overridden ? 
' overridden' : '' } }` } } @@ -114,15 +144,24 @@ class EdgeIn extends Edge { [util.inspect.custom] () { return `{ ${this.from || '""'} ${this.type} ${this.name}@${this.spec}${ this.error ? ' ' + this.error : '' + }${ + this.overridden ? ' overridden' : '' } }` } } const printableTree = (tree, path = []) => { - if (path.includes(tree)) - return { location: tree.location } + if (!tree) + return tree + + const Cls = tree.isLink ? ArboristLink + : tree.sourceReference ? ArboristVirtualNode + : ArboristNode + if (path.includes(tree)) { + const obj = Object.create(Cls.prototype) + return Object.assign(obj, { location: tree.location }) + } path.push(tree) - const Cls = tree.isLink ? ArboristLink : ArboristNode return new Cls(tree, path) } diff --git a/node_modules/@npmcli/arborist/lib/shrinkwrap.js b/node_modules/@npmcli/arborist/lib/shrinkwrap.js index 828b9f328232e..ebbe004de72d6 100644 --- a/node_modules/@npmcli/arborist/lib/shrinkwrap.js +++ b/node_modules/@npmcli/arborist/lib/shrinkwrap.js @@ -32,7 +32,7 @@ const mismatch = (a, b) => a && b && a !== b // After calling this.commit(), any nodes not present in the tree will have // been removed from the shrinkwrap data as well. -const procLog = require('./proc-log.js') +const procLog = require('proc-log') const YarnLock = require('./yarn-lock.js') const {promisify} = require('util') const rimraf = promisify(require('rimraf')) @@ -41,6 +41,7 @@ const readFile = promisify(fs.readFile) const writeFile = promisify(fs.writeFile) const stat = promisify(fs.stat) const readdir_ = promisify(fs.readdir) +const readlink = promisify(fs.readlink) // XXX remove when drop support for node v10 const lstat = promisify(fs.lstat) @@ -176,10 +177,21 @@ const assertNoNewer = async (path, data, lockTime, dir = path, seen = null) => { : readdir(parent, { withFileTypes: true }) return children.catch(() => []) - .then(ents => Promise.all( - ents.filter(ent => ent.isDirectory() && !/^\./.test(ent.name)) - .map(ent => assertNoNewer(path, data, lockTime, resolve(parent, ent.name), seen)) - )).then(() => { + .then(ents => Promise.all(ents.map(async ent => { + const child = resolve(parent, ent.name) + if (ent.isDirectory() && !/^\./.test(ent.name)) + await assertNoNewer(path, data, lockTime, child, seen) + else if (ent.isSymbolicLink()) { + const target = resolve(parent, await readlink(child)) + const tstat = await stat(target).catch( + /* istanbul ignore next - windows */ () => null) + seen.add(relpath(path, child)) + /* istanbul ignore next - windows cannot do this */ + if (tstat && tstat.isDirectory() && !seen.has(relpath(path, target))) + await assertNoNewer(path, data, lockTime, target, seen) + } + }))) + .then(() => { if (dir !== path) return @@ -244,7 +256,7 @@ class Shrinkwrap { meta[key.replace(/^_/, '')] = val }) // we only include name if different from the node path name - const pname = node.package.name + const pname = node.packageName if (pname && pname !== node.name) meta.name = pname @@ -339,6 +351,7 @@ class Shrinkwrap { reset () { this.tree = null this[_awaitingUpdate] = new Map() + this.originalLockfileVersion = lockfileVersion this.data = { lockfileVersion, requires: true, @@ -704,6 +717,7 @@ class Shrinkwrap { resolved, integrity, hasShrinkwrap, + version, } = this.get(node.path) const pathFixed = !resolved ? 
null @@ -717,8 +731,12 @@ class Shrinkwrap { node.resolved === pathFixed const integrityOk = !integrity || !node.integrity || node.integrity === integrity + const versionOk = !version || !node.version || version === node.version + + const allOk = (resolved || integrity || version) && + resolvedOk && integrityOk && versionOk - if ((resolved || integrity) && resolvedOk && integrityOk) { + if (allOk) { node.resolved = node.resolved || pathFixed || null node.integrity = node.integrity || integrity || null node.hasShrinkwrap = node.hasShrinkwrap || hasShrinkwrap || false @@ -786,7 +804,7 @@ class Shrinkwrap { if (this.tree) { if (this.yarnLock) this.yarnLock.fromTree(this.tree) - const root = Shrinkwrap.metaFromNode(this.tree.target || this.tree, this.path) + const root = Shrinkwrap.metaFromNode(this.tree.target, this.path) this.data.packages = {} if (Object.keys(root).length) this.data.packages[''] = root @@ -815,7 +833,7 @@ class Shrinkwrap { [_buildLegacyLockfile] (node, lock, path = []) { if (node === this.tree) { // the root node - lock.name = node.package.name || node.name + lock.name = node.packageName || node.name if (node.version) lock.version = node.version } @@ -834,7 +852,7 @@ class Shrinkwrap { /* istanbul ignore next - sort calling order is indeterminate */ return aloc.length > bloc.length ? 1 : bloc.length > aloc.length ? -1 - : aloc[aloc.length - 1].localeCompare(bloc[bloc.length - 1]) + : aloc[aloc.length - 1].localeCompare(bloc[bloc.length - 1], 'en') })[0] const res = consistentResolve(node.resolved, this.path, this.path, true) @@ -848,7 +866,7 @@ class Shrinkwrap { const spec = !edge ? rSpec : npa.resolve(node.name, edge.spec, edge.from.realpath) - if (node.target) + if (node.isLink) lock.version = `file:${relpath(this.path, node.realpath)}` else if (spec && (spec.type === 'file' || spec.type === 'remote')) lock.version = spec.saveSpec @@ -860,9 +878,9 @@ class Shrinkwrap { lock.from = spec.raw } else if (!node.isRoot && node.package && - node.package.name && - node.package.name !== node.name) - lock.version = `npm:${node.package.name}@${node.version}` + node.packageName && + node.packageName !== node.name) + lock.version = `npm:${node.packageName}@${node.version}` else if (node.package && node.version) lock.version = node.version @@ -872,7 +890,7 @@ class Shrinkwrap { // when we didn't resolve to git, file, or dir, and didn't request // git, file, dir, or remote, then the resolved value is necessary. 
if (node.resolved && - !node.target && + !node.isLink && rSpec.type !== 'git' && rSpec.type !== 'file' && rSpec.type !== 'directory' && @@ -901,7 +919,7 @@ class Shrinkwrap { lock.optional = true } - const depender = node.target || node + const depender = node.target if (depender.edgesOut.size > 0) { if (node !== this.tree) { lock.requires = [...depender.edgesOut.entries()].reduce((set, [k, v]) => { @@ -926,7 +944,7 @@ class Shrinkwrap { } // now we walk the children, putting them in the 'dependencies' object - const {children} = node.target || node + const {children} = node.target if (!children.size) delete lock.dependencies else { diff --git a/node_modules/@npmcli/arborist/lib/tracker.js b/node_modules/@npmcli/arborist/lib/tracker.js index 47267872ce780..aefd5fe1bbf58 100644 --- a/node_modules/@npmcli/arborist/lib/tracker.js +++ b/node_modules/@npmcli/arborist/lib/tracker.js @@ -1,6 +1,6 @@ const _progress = Symbol('_progress') const _onError = Symbol('_onError') -const procLog = require('./proc-log.js') +const procLog = require('proc-log') module.exports = cls => class Tracker extends cls { constructor (options = {}) { diff --git a/node_modules/@npmcli/arborist/lib/tree-check.js b/node_modules/@npmcli/arborist/lib/tree-check.js index 00b43296fbdf5..a7e8d9c014213 100644 --- a/node_modules/@npmcli/arborist/lib/tree-check.js +++ b/node_modules/@npmcli/arborist/lib/tree-check.js @@ -1,6 +1,8 @@ const debug = require('./debug.js') const checkTree = (tree, checkUnreachable = true) => { + const log = [['START TREE CHECK', tree.path]] + // this can only happen in tests where we have a "tree" object // that isn't actually a tree. if (!tree.root || !tree.root.inventory) @@ -9,8 +11,21 @@ const checkTree = (tree, checkUnreachable = true) => { const { inventory } = tree.root const seen = new Set() const check = (node, via = tree, viaType = 'self') => { + log.push([ + 'CHECK', + node && node.location, + via && via.location, + viaType, + 'seen=' + seen.has(node), + 'promise=' + !!(node && node.then), + 'root=' + !!(node && node.isRoot), + ]) + if (!node || seen.has(node) || node.then) return + + seen.add(node) + if (node.isRoot && node !== tree.root) { throw Object.assign(new Error('double root'), { node: node.path, @@ -19,6 +34,7 @@ const checkTree = (tree, checkUnreachable = true) => { root: tree.root.path, via: via.path, viaType, + log, }) } @@ -31,6 +47,7 @@ const checkTree = (tree, checkUnreachable = true) => { via: via.path, viaType, otherRoot: node.root && node.root.path, + log, }) } @@ -43,6 +60,7 @@ const checkTree = (tree, checkUnreachable = true) => { viaType, inventory: [...node.inventory.values()].map(node => [node.path, node.location]), + log, }) } @@ -53,6 +71,7 @@ const checkTree = (tree, checkUnreachable = true) => { root: tree.root.path, via: via.path, viaType, + log, }) } @@ -65,14 +84,38 @@ const checkTree = (tree, checkUnreachable = true) => { via: via.path, viaType, devEdges: devEdges.map(e => [e.type, e.name, e.spec, e.error]), + log, + }) + } + + if (node.path === tree.root.path && node !== tree.root) { + throw Object.assign(new Error('node with same path as root'), { + node: node.path, + tree: tree.path, + root: tree.root.path, + via: via.path, + viaType, + log, + }) + } + + if (!node.isLink && node.path !== node.realpath) { + throw Object.assign(new Error('non-link with mismatched path/realpath'), { + node: node.path, + tree: tree.path, + realpath: node.realpath, + root: tree.root.path, + via: via.path, + viaType, + log, }) } const { parent, fsParent, target } = node - 
seen.add(node) check(parent, node, 'parent') check(fsParent, node, 'fsParent') check(target, node, 'target') + log.push(['CHILDREN', node.location, ...node.children.keys()]) for (const kid of node.children.values()) check(kid, node, 'children') for (const kid of node.fsChildren) @@ -81,6 +124,7 @@ const checkTree = (tree, checkUnreachable = true) => { check(link, node, 'linksIn') for (const top of node.tops) check(top, node, 'tops') + log.push(['DONE', node.location]) } check(tree) if (checkUnreachable) { @@ -92,6 +136,7 @@ const checkTree = (tree, checkUnreachable = true) => { location: node.location, root: tree.root.path, tree: tree.path, + log, }) } } diff --git a/node_modules/@npmcli/arborist/lib/update-root-package-json.js b/node_modules/@npmcli/arborist/lib/update-root-package-json.js deleted file mode 100644 index aba5614924ec7..0000000000000 --- a/node_modules/@npmcli/arborist/lib/update-root-package-json.js +++ /dev/null @@ -1,83 +0,0 @@ -const fs = require('fs') -const promisify = require('util').promisify -const readFile = promisify(fs.readFile) -const writeFile = promisify(fs.writeFile) -const {resolve} = require('path') - -const parseJSON = require('json-parse-even-better-errors') - -const { orderDeps } = require('./dep-spec.js') - -const depTypes = new Set([ - 'dependencies', - 'optionalDependencies', - 'devDependencies', - 'peerDependencies', -]) - -const parseJsonSafe = json => { - try { - return parseJSON(json) - } catch (er) { - return null - } -} - -const updateRootPackageJson = async tree => { - const filename = resolve(tree.path, 'package.json') - const originalJson = await readFile(filename, 'utf8').catch(() => null) - const originalContent = parseJsonSafe(originalJson) - - const depsData = orderDeps({ - ...tree.package, - }) - - // optionalDependencies don't need to be repeated in two places - if (depsData.dependencies) { - if (depsData.optionalDependencies) { - for (const name of Object.keys(depsData.optionalDependencies)) - delete depsData.dependencies[name] - } - if (Object.keys(depsData.dependencies).length === 0) - delete depsData.dependencies - } - - // if there's no package.json, just use internal pkg info as source of truth - // clone the object though, so we can still refer to what it originally was - const packageJsonContent = !originalContent ? depsData - : Object.assign({}, originalContent) - - // loop through all types of dependencies and update package json content - for (const type of depTypes) - packageJsonContent[type] = depsData[type] - - // if original package.json had dep in peerDeps AND deps, preserve that. - const { dependencies: origProd, peerDependencies: origPeer } = - originalContent || {} - const { peerDependencies: newPeer } = packageJsonContent - if (origProd && origPeer && newPeer) { - // we have original prod/peer deps, and new peer deps - // copy over any that were in both in the original - for (const name of Object.keys(origPeer)) { - if (origProd[name] !== undefined && newPeer[name] !== undefined) { - packageJsonContent.dependencies = packageJsonContent.dependencies || {} - packageJsonContent.dependencies[name] = newPeer[name] - } - } - } - - // format content - const { - [Symbol.for('indent')]: indent, - [Symbol.for('newline')]: newline, - } = tree.package - const format = indent === undefined ? ' ' : indent - const eol = newline === undefined ? 
'\n' : newline - const content = (JSON.stringify(packageJsonContent, null, format) + '\n') - .replace(/\n/g, eol) - - if (content !== originalJson) - return writeFile(filename, content) -} - -module.exports = updateRootPackageJson diff --git a/node_modules/@npmcli/arborist/lib/vuln.js b/node_modules/@npmcli/arborist/lib/vuln.js index 8f887a3fc96cb..5b1d1dc1ab83d 100644 --- a/node_modules/@npmcli/arborist/lib/vuln.js +++ b/node_modules/@npmcli/arborist/lib/vuln.js @@ -83,6 +83,9 @@ class Vuln { if (!specObj.registry) return true + if (specObj.subSpec) + spec = specObj.subSpec.rawSpec + for (const v of this.versions) { if (satisfies(v, spec) && !satisfies(v, this.range, semverOpt)) return false @@ -103,12 +106,12 @@ class Vuln { vulnerableVersions: undefined, id: undefined, }).sort((a, b) => - String(a.source || a).localeCompare(String(b.source || b))), + String(a.source || a).localeCompare(String(b.source || b, 'en'))), effects: [...this.effects].map(v => v.name) - .sort(/* istanbul ignore next */(a, b) => a.localeCompare(b)), + .sort(/* istanbul ignore next */(a, b) => a.localeCompare(b, 'en')), range: this.simpleRange, nodes: [...this.nodes].map(n => n.location) - .sort(/* istanbul ignore next */(a, b) => a.localeCompare(b)), + .sort(/* istanbul ignore next */(a, b) => a.localeCompare(b, 'en')), fixAvailable: this[_fixAvailable], } } diff --git a/node_modules/@npmcli/arborist/lib/yarn-lock.js b/node_modules/@npmcli/arborist/lib/yarn-lock.js index 14c7691f1bd42..e237cc5c6a461 100644 --- a/node_modules/@npmcli/arborist/lib/yarn-lock.js +++ b/node_modules/@npmcli/arborist/lib/yarn-lock.js @@ -34,7 +34,7 @@ const {breadth} = require('treeverse') // sort a key/value object into a string of JSON stringified keys and vals const sortKV = obj => Object.keys(obj) - .sort((a, b) => a.localeCompare(b)) + .sort((a, b) => a.localeCompare(b, 'en')) .map(k => ` ${JSON.stringify(k)} ${JSON.stringify(obj[k])}`) .join('\n') @@ -165,7 +165,7 @@ class YarnLock { toString () { return prefix + [...new Set([...this.entries.values()])] .map(e => e.toString()) - .sort((a, b) => a.localeCompare(b)).join('\n\n') + '\n' + .sort((a, b) => a.localeCompare(b, 'en')).join('\n\n') + '\n' } fromTree (tree) { @@ -175,7 +175,7 @@ class YarnLock { tree, visit: node => this.addEntryFromNode(node), getChildren: node => [...node.children.values(), ...node.fsChildren] - .sort((a, b) => a.depth - b.depth || a.name.localeCompare(b.name)), + .sort((a, b) => a.depth - b.depth || a.name.localeCompare(b.name, 'en')), }) return this } @@ -183,7 +183,7 @@ class YarnLock { addEntryFromNode (node) { const specs = [...node.edgesIn] .map(e => `${node.name}@${e.spec}`) - .sort((a, b) => a.localeCompare(b)) + .sort((a, b) => a.localeCompare(b, 'en')) // Note: // yarn will do excessive duplication in a case like this: @@ -309,7 +309,7 @@ class YarnLockEntry { toString () { // sort objects to the bottom, then alphabetical return ([...this[_specs]] - .sort((a, b) => a.localeCompare(b)) + .sort((a, b) => a.localeCompare(b, 'en')) .map(JSON.stringify).join(', ') + ':\n' + Object.getOwnPropertyNames(this) @@ -318,7 +318,7 @@ class YarnLockEntry { (a, b) => /* istanbul ignore next - sort call order is unpredictable */ (typeof this[a] === 'object') === (typeof this[b] === 'object') - ? a.localeCompare(b) + ? a.localeCompare(b, 'en') : typeof this[a] === 'object' ? 
1 : -1) .map(prop => typeof this[prop] !== 'object' diff --git a/node_modules/@npmcli/arborist/package.json b/node_modules/@npmcli/arborist/package.json index cf4224234cd45..56046eaa5f357 100644 --- a/node_modules/@npmcli/arborist/package.json +++ b/node_modules/@npmcli/arborist/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/arborist", - "version": "2.2.5", + "version": "2.8.0", "description": "Manage node_modules trees", "dependencies": { "@npmcli/installed-package-contents": "^1.0.7", @@ -9,24 +9,29 @@ "@npmcli/move-file": "^1.1.0", "@npmcli/name-from-folder": "^1.0.1", "@npmcli/node-gyp": "^1.0.1", + "@npmcli/package-json": "^1.0.1", "@npmcli/run-script": "^1.8.2", "bin-links": "^2.2.1", "cacache": "^15.0.3", "common-ancestor-path": "^1.0.1", "json-parse-even-better-errors": "^2.3.1", - "json-stringify-nice": "^1.1.1", + "json-stringify-nice": "^1.1.4", + "mkdirp": "^1.0.4", "mkdirp-infer-owner": "^2.0.0", "npm-install-checks": "^4.0.0", - "npm-package-arg": "^8.1.0", + "npm-package-arg": "^8.1.5", "npm-pick-manifest": "^6.1.0", - "npm-registry-fetch": "^9.0.0", - "pacote": "^11.2.6", + "npm-registry-fetch": "^11.0.0", + "pacote": "^11.3.5", "parse-conflict-json": "^1.1.1", + "proc-log": "^1.0.0", "promise-all-reject-late": "^1.0.0", "promise-call-limit": "^1.0.1", "read-package-json-fast": "^2.0.2", "readdir-scoped-modules": "^1.1.0", - "semver": "^7.3.4", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "ssri": "^8.0.1", "tar": "^6.1.0", "treeverse": "^1.0.4", "walk-up-path": "^1.0.0" @@ -40,17 +45,15 @@ "eslint-plugin-promise": "^4.2.1", "eslint-plugin-standard": "^4.0.1", "minify-registry-metadata": "^2.1.0", - "mutate-fs": "^2.1.1", - "require-inject": "^1.4.4", - "tap": "^14.11.0", - "tcompare": "^3.0.4" + "tap": "^15.0.9", + "tcompare": "^5.0.6" }, "scripts": { "test": "npm run test-only --", "test-only": "tap", "posttest": "npm run lint", "snap": "tap", - "postsnap": "npm run lint", + "postsnap": "npm run lintfix", "test-proxy": "ARBORIST_TEST_PROXY=1 tap --snapshot", "preversion": "npm test", "postversion": "npm publish", @@ -75,18 +78,21 @@ "bin": { "arborist": "bin/index.js" }, + "//": "sk test-env locale to catch locale-specific sorting", "tap": { - "100": true, "after": "test/fixtures/cleanup.js", "coverage-map": "map.js", - "esm": false, "test-env": [ - "NODE_OPTIONS=--no-warnings" + "NODE_OPTIONS=--no-warnings", + "LC_ALL=sk" ], "node-arg": [ "--no-warnings", "--no-deprecation" ], - "timeout": "120" + "timeout": "240" + }, + "engines": { + "node": ">= 10" } } diff --git a/node_modules/@npmcli/ci-detect/README.md b/node_modules/@npmcli/ci-detect/README.md deleted file mode 100644 index 45f9a01c2f387..0000000000000 --- a/node_modules/@npmcli/ci-detect/README.md +++ /dev/null @@ -1,80 +0,0 @@ -# @npmcli/ci-detect - -Detect what kind of CI environment the program is in - -[![Build Status](https://travis-ci.com/npm/ci-detect.svg?branch=master)](https://travis-ci.com/npm/ci-detect) -[![Coverage Status](https://coveralls.io/repos/github/npm/ci-detect/badge.svg?branch=master)](https://coveralls.io/github/npm/ci-detect?branch=master) - -## USAGE - -```js -const ciDetect = require('@npmcli/ci-detect') -// false if not in CI -// otherwise, a string indicating the CI environment type -const inCI = ciDetect() -``` - -## CIs Detected - -Returns one of the following strings, or `false` if none match, by looking -at the appropriate environment variables. 
- -* `'gerrit'` Gerrit -* `'gitlab'` GitLab -* `'circleci'` Circle-CI -* `'semaphore'` Semaphore -* `'drone'` Drone -* `'github-actions'` GitHub Actions -* `'tddium'` TDDium -* `'jenkins'` Jenkins -* `'bamboo'` Bamboo -* `'gocd'` GoCD -* `'wercker'` Oracle Wercker -* `'netlify'` Netlify -* `'now-github'` Zeit.co's Now for GitHub deployment service -* `'now-bitbucket'` Zeit.co's Now for BitBucket deployment service -* `'now-gitlab'` Zeit.co's Now for GitLab deployment service -* `'now'` Zeit.co's Now service, but not GitHub/BitBucket/GitLab -* `'azure-pipelines'` Azure Pipelines -* `'bitrise'` Bitrise -* `'buddy'` Buddy -* `'buildkite'` Buildkite -* `'cirrus'` Cirrus CI -* `'dsari'` dsari CI -* `'strider'` Strider CI -* `'taskcluster'` Mozilla Taskcluster -* `'hudson'` Hudson CI -* `'magnum'` Magnum CI -* `'nevercode'` Nevercode -* `'render'` Render CI -* `'sail'` Sail CI -* `'shippable'` Shippable -* `'heroku'` Heroku -* `'codeship'` CodeShip -* Anything that sets the `CI_NAME` environment variable will return the - value as the result. (This is how CodeShip is detected.) -* `'travis-ci'` Travis-CI - A few other CI systems set `TRAVIS=1` in the - environment, because devs use that to indicate "test mode", so this one - can get some false positives, and is tested later in the process to - minimize this effect. -* `'aws-codebuild'` AWS CodeBuild -* `'builder'` Google Cloud Builder - This one is a bit weird. It doesn't - really set anything that can be reliably detected except - `BUILDER_OUTPUT`, so it can get false positives pretty easily. -* `'custom'` anything else that sets `CI` environment variable to either - `'1'` or `'true'`. - -## Caveats - -Note that since any program can set or unset whatever environment variables -they want, this is not 100% reliable. - -Also, note that if your program does different behavior in -CI/test/deployment than other places, then there's a good chance that -you're doing something wrong! - -But, for little niceties like setting colors or other output parameters, or -logging and that sort of non-essential thing, this module provides a way to -tweak without checking a bunch of things in a bunch of places. Mostly, -it's a single place to keep a note of what CI system sets which environment -variable. diff --git a/node_modules/@npmcli/config/README.md b/node_modules/@npmcli/config/README.md deleted file mode 100644 index fe70e4663eea1..0000000000000 --- a/node_modules/@npmcli/config/README.md +++ /dev/null @@ -1,224 +0,0 @@ -# `@npmcli/config` - -Configuration management for the npm cli. - -This module is the spiritual descendant of -[`npmconf`](http://npm.im/npmconf), and the code that once lived in npm's -`lib/config/` folder. - -It does the management of configuration files that npm uses, but -importantly, does _not_ define all the configuration defaults or types, as -those parts make more sense to live within the npm CLI itself. - -The only exceptions: - -- The `prefix` config value has some special semantics, setting the local - prefix if specified on the CLI options and not in global mode, or the - global prefix otherwise. -- The `project` config file is loaded based on the local prefix (which can - only be set by the CLI config options, and otherwise defaults to a walk - up the folder tree to the first parent containing a `node_modules` - folder, `package.json` file, or `package-lock.json` file.) -- The `userconfig` value, as set by the environment and CLI (defaulting to - `~/.npmrc`, is used to load user configs. 
-- The `globalconfig` value, as set by the environment, CLI, and - `userconfig` file (defaulting to `$PREFIX/etc/npmrc`) is used to load - global configs. -- A `builtin` config, read from a `npmrc` file in the root of the npm - project itself, overrides all defaults. - -The resulting hierarchy of configs: - -- CLI switches. eg `--some-key=some-value` on the command line. These are - parsed by [`nopt`](http://npm.im/nopt), which is not a great choice, but - it's the one that npm has used forever, and changing it will be - difficult. -- Environment variables. eg `npm_config_some_key=some_value` in the - environment. There is no way at this time to modify this prefix. -- INI-formatted project configs. eg `some-key = some-value` in the - `localPrefix` folder (ie, the `cwd`, or its nearest parent that contains - either a `node_modules` folder or `package.json` file.) -- INI-formatted userconfig file. eg `some-key = some-value` in `~/.npmrc`. - The `userconfig` config value can be overridden by the `cli`, `env`, or - `project` configs to change this value. -- INI-formatted globalconfig file. eg `some-key = some-value` in - the `globalPrefix` folder, which is inferred by looking at the location - of the node executable, or the `prefix` setting in the `cli`, `env`, - `project`, or `userconfig`. The `globalconfig` value at any of those - levels can override this. -- INI-formatted builtin config file. eg `some-key = some-value` in - `/usr/local/lib/node_modules/npm/npmrc`. This is not configurable, and - is determined by looking in the `npmPath` folder. -- Default values (passed in by npm when it loads this module). - -## USAGE - -```js -const Config = require('@npmcli/config') -// the types of all the configs we know about -const types = require('./config/types.js') -// default values for all the configs we know about -const defaults = require('./config/defaults.js') -// if you want -c to be short for --call and so on, define it here -const shorthands = require('./config/shorthands.js') - -const conf = new Config({ - // path to the npm module being run - npmPath: resolve(__dirname, '..'), - types, - shorthands, - defaults, - // optional, defaults to process.argv - argv: process.argv, - // optional, defaults to process.env - env: process.env, - // optional, defaults to process.execPath - execPath: process.execPath, - // optional, defaults to process.platform - platform: process.platform, - // optional, defaults to process.cwd() - cwd: process.cwd(), - // optional, defaults to emitting 'log' events on process object - // only silly, verbose, warn, and error are logged by this module - log: require('npmlog') -}) - -// returns a promise that fails if config loading fails, and -// resolves when the config object is ready for action -conf.load().then(() => { - console.log('loaded ok! some-key = ' + conf.get('some-key')) -}).catch(er => { - console.error('error loading configs!', er) -}) -``` - -## API - -The `Config` class is the sole export. - -```js -const Config = require('@npmcli/config') -``` - -### static `Config.typeDefs` - -The type definitions passed to `nopt` for CLI option parsing and known -configuration validation. - -### constructor `new Config(options)` - -Options: - -- `types` Types of all known config values. Note that some are effectively - given semantic value in the config loading process itself. -- `shorthands` An object mapping a shorthand value to an array of CLI - arguments that replace it. -- `defaults` Default values for each of the known configuration keys. 
- These should be defined for all configs given a type, and must be valid. -- `npmPath` The path to the `npm` module, for loading the `builtin` config - file. -- `cwd` Optional, defaults to `process.cwd()`, used for inferring the - `localPrefix` and loading the `project` config. -- `platform` Optional, defaults to `process.platform`. Used when inferring - the `globalPrefix` from the `execPath`, since this is done diferently on - Windows. -- `execPath` Optional, defaults to `process.execPath`. Used to infer the - `globalPrefix`. -- `log` Optional, the object used to log debug messages, warnings, and - errors. Defaults to emitting on the `process` object. -- `env` Optional, defaults to `process.env`. Source of the environment - variables for configuration. -- `argv` Optional, defaults to `process.argv`. Source of the CLI options - used for configuration. - -Returns a `config` object, which is not yet loaded. - -Fields: - -- `config.globalPrefix` The prefix for `global` operations. Set by the - `prefix` config value, or defaults based on the location of the - `execPath` option. -- `config.localPrefix` The prefix for `local` operations. Set by the - `prefix` config value on the CLI only, or defaults to either the `cwd` or - its nearest ancestor containing a `node_modules` folder or `package.json` - file. -- `config.sources` A read-only `Map` of the file (or a comment, if no file - found, or relevant) to the config level loaded from that source. -- `config.data` A `Map` of config level to `ConfigData` objects. These - objects should not be modified directly under any circumstances. - - `source` The source where this data was loaded from. - - `raw` The raw data used to generate this config data, as it was parsed - initially from the environment, config file, or CLI options. - - `data` The data object reflecting the inheritance of configs up to this - point in the chain. - - `loadError` Any errors encountered that prevented the loading of this - config data. -- `config.list` A list sorted in priority of all the config data objects in - the prototype chain. `config.list[0]` is the `cli` level, - `config.list[1]` is the `env` level, and so on. -- `cwd` The `cwd` param -- `env` The `env` param -- `argv` The `argv` param -- `execPath` The `execPath` param -- `platform` The `platform` param -- `log` The `log` param -- `defaults` The `defaults` param -- `shorthands` The `shorthands` param -- `types` The `types` param -- `npmPath` The `npmPath` param -- `globalPrefix` The effective `globalPrefix` -- `localPrefix` The effective `localPrefix` -- `prefix` If `config.get('global')` is true, then `globalPrefix`, - otherwise `localPrefix` -- `home` The user's home directory, found by looking at `env.HOME` or - calling `os.homedir()`. -- `loaded` A boolean indicating whether or not configs are loaded -- `valid` A getter that returns `true` if all the config objects are valid. - Any data objects that have been modified with `config.set(...)` will be - re-evaluated when `config.valid` is read. - -### `config.load()` - -Load configuration from the various sources of information. - -Returns a `Promise` that resolves when configuration is loaded, and fails -if a fatal error is encountered. - -### `config.find(key)` - -Find the effective place in the configuration levels a given key is set. -Returns one of: `cli`, `env`, `project`, `user`, `global`, `builtin`, or -`default`. - -Returns `null` if the key is not set. - -### `config.get(key, where = 'cli')` - -Load the given key from the config stack. 
- -### `config.set(key, value, where = 'cli')` - -Set the key to the specified value, at the specified level in the config -stack. - -### `config.delete(key, where = 'cli')` - -Delete the configuration key from the specified level in the config stack. - -### `config.validate(where)` - -Verify that all known configuration options are set to valid values, and -log a warning if they are invalid. - -If `where` is not set, then all config objects are validated. - -Returns `true` if all configs are valid. - -Note that it's usually enough (and more efficient) to just check -`config.valid`, since each data object is marked for re-evaluation on every -`config.set()` operation. - -### `config.save(where)` - -Save the config file specified by the `where` param. Must be one of -`project`, `user`, `global`, `builtin`. diff --git a/node_modules/@npmcli/config/lib/get-user-agent.js b/node_modules/@npmcli/config/lib/get-user-agent.js deleted file mode 100644 index c3d3c7a1bf3de..0000000000000 --- a/node_modules/@npmcli/config/lib/get-user-agent.js +++ /dev/null @@ -1,13 +0,0 @@ -// Accepts a config object, returns a user-agent string -const getUserAgent = (config) => { - const ciName = config.get('ci-name') - return (config.get('user-agent') || '') - .replace(/\{node-version\}/gi, config.get('node-version')) - .replace(/\{npm-version\}/gi, config.get('npm-version')) - .replace(/\{platform\}/gi, process.platform) - .replace(/\{arch\}/gi, process.arch) - .replace(/\{ci\}/gi, ciName ? `ci/${ciName}` : '') - .trim() -} - -module.exports = getUserAgent diff --git a/node_modules/@npmcli/config/lib/index.js b/node_modules/@npmcli/config/lib/index.js index e7fac96c1c8c8..f947896f0ba34 100644 --- a/node_modules/@npmcli/config/lib/index.js +++ b/node_modules/@npmcli/config/lib/index.js @@ -47,7 +47,6 @@ const envReplace = require('./env-replace.js') const parseField = require('./parse-field.js') const typeDescription = require('./type-description.js') const setEnvs = require('./set-envs.js') -const getUserAgent = require('./get-user-agent.js') // types that can be saved back to const confFileTypes = new Set([ @@ -69,6 +68,9 @@ const _get = Symbol('get') const _find = Symbol('find') const _loadObject = Symbol('loadObject') const _loadFile = Symbol('loadFile') +const _checkDeprecated = Symbol('checkDeprecated') +const _flatten = Symbol('flatten') +const _flatOptions = Symbol('flatOptions') class Config { static get typeDefs () { @@ -76,9 +78,9 @@ class Config { } constructor ({ - types, + definitions, shorthands, - defaults, + flatten, npmPath, // options just to override in tests, mostly @@ -89,10 +91,27 @@ class Config { execPath = process.execPath, cwd = process.cwd(), }) { - this.npmPath = npmPath + + // turn the definitions into nopt's weirdo syntax + this.definitions = definitions + const types = {} + const defaults = {} + this.deprecated = {} + for (const [key, def] of Object.entries(definitions)) { + defaults[key] = def.default + types[key] = def.type + if (def.deprecated) + this.deprecated[key] = def.deprecated.trim().replace(/\n +/, '\n') + } + + // populated the first time we flatten the object + this[_flatOptions] = null + this[_flatten] = flatten this.types = types this.shorthands = shorthands this.defaults = defaults + + this.npmPath = npmPath this.log = log this.argv = argv this.env = env @@ -178,15 +197,31 @@ class Config { throw new Error('call config.load() before setting values') if (!confTypes.has(where)) throw new Error('invalid config location param: ' + where) - if (key === '_auth') { - const { 
email } = this.getCredentialsByURI(this.get('registry')) - if (!email) - throw new Error('Cannot set _auth without first setting email') - } - this.data.get(where).data[key] = val + this[_checkDeprecated](key) + const { data } = this.data.get(where) + data[key] = val // this is now dirty, the next call to this.valid will have to check it this.data.get(where)[_valid] = null + + // the flat options are invalidated, regenerate next time they're needed + this[_flatOptions] = null + } + + get flat () { + if (this[_flatOptions]) + return this[_flatOptions] + + // create the object for flat options passed to deps + process.emit('time', 'config:load:flatten') + this[_flatOptions] = {} + // walk from least priority to highest + for (const { data } of this.data.values()) { + this[_flatten](data, this[_flatOptions]) + } + process.emit('timeEnd', 'config:load:flatten') + + return this[_flatOptions] } delete (key, where = 'cli') { @@ -233,11 +268,6 @@ class Config { await this.loadGlobalConfig() process.emit('timeEnd', 'config:load:global') - // now the extras - process.emit('time', 'config:load:cafile') - await this.loadCAFile() - process.emit('timeEnd', 'config:load:cafile') - // warn if anything is not valid process.emit('time', 'config:load:validate') this.validate() @@ -247,13 +277,17 @@ class Config { // symbols, as that module also does a bunch of get operations this[_loaded] = true + process.emit('time', 'config:load:credentials') + const reg = this.get('registry') + const creds = this.getCredentialsByURI(reg) + // ignore this error because a failed set will strip out anything that + // might be a security hazard, which was the intention. + try { this.setCredentialsByURI(reg, creds) } catch (_) {} + process.emit('timeEnd', 'config:load:credentials') + // set proper globalPrefix now that everything is loaded this.globalPrefix = this.get('prefix') - process.emit('time', 'config:load:setUserAgent') - this.setUserAgent() - process.emit('timeEnd', 'config:load:setUserAgent') - process.emit('time', 'config:load:setEnvs') this.setEnvs() process.emit('timeEnd', 'config:load:setEnvs') @@ -376,13 +410,13 @@ class Config { this.data.get(where)[_valid] = false if (Array.isArray(type)) { - if (type.indexOf(typeDefs.url.type) !== -1) + if (type.includes(typeDefs.url.type)) type = typeDefs.url.type else { /* istanbul ignore if - no actual configs matching this, but * path types SHOULD be handled this way, like URLs, for the * same reason */ - if (type.indexOf(typeDefs.path.type) !== -1) + if (type.includes(typeDefs.path.type)) type = typeDefs.path.type } } @@ -428,11 +462,21 @@ class Config { for (const [key, value] of Object.entries(obj)) { const k = envReplace(key, this.env) const v = this.parseField(value, k) + if (where !== 'default') + this[_checkDeprecated](k, where, obj, [key, value]) conf.data[k] = v } } } + [_checkDeprecated] (key, where, obj, kv) { + // XXX a future npm version will make this a warning. + // An even more future npm version will make this an error. + if (this.deprecated[key]) { + this.log.verbose('config', key, this.deprecated[key]) + } + } + // Parse a field, coercing it to the best type available. parseField (f, key, listElement = false) { return parseField(f, key, this, listElement) @@ -547,14 +591,17 @@ class Config { const nerfed = nerfDart(uri) const def = nerfDart(this.get('registry')) if (def === nerfed) { + // do not delete email, that shouldn't be nerfed any more. + // just delete the nerfed copy, if one exists. 
this.delete(`-authtoken`, 'user') this.delete(`_authToken`, 'user') + this.delete(`_authtoken`, 'user') this.delete(`_auth`, 'user') this.delete(`_password`, 'user') this.delete(`username`, 'user') - this.delete(`email`, 'user') } this.delete(`${nerfed}:-authtoken`, 'user') + this.delete(`${nerfed}:_authtoken`, 'user') this.delete(`${nerfed}:_authToken`, 'user') this.delete(`${nerfed}:_auth`, 'user') this.delete(`${nerfed}:_password`, 'user') @@ -562,7 +609,7 @@ class Config { this.delete(`${nerfed}:email`, 'user') } - setCredentialsByURI (uri, { token, username, password, email, alwaysAuth }) { + setCredentialsByURI (uri, { token, username, password, email }) { const nerfed = nerfDart(uri) const def = nerfDart(this.get('registry')) @@ -570,41 +617,45 @@ class Config { // remove old style auth info not limited to a single registry this.delete('_password', 'user') this.delete('username', 'user') - this.delete('email', 'user') this.delete('_auth', 'user') this.delete('_authtoken', 'user') + this.delete('-authtoken', 'user') this.delete('_authToken', 'user') } - this.delete(`${nerfed}:-authtoken`) + // email used to be nerfed always. if we're using the default + // registry, de-nerf it. + if (nerfed === def) { + email = email || + this.get('email', 'user') || + this.get(`${nerfed}:email`, 'user') + if (email) + this.set('email', email, 'user') + } + + // field that hasn't been used as documented for a LONG time, + // and as of npm 7.10.0, isn't used at all. We just always + // send auth if we have it, only to the URIs under the nerf dart. + this.delete(`${nerfed}:always-auth`, 'user') + + this.delete(`${nerfed}:-authtoken`, 'user') + this.delete(`${nerfed}:_authtoken`, 'user') + this.delete(`${nerfed}:email`, 'user') if (token) { this.set(`${nerfed}:_authToken`, token, 'user') this.delete(`${nerfed}:_password`, 'user') this.delete(`${nerfed}:username`, 'user') - this.delete(`${nerfed}:email`, 'user') - this.delete(`${nerfed}:always-auth`, 'user') - } else if (username || password || email) { - if (username || password) { - if (!username) - throw new Error('must include username') - if (!password) - throw new Error('must include password') - } - if (!email) - throw new Error('must include email') + } else if (username || password) { + if (!username) + throw new Error('must include username') + if (!password) + throw new Error('must include password') this.delete(`${nerfed}:_authToken`, 'user') - if (username || password) { - this.set(`${nerfed}:username`, username, 'user') - // note: not encrypted, no idea why we bothered to do this, but oh well - // protects against shoulder-hacks if password is memorable, I guess? - const encoded = Buffer.from(password, 'utf8').toString('base64') - this.set(`${nerfed}:_password`, encoded, 'user') - } - this.set(`${nerfed}:email`, email, 'user') - if (alwaysAuth !== undefined) - this.set(`${nerfed}:always-auth`, alwaysAuth, 'user') - else - this.delete(`${nerfed}:always-auth`, 'user') + this.set(`${nerfed}:username`, username, 'user') + // note: not encrypted, no idea why we bothered to do this, but oh well + // protects against shoulder-hacks if password is memorable, I guess? 
+ const encoded = Buffer.from(password, 'utf8').toString('base64') + this.set(`${nerfed}:_password`, encoded, 'user') } else { throw new Error('No credentials to set.') } @@ -615,18 +666,12 @@ class Config { const nerfed = nerfDart(uri) const creds = {} - // you can set always-auth for a single registry, or as a default - const alwaysAuthReg = this.get(`${nerfed}:always-auth`) - if (alwaysAuthReg !== undefined) - creds.alwaysAuth = !!alwaysAuthReg - else - creds.alwaysAuth = this.get('always-auth') - const email = this.get(`${nerfed}:email`) || this.get('email') if (email) creds.email = email const tokenReg = this.get(`${nerfed}:_authToken`) || + this.get(`${nerfed}:_authtoken`) || this.get(`${nerfed}:-authtoken`) || nerfed === nerfDart(this.get('registry')) && this.get('_authToken') @@ -645,6 +690,16 @@ class Config { return creds } + const authReg = this.get(`${nerfed}:_auth`) + if (authReg) { + const authDecode = Buffer.from(authReg, 'base64').toString('utf8') + const authSplit = authDecode.split(':') + creds.username = authSplit.shift() + creds.password = authSplit.join(':') + creds.auth = authReg + return creds + } + // at this point, we can only use the values if the URI is the // default registry. const defaultNerf = nerfDart(this.get('registry')) @@ -675,48 +730,6 @@ class Config { return creds } - async loadCAFile () { - const where = this[_find]('cafile') - - /* istanbul ignore if - it'll always be set in the defaults */ - if (!where) - return - - const cafile = this[_get]('cafile', where) - const ca = this[_get]('ca', where) - - // if you have a ca, or cafile is set to null, then nothing to do here. - if (ca || !cafile) - return - - const raw = await readFile(cafile, 'utf8').catch(er => { - if (er.code !== 'ENOENT') - throw er - }) - if (!raw) - return - - const delim = '-----END CERTIFICATE-----' - const output = raw.replace(/\r\n/g, '\n').split(delim) - .filter(section => section.trim()) - .map(section => section.trimLeft() + delim) - - // make it non-enumerable so we don't save it back by accident - const { data } = this.data.get(where) - Object.defineProperty(data, 'ca', { - value: output, - enumerable: false, - configurable: true, - writable: true, - }) - } - - // the user-agent configuration is a template that gets populated - // with some variables, that takes place here - setUserAgent () { - this.set('user-agent', getUserAgent(this)) - } - // set up the environment object we have with npm_config_* environs // for all configs that are different from their default values, and // set EDITOR and HOME. diff --git a/node_modules/@npmcli/config/lib/set-envs.js b/node_modules/@npmcli/config/lib/set-envs.js index 36d37145466e0..370a2f3ffd34b 100644 --- a/node_modules/@npmcli/config/lib/set-envs.js +++ b/node_modules/@npmcli/config/lib/set-envs.js @@ -50,11 +50,13 @@ const setEnvs = (config) => { platform, env, defaults, + definitions, list: [cliConf, envConf], } = config env.INIT_CWD = process.cwd() + // if the key is deprecated, skip it always. 
// if the key is the default value, // if the environ is NOT the default value, // set the environ @@ -65,6 +67,10 @@ const setEnvs = (config) => { const cliSet = new Set(Object.keys(cliConf)) const envSet = new Set(Object.keys(envConf)) for (const key in cliConf) { + const { deprecated, envExport = true } = definitions[key] || {} + if (deprecated || envExport === false) + continue + if (sameConfigValue(defaults[key], cliConf[key])) { // config is the default, if the env thought different, then we // have to set it BACK to the default in the environment. diff --git a/node_modules/@npmcli/config/package.json b/node_modules/@npmcli/config/package.json index 644544a49d869..f80669640ebd4 100644 --- a/node_modules/@npmcli/config/package.json +++ b/node_modules/@npmcli/config/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/config", - "version": "1.2.9", + "version": "2.2.0", "files": [ "lib" ], @@ -24,7 +24,7 @@ "coverage-map": "map.js" }, "devDependencies": { - "tap": "^14.10.8" + "tap": "^15.0.4" }, "dependencies": { "ini": "^2.0.0", diff --git a/node_modules/@npmcli/disparity-colors/CHANGELOG.md b/node_modules/@npmcli/disparity-colors/CHANGELOG.md deleted file mode 100644 index 216d1db905db8..0000000000000 --- a/node_modules/@npmcli/disparity-colors/CHANGELOG.md +++ /dev/null @@ -1,6 +0,0 @@ -# Changelog - -## 1.0.0 - -- Initial release - diff --git a/node_modules/@npmcli/disparity-colors/README.md b/node_modules/@npmcli/disparity-colors/README.md deleted file mode 100644 index a89be36d87f2e..0000000000000 --- a/node_modules/@npmcli/disparity-colors/README.md +++ /dev/null @@ -1,49 +0,0 @@ -# @npmcli/disparity-colors - -[![NPM version](https://img.shields.io/npm/v/@npmcli/disparity-colors)](https://www.npmjs.com/package/@npmcli/disparity-colors) -[![Build Status](https://img.shields.io/github/workflow/status/npm/disparity-colors/node-ci)](https://github.com/npm/disparity-colors) -[![License](https://img.shields.io/github/license/npm/disparity-colors)](https://github.com/npm/disparity-colors/blob/master/LICENSE) - -Spiritual sucessor to [disparity](https://www.npmjs.com/package/disparity). Colorizes [Diff Unified format](https://en.wikipedia.org/wiki/Diff#Unified_format) output using [ansi-styles](https://www.npmjs.com/package/ansi-styles). - -## Install - -`npm install @npmcli/disparity-colors` - -## Usage: - -```js -const colorize = require('@npmcli/disparity-colors') -mapWorkspaces(`--- a/src/index.js -+++ b/src/index.js -@@ -1,4 +1,5 @@ - "use strict"; -+"use foo"; - - const os = require("os"); -`) -// --- a/src/index.js -// +++ b/src/index.js -// @@ -1,4 +1,5 @@ -// "use strict"; -// +"use foo"; -// -// const os = require("os"); -``` - -## API: - -### `colorize(str, opts = {}) -> String` - -- `str`: A [Diff Unified format](https://en.wikipedia.org/wiki/Diff#Unified_format) string -- `opts`: - - `headerLength`: A **Number** defining how many lines should be colorized as header - -#### Returns - -A **String** including the appropriate [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) - -## LICENSE - -[ISC](./LICENSE) - diff --git a/node_modules/@npmcli/git/README.md b/node_modules/@npmcli/git/README.md deleted file mode 100644 index ca8afcbce41d5..0000000000000 --- a/node_modules/@npmcli/git/README.md +++ /dev/null @@ -1,157 +0,0 @@ -# @npmcli/git - -A utility for spawning git from npm CLI contexts. - -This is _not_ an implementation of git itself, it's just a thing that -spawns child processes to tell the system git CLI implementation to do -stuff. 
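Because every helper here ultimately rejects with whatever the spawned `git` process reported, the refactored `lib/index.js` in this changeset also exports the new typed error classes as `git.errors`. A minimal sketch of how a consumer might branch on them; the repository path and ref are hypothetical:

```js
const git = require('@npmcli/git')
const { GitPathspecError, GitConnectionError } = git.errors

// try to check out a ref that may not exist (path and ref are made up)
git.spawn(['checkout', 'no-such-ref'], { cwd: '/path/to/repo' })
  .then(({ stdout }) => console.log(stdout))
  .catch(er => {
    if (er instanceof GitPathspecError) {
      // git reported an unknown pathspec/ref
      console.error('ref not found')
    } else if (er instanceof GitConnectionError) {
      // transient network failure; spawn() already retried per the retry opts
      console.error('could not reach the remote')
    } else {
      throw er
    }
  })
```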
- -## USAGE - -```js -const git = require('@npmcli/git') -git.clone('git://foo/bar.git', 'some-branch', 'some-path', opts) // clone a repo - .then(() => git.spawn(['checkout', 'some-branch'], {cwd: 'bar'})) - .then(() => git.spawn(['you get the idea'])) -``` - -## API - -Most methods take an options object. Options are described below. - -### `git.spawn(args, opts = {})` - -Launch a `git` subprocess with the arguments specified. - -All the other functions call this one at some point. - -Processes are launched using -[`@npmcli/promise-spawn`](http://npm.im/@npmcli/promise-spawn), with the -`stdioString: true` option enabled by default, since git output is -generally in readable string format. - -Return value is a `Promise` that resolves to a result object with `{cmd, -args, code, signal, stdout, stderr}` members, or rejects with an error with -the same fields, passed back from -[`@npmcli/promise-spawn`](http://npm.im/@npmcli/promise-spawn). - -### `git.clone(repo, ref = 'HEAD', target = null, opts = {})` -> `Promise<sha String>` - -Clone the repository into `target` path (or the default path for the name -of the repository), checking out `ref`. - -Return value is the sha of the current HEAD in the locally cloned -repository. - -In lieu of a specific `ref`, you may also pass in a `spec` option, which is -a [`npm-package-arg`](http://npm.im/npm-package-arg) object for a `git` -package dependency reference. In this way, you can select SemVer tags -within a range, or any git committish value. For example: - -```js -const npa = require('npm-package-arg') -git.clone('git@github.com:npm/git.git', '', null, { - spec: npa('github:npm/git#semver:1.x'), -}) - -// only gitRange and gitCommittish are relevant, so this works, too -git.clone('git@github.com:npm/git.git', null, null, { - spec: { gitRange: '1.x' } -}) -``` - -This will automatically do a shallow `--depth=1` clone on any hosts that -are known to support it. To force a shallow or deep clone, you can set the -`gitShallow` option to `true` or `false` respectively. - -### `git.revs(repo, opts = {})` -> `Promise<rev doc Object>` - -Fetch a representation of all of the named references in a given -repository. The resulting doc is intentionally somewhat -[packument](https://www.npmjs.com/package/pacote#packuments)-like, so that -git semver ranges can be applied using the same -[`npm-pick-manifest`](http://npm.im/npm-pick-manifest) logic. - -The resulting object looks like: - -```js -revs = { - versions: { - // all semver-looking tags go in here... - // version: { sha, ref, rawRef, type } - '1.0.0': { - sha: '1bc5fba3353f8e1b56493b266bc459276ab23139', - ref: 'v1.0.0', - rawRef: 'refs/tags/v1.0.0', - type: 'tag', - }, - }, - 'dist-tags': { - HEAD: '1.0.0', - latest: '1.0.0', - }, - refs: { - // all the advertised refs that can be cloned down remotely - HEAD: { sha, ref, rawRef, type: 'head' }, - master: { ... }, - 'v1.0.0': { ... }, - 'refs/tags/v1.0.0': { ... }, - }, - shas: { - // all named shas referenced above - // sha: [list, of, refs] - '6b2501f9183a1753027a9bf89a184b7d3d4602c7': [ - 'HEAD', - 'master', - 'refs/heads/master', - ], - '1bc5fba3353f8e1b56493b266bc459276ab23139': [ 'v1.0.0', 'refs/tags/v1.0.0' ], - }, -} -``` - -### `git.is(opts)` -> `Promise<Boolean>` - -Resolve to `true` if the path argument refers to the root of a git -repository. - -It does this by looking for a file in `${path}/.git/index`, which is not an -airtight indicator, but at least avoids being fooled by an empty directory -or a file named `.git`. 
- -### `git.find(opts)` -> `Promise<String | null>` - -Given a path, walk up the file system tree until a git repo working -directory is found. Since this calls `stat` a bunch of times, it's -probably best to only call it if you're reasonably sure you're likely to be -in a git project somewhere. - -Resolves to `null` if not in a git project. - -### `git.isClean(opts = {})` -> `Promise<Boolean>` - -Return true if in a git dir, and that git dir is free of changes. This -will resolve `true` if the git working dir is clean, or `false` if not, and -reject if the path is not within a git directory or some other error -occurs. - -## OPTIONS - -- `retry` An object to configure retry behavior for transient network - errors with exponential backoff. - - `retries`: Defaults to `opts.fetchRetries` or 2 - - `factor`: Defaults to `opts.fetchRetryFactor` or 10 - - `maxTimeout`: Defaults to `opts.fetchRetryMaxtimeout` or 60000 - - `minTimeout`: Defaults to `opts.fetchRetryMintimeout` or 1000 -- `git` Path to the `git` binary to use. Will look up the first `git` in - the `PATH` if not specified. -- `spec` The [`npm-package-arg`](http://npm.im/npm-package-arg) specifier - object for the thing being fetched (if relevant). -- `fakePlatform` set to a fake value of `process.platform` to use. (Just - for testing `win32` behavior on Unix, and vice versa.) -- `cwd` The current working dir for the git command. Particularly for - `find` and `is` and `isClean`, it's good to know that this defaults to - `process.cwd()`, as one might expect. -- Any other options that can be passed to - [`@npmcli/promise-spawn`](http://npm.im/@npmcli/promise-spawn), or - `child_process.spawn()`. diff --git a/node_modules/@npmcli/git/lib/clone.js b/node_modules/@npmcli/git/lib/clone.js index 3c50a05cf1aec..6754fd7606009 100644 --- a/node_modules/@npmcli/git/lib/clone.js +++ b/node_modules/@npmcli/git/lib/clone.js @@ -15,20 +15,22 @@ const shallowHosts = new Set([ 'gist.github.com', 'gitlab.com', 'bitbucket.com', - 'bitbucket.org', + 'bitbucket.org' ]) -const { parse } = require('url') +// we have to use url.parse until we add the same shim that hosted-git-info has +// to handle scp:// urls +const { parse } = require('url') // eslint-disable-line node/no-deprecated-api const { basename, resolve } = require('path') const revs = require('./revs.js') const spawn = require('./spawn.js') +const { isWindows } = require('./utils.js') const pickManifest = require('npm-pick-manifest') -const { promisify } = require('util') const fs = require('fs') const mkdirp = require('mkdirp') -module.exports = (repo, ref = 'HEAD', target = null, /* istanbul ignore next */ opts = {}) => +module.exports = (repo, ref = 'HEAD', target = null, opts = {}) => revs(repo, opts).then(revs => clone( repo, revs, @@ -38,38 +40,56 @@ module.exports = (repo, ref = 'HEAD', target = null, /* istanbul ignore next */ opts )) -const maybeShallow = (repo, opts) => - opts.gitShallow === false || opts.gitShallow ? opts.gitShallow - : shallowHosts.has(parse(repo).host) - -const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32' +const maybeShallow = (repo, opts) => { + if (opts.gitShallow === false || opts.gitShallow) { + return opts.gitShallow + } + return shallowHosts.has(parse(repo).host) +} const defaultTarget = (repo, /* istanbul ignore next */ cwd = process.cwd()) => - resolve(cwd, basename(repo.replace(/[\/\\]?\.git$/, ''))) - -const clone = (repo, revs, ref, revDoc, target, opts) => - !revDoc ? 
unresolved(repo, ref, target, opts) - : revDoc.sha === revs.refs.HEAD.sha ? plain(repo, revDoc, target, opts) - : revDoc.type === 'tag' || revDoc.type === 'branch' - ? branch(repo, revDoc, target, opts) - : other(repo, revDoc, target, opts) + resolve(cwd, basename(repo.replace(/[/\\]?\.git$/, ''))) + +const clone = (repo, revs, ref, revDoc, target, opts) => { + if (!revDoc) { + return unresolved(repo, ref, target, opts) + } + if (revDoc.sha === revs.refs.HEAD.sha) { + return plain(repo, revDoc, target, opts) + } + if (revDoc.type === 'tag' || revDoc.type === 'branch') { + return branch(repo, revDoc, target, opts) + } + return other(repo, revDoc, target, opts) +} const resolveRef = (revs, ref, opts) => { const { spec = {} } = opts ref = spec.gitCommittish || ref - return !revs ? /* istanbul ignore next - will fail anyway, can't pull */ null - : spec.gitRange ? pickManifest(revs, spec.gitRange, opts) - : !ref ? revs.refs.HEAD - : revs.refs[ref] ? revs.refs[ref] - : revs.shas[ref] ? revs.refs[revs.shas[ref][0]] - : null + /* istanbul ignore next - will fail anyway, can't pull */ + if (!revs) { + return null + } + if (spec.gitRange) { + return pickManifest(revs, spec.gitRange, opts) + } + if (!ref) { + return revs.refs.HEAD + } + if (revs.refs[ref]) { + return revs.refs[ref] + } + if (revs.shas[ref]) { + return revs.refs[revs.shas[ref][0]] + } + return null } // pull request or some other kind of advertised ref const other = (repo, revDoc, target, opts) => { const shallow = maybeShallow(repo, opts) - const fetchOrigin = [ 'fetch', 'origin', revDoc.rawRef ] + const fetchOrigin = ['fetch', 'origin', revDoc.rawRef] .concat(shallow ? ['--depth=1'] : []) const git = (args) => spawn(args, { ...opts, cwd: target }) @@ -93,12 +113,10 @@ const branch = (repo, revDoc, target, opts) => { revDoc.ref, repo, target, - '--recurse-submodules', + '--recurse-submodules' ] - if (maybeShallow(repo, opts)) - args.push('--depth=1') - if (isWindows(opts)) - args.push('--config', 'core.longpaths=true') + if (maybeShallow(repo, opts)) { args.push('--depth=1') } + if (isWindows(opts)) { args.push('--config', 'core.longpaths=true') } return spawn(args, opts).then(() => revDoc.sha) } @@ -110,22 +128,24 @@ const plain = (repo, revDoc, target, opts) => { target, '--recurse-submodules' ] - if (maybeShallow(repo, opts)) - args.push('--depth=1') - if (isWindows(opts)) - args.push('--config', 'core.longpaths=true') + if (maybeShallow(repo, opts)) { args.push('--depth=1') } + if (isWindows(opts)) { args.push('--config', 'core.longpaths=true') } return spawn(args, opts).then(() => revDoc.sha) } -const updateSubmodules = (target, opts) => new Promise(res => - fs.stat(target + '/.gitmodules', er => res(er ? 
null - : spawn([ +const updateSubmodules = (target, opts) => new Promise(resolve => + fs.stat(target + '/.gitmodules', er => { + if (er) { + return resolve(null) + } + return resolve(spawn([ 'submodule', 'update', '-q', '--init', '--recursive' - ], { ...opts, cwd: target })))) + ], { ...opts, cwd: target })) + })) const unresolved = (repo, ref, target, opts) => { // can't do this one shallowly, because the ref isn't advertised @@ -139,5 +159,5 @@ const unresolved = (repo, ref, target, opts) => { .then(() => git(['checkout', ref])) .then(() => updateSubmodules(target, opts)) .then(() => git(['rev-parse', '--revs-only', 'HEAD'])) - .then(({stdout}) => stdout.trim()) + .then(({ stdout }) => stdout.trim()) } diff --git a/node_modules/@npmcli/git/lib/env.js b/node_modules/@npmcli/git/lib/env.js deleted file mode 100644 index 87787cdbebe67..0000000000000 --- a/node_modules/@npmcli/git/lib/env.js +++ /dev/null @@ -1,33 +0,0 @@ -const uniqueFilename = require('unique-filename') -const { join } = require('path') -const {tmpdir} = require('os') - -const goodEnvVars = new Set([ - 'GIT_ASKPASS', - 'GIT_EXEC_PATH', - 'GIT_PROXY_COMMAND', - 'GIT_SSH', - 'GIT_SSH_COMMAND', - 'GIT_SSL_CAINFO', - 'GIT_SSL_NO_VERIFY' -]) - -// memoize -let gitEnv - -module.exports = () => { - if (gitEnv) - return gitEnv - - // we set the template dir to an empty folder to give git less to do - const tmpDir = join(tmpdir(), 'npmcli-git-template-tmp') - const tmpName = uniqueFilename(tmpDir, 'git-clone') - return gitEnv = Object.keys(process.env).reduce((gitEnv, k) => { - if (goodEnvVars.has(k) || !k.startsWith('GIT_')) - gitEnv[k] = process.env[k] - return gitEnv - }, { - GIT_ASKPASS: 'echo', - GIT_TEMPLATE_DIR: tmpName - }) -} diff --git a/node_modules/@npmcli/git/lib/errors.js b/node_modules/@npmcli/git/lib/errors.js new file mode 100644 index 0000000000000..25b2b9f9fd6a6 --- /dev/null +++ b/node_modules/@npmcli/git/lib/errors.js @@ -0,0 +1,36 @@ + +const maxRetry = 3 + +class GitError extends Error { + shouldRetry () { + return false + } +} + +class GitConnectionError extends GitError { + constructor (message) { + super('A git connection error occurred') + } + + shouldRetry (number) { + return number < maxRetry + } +} + +class GitPathspecError extends GitError { + constructor (message) { + super('The git reference could not be found') + } +} + +class GitUnknownError extends GitError { + constructor (message) { + super('An unknown git error occurred') + } +} + +module.exports = { + GitConnectionError, + GitPathspecError, + GitUnknownError +} diff --git a/node_modules/@npmcli/git/lib/find.js b/node_modules/@npmcli/git/lib/find.js index 2d2ad308665de..d58f01dbcc16f 100644 --- a/node_modules/@npmcli/git/lib/find.js +++ b/node_modules/@npmcli/git/lib/find.js @@ -1,7 +1,15 @@ const is = require('./is.js') const { dirname } = require('path') -const check = (cwd, prev) => is({ cwd }).then(isGit => - isGit ? cwd - : cwd === prev ? 
null - : check(dirname(cwd), cwd)) -module.exports = ({ cwd = process.cwd() } = {}) => check(cwd) + +module.exports = async ({ cwd = process.cwd() } = {}) => { + if (await is({ cwd })) { + return cwd + } + while (cwd !== dirname(cwd)) { + cwd = dirname(cwd) + if (await is({ cwd })) { + return cwd + } + } + return null +} diff --git a/node_modules/@npmcli/git/lib/index.js b/node_modules/@npmcli/git/lib/index.js index 7088792a1fc6e..20d7cfd01cfd1 100644 --- a/node_modules/@npmcli/git/lib/index.js +++ b/node_modules/@npmcli/git/lib/index.js @@ -5,4 +5,5 @@ module.exports = { is: require('./is.js'), find: require('./find.js'), isClean: require('./is-clean.js'), + errors: require('./errors.js') } diff --git a/node_modules/@npmcli/git/lib/is-clean.js b/node_modules/@npmcli/git/lib/is-clean.js index 19f8049e0fb14..182373be94193 100644 --- a/node_modules/@npmcli/git/lib/is-clean.js +++ b/node_modules/@npmcli/git/lib/is-clean.js @@ -1,6 +1,6 @@ const spawn = require('./spawn.js') module.exports = (opts = {}) => - spawn([ 'status', '--porcelain=v1', '-uno' ], opts) - .then(res => res.stdout.trim().split(/\r?\n+/) - .map(l => l.trim()).filter(l => l).length ? false : true) + spawn(['status', '--porcelain=v1', '-uno'], opts) + .then(res => !res.stdout.trim().split(/\r?\n+/) + .map(l => l.trim()).filter(l => l).length) diff --git a/node_modules/@npmcli/git/lib/lines-to-revs.js b/node_modules/@npmcli/git/lib/lines-to-revs.js index 524e672431ea4..9f879ca2475f5 100644 --- a/node_modules/@npmcli/git/lib/lines-to-revs.js +++ b/node_modules/@npmcli/git/lib/lines-to-revs.js @@ -7,7 +7,7 @@ module.exports = lines => finish(lines.reduce(linesToRevsReducer, { versions: {}, 'dist-tags': {}, refs: {}, - shas: {}, + shas: {} })) const finish = revs => distTags(shaList(peelTags(revs))) @@ -15,16 +15,16 @@ const finish = revs => distTags(shaList(peelTags(revs))) // We can check out shallow clones on specific SHAs if we have a ref const shaList = revs => { Object.keys(revs.refs).forEach(ref => { - doc = revs.refs[ref] - if (revs.shas[doc.sha]) - revs.shas[doc.sha].push(ref) - else + const doc = revs.refs[ref] + if (!revs.shas[doc.sha]) { revs.shas[doc.sha] = [ref] + } else { + revs.shas[doc.sha].push(ref) + } }) return revs } - // Replace any tags with their ^{} counterparts, if those exist const peelTags = revs => { Object.keys(revs.refs).filter(ref => ref.endsWith('^{}')).forEach(ref => { @@ -48,30 +48,38 @@ const distTags = revs => { // 'latest' branch if one exists and is a version, // or HEAD if not. const ver = revs.versions[v] - if (revs.refs.latest && ver.sha === revs.refs.latest.sha) + if (revs.refs.latest && ver.sha === revs.refs.latest.sha) { revs['dist-tags'].latest = v - else if (ver.sha === HEAD.sha) { + } else if (ver.sha === HEAD.sha) { revs['dist-tags'].HEAD = v - if (!revs.refs.latest) - revs['dist-tags'].latest = v + if (!revs.refs.latest) { revs['dist-tags'].latest = v } } }) return revs } -const refType = ref => - ref.startsWith('refs/tags/') ? 'tag' - : ref.startsWith('refs/heads/') ? 'branch' - : ref.startsWith('refs/pull/') ? 'pull' - : ref === 'HEAD' ? 
'head' - // Could be anything, ignore for now - : /* istanbul ignore next */ 'other' +const refType = ref => { + if (ref.startsWith('refs/tags/')) { + return 'tag' + } + if (ref.startsWith('refs/heads/')) { + return 'branch' + } + if (ref.startsWith('refs/pull/')) { + return 'pull' + } + if (ref === 'HEAD') { + return 'head' + } + // Could be anything, ignore for now + /* istanbul ignore next */ + return 'other' +} // return the doc, or null if we should ignore it. const lineToRevDoc = line => { const split = line.trim().split(/\s+/, 2) - if (split.length < 2) - return null + if (split.length < 2) { return null } const sha = split[0].trim() const rawRef = split[1].trim() @@ -114,8 +122,7 @@ const lineToRevDoc = line => { const linesToRevsReducer = (revs, line) => { const doc = lineToRevDoc(line) - if (!doc) - return revs + if (!doc) { return revs } revs.refs[doc.ref] = doc revs.refs[doc.rawRef] = doc @@ -125,8 +132,9 @@ const linesToRevsReducer = (revs, line) => { // which is a pretty common pattern. const match = !doc.ref.endsWith('^{}') && doc.ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/) - if (match && semver.valid(match[1], true)) + if (match && semver.valid(match[1], true)) { revs.versions[semver.clean(match[1], true)] = doc + } } return revs diff --git a/node_modules/@npmcli/git/lib/make-error.js b/node_modules/@npmcli/git/lib/make-error.js new file mode 100644 index 0000000000000..043a8e6e95181 --- /dev/null +++ b/node_modules/@npmcli/git/lib/make-error.js @@ -0,0 +1,33 @@ +const { + GitConnectionError, + GitPathspecError, + GitUnknownError +} = require('./errors.js') + +const connectionErrorRe = new RegExp([ + 'remote error: Internal Server Error', + 'The remote end hung up unexpectedly', + 'Connection timed out', + 'Operation timed out', + 'Failed to connect to .* Timed out', + 'Connection reset by peer', + 'SSL_ERROR_SYSCALL', + 'The requested URL returned error: 503' +].join('|')) + +const missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/ + +function makeError (er) { + const message = er.stderr + let gitEr + if (connectionErrorRe.test(message)) { + gitEr = new GitConnectionError(message) + } else if (missingPathspecRe.test(message)) { + gitEr = new GitPathspecError(message) + } else { + gitEr = new GitUnknownError(message) + } + return Object.assign(gitEr, er) +} + +module.exports = makeError diff --git a/node_modules/@npmcli/git/lib/opts.js b/node_modules/@npmcli/git/lib/opts.js index 7da4801a6aae0..144e0a3aaf03f 100644 --- a/node_modules/@npmcli/git/lib/opts.js +++ b/node_modules/@npmcli/git/lib/opts.js @@ -1,6 +1,12 @@ -const gitEnv = require('./env.js') +// Values we want to set if they're not already defined by the end user +// This defaults to accepting new ssh host key fingerprints +const gitEnv = { + GIT_ASKPASS: 'echo', + GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new' +} module.exports = (opts = {}) => ({ stdioString: true, ...opts, - env: opts.env || gitEnv(), + shell: false, + env: opts.env || { ...gitEnv, ...process.env } }) diff --git a/node_modules/@npmcli/git/lib/revs.js b/node_modules/@npmcli/git/lib/revs.js index 5744731872231..81059594f7aee 100644 --- a/node_modules/@npmcli/git/lib/revs.js +++ b/node_modules/@npmcli/git/lib/revs.js @@ -4,21 +4,25 @@ const LRU = require('lru-cache') const revsCache = new LRU({ max: 100, - maxAge: 5 * 60 * 1000, + maxAge: 5 * 60 * 1000 }) const linesToRevs = require('./lines-to-revs.js') -module.exports = (repo, opts = {}) => { +module.exports = async (repo, opts = {}) => { if 
(!opts.noGitRevCache) { const cached = revsCache.get(repo) - if (cached) - return Promise.resolve(cached) + if (cached) { + return cached + } } return pinflight(`ls-remote:${repo}`, () => spawn(['ls-remote', repo], opts) - .then(({stdout}) => linesToRevs(stdout.trim().split('\n'))) - .then(revs => (revsCache.set(repo, revs), revs)) + .then(({ stdout }) => linesToRevs(stdout.trim().split('\n'))) + .then(revs => { + revsCache.set(repo, revs) + return revs + }) ) } diff --git a/node_modules/@npmcli/git/lib/should-retry.js b/node_modules/@npmcli/git/lib/should-retry.js deleted file mode 100644 index 8082bb5d7c6e7..0000000000000 --- a/node_modules/@npmcli/git/lib/should-retry.js +++ /dev/null @@ -1,17 +0,0 @@ -const transientErrors = [ - 'remote error: Internal Server Error', - 'The remote end hung up unexpectedly', - 'Connection timed out', - 'Operation timed out', - 'Failed to connect to .* Timed out', - 'Connection reset by peer', - 'SSL_ERROR_SYSCALL', - 'The requested URL returned error: 503' -].join('|') - -const transientErrorRe = new RegExp(transientErrors) - -const maxRetry = 3 - -module.exports = (error, number) => - transientErrorRe.test(error) && (number < maxRetry) diff --git a/node_modules/@npmcli/git/lib/spawn.js b/node_modules/@npmcli/git/lib/spawn.js index 3c3a943fe9f0d..1c89a4c53cf86 100644 --- a/node_modules/@npmcli/git/lib/spawn.js +++ b/node_modules/@npmcli/git/lib/spawn.js @@ -1,6 +1,6 @@ const spawn = require('@npmcli/promise-spawn') const promiseRetry = require('promise-retry') -const shouldRetry = require('./should-retry.js') +const makeError = require('./make-error.js') const whichGit = require('./which.js') const makeOpts = require('./opts.js') const procLog = require('./proc-log.js') @@ -8,26 +8,36 @@ const procLog = require('./proc-log.js') module.exports = (gitArgs, opts = {}) => { const gitPath = whichGit(opts) - if (gitPath instanceof Error) - return Promise.reject(gitPath) + if (gitPath instanceof Error) { return Promise.reject(gitPath) } + + // undocumented option, mostly only here for tests + const args = opts.allowReplace || gitArgs[0] === '--no-replace-objects' + ? gitArgs + : ['--no-replace-objects', ...gitArgs] const log = opts.log || procLog + let retry = opts.retry + if (retry === null || retry === undefined) { + retry = { + retries: opts.fetchRetries || 2, + factor: opts.fetchRetryFactor || 10, + maxTimeout: opts.fetchRetryMaxtimeout || 60000, + minTimeout: opts.fetchRetryMintimeout || 1000 + } + } return promiseRetry((retry, number) => { - if (number !== 1) - log.silly('pacote', `Retrying git command: ${ - gitArgs.join(' ')} attempt # ${number}`) + if (number !== 1) { + log.silly('git', `Retrying git command: ${ + args.join(' ')} attempt # ${number}`) + } - return spawn(gitPath, gitArgs, makeOpts(opts)) + return spawn(gitPath, args, makeOpts(opts)) .catch(er => { - if (shouldRetry(er.stderr, number)) - retry(er) - else - throw er + const gitError = makeError(er) + if (!gitError.shouldRetry(number)) { + throw gitError + } + retry(gitError) }) - }, opts.retry !== null && opts.retry !== undefined ? 
opts.retry : { - retries: opts.fetchRetries || 2, - factor: opts.fetchRetryFactor || 10, - maxTimeout: opts.fetchRetryMaxtimeout || 60000, - minTimeout: opts.fetchRetryMintimeout || 1000, - }) + }, retry) } diff --git a/node_modules/@npmcli/git/lib/utils.js b/node_modules/@npmcli/git/lib/utils.js new file mode 100644 index 0000000000000..fcd9578a19597 --- /dev/null +++ b/node_modules/@npmcli/git/lib/utils.js @@ -0,0 +1,3 @@ +const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32' + +exports.isWindows = isWindows diff --git a/node_modules/@npmcli/git/lib/which.js b/node_modules/@npmcli/git/lib/which.js index 9e82d391aaded..a2f690e1bce80 100644 --- a/node_modules/@npmcli/git/lib/which.js +++ b/node_modules/@npmcli/git/lib/which.js @@ -5,7 +5,12 @@ try { gitPath = which.sync('git') } catch (e) {} -module.exports = (opts = {}) => - opts.git || - opts.git !== false && gitPath || - Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' }) +module.exports = (opts = {}) => { + if (opts.git) { + return opts.git + } + if (!gitPath || opts.git === false) { + return Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' }) + } + return gitPath +} diff --git a/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/git/package.json index 0e01efaf2fbce..9475da5007a7d 100644 --- a/node_modules/@npmcli/git/package.json +++ b/node_modules/@npmcli/git/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/git", - "version": "2.0.6", + "version": "2.1.0", "main": "lib/index.js", "files": [ "lib/*.js" @@ -13,11 +13,14 @@ "author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)", "license": "ISC", "scripts": { - "test": "tap", - "snap": "tap", - "preversion": "npm test", + "lint": "standard", + "lint:fix": "standard --fix", "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags" + "prepublishOnly": "git push origin --follow-tags", + "preversion": "npm test", + "snap": "tap", + "test": "tap", + "posttest": "npm run lint" }, "tap": { "check-coverage": true, @@ -25,17 +28,17 @@ }, "devDependencies": { "slash": "^3.0.0", - "tap": "^14.10.6" + "standard": "^16.0.3", + "tap": "^15.0.6" }, "dependencies": { - "@npmcli/promise-spawn": "^1.1.0", + "@npmcli/promise-spawn": "^1.3.2", "lru-cache": "^6.0.0", - "mkdirp": "^1.0.3", - "npm-pick-manifest": "^6.0.0", + "mkdirp": "^1.0.4", + "npm-pick-manifest": "^6.1.1", "promise-inflight": "^1.0.1", "promise-retry": "^2.0.1", - "semver": "^7.3.2", - "unique-filename": "^1.1.1", + "semver": "^7.3.5", "which": "^2.0.2" } } diff --git a/node_modules/@npmcli/installed-package-contents/README.md b/node_modules/@npmcli/installed-package-contents/README.md deleted file mode 100644 index edd23bd26d64c..0000000000000 --- a/node_modules/@npmcli/installed-package-contents/README.md +++ /dev/null @@ -1,109 +0,0 @@ -# @npmcli/installed-package-contents - -Get the list of files installed in a package in node_modules, including -bundled dependencies. - -This is useful if you want to remove a package node from the tree _without_ -removing its child nodes, for example to extract a new version of the -dependency into place safely. - -It's sort of the reflection of [npm-packlist](http://npm.im/npm-packlist), -but for listing out the _installed_ files rather than the files that _will_ -be installed. This is of course a much simpler operation, because we don't -have to handle ignore files or package.json `files` lists. 
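A common pattern, hinted at in the caveat section further down, is to feed the resulting list straight into a recursive delete so the folder can be refilled with a different version of the package. A minimal sketch of that pattern, assuming `rimraf` is available for the actual removal:

```js
const pkgContents = require('@npmcli/installed-package-contents')
const rimraf = require('rimraf')
const { promisify } = require('util')
const rimrafP = promisify(rimraf)

// list everything installed for the package (including bundled deps and
// linked bins) and remove it, so a new version can be extracted into place
const clearPackage = async (path) => {
  const files = await pkgContents({ path, depth: 1 })
  await Promise.all(files.map(f => rimrafP(f)))
}

clearPackage('node_modules/foo').then(() => console.log('cleared'))
```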
- -## USAGE - -```js -// programmatic usage -const pkgContents = require('@npmcli/installed-package-contents') - -pkgContents({ path: 'node_modules/foo', depth: 1 }).then(files => { - // files is an array of items that need to be passed to - // rimraf or moved out of the way to make the folder empty - // if foo bundled dependencies, those will be included. - // It will not traverse into child directories, because we set - // depth:1 in the options. - // If the folder doesn't exist, this returns an empty array. -}) - -pkgContents({ path: 'node_modules/foo', depth: Infinity }).then(files => { - // setting depth:Infinity tells it to keep walking forever - // until it hits something that isn't a directory, so we'll - // just get the list of all files, but not their containing - // directories. -}) -``` - -As a CLI: - -```bash -$ installed-package-contents node_modules/bundle-some -d1 -node_modules/.bin/some -node_modules/bundle-some/package.json -node_modules/bundle-some/node_modules/@scope/baz -node_modules/bundle-some/node_modules/.bin/foo -node_modules/bundle-some/node_modules/foo -``` - -CLI options: - -``` -Usage: - installed-package-contents <path> [-d<n> --depth=<n>] - -Lists the files installed for a package specified by <path>. - -Options: - -d<n> --depth=<n> Provide a numeric value ("Infinity" is allowed) - to specify how deep in the file tree to traverse. - Default=1 - -h --help Show this usage information -``` - -## OPTIONS - -* `depth` Number, default `1`. How deep to traverse through folders to get - contents. Typically you'd want to set this to either `1` (to get the - surface files and folders) or `Infinity` (to get all files), but any - other positive number is supported as well. If set to `0` or a - negative number, returns the path provided and (if it is a package) its - set of linked bins. -* `path` Required. Path to the package in `node_modules` where traversal - should begin. - -## RETURN VALUE - -A Promise that resolves to an array of fully-resolved files and folders -matching the criteria. This includes all bundled dependencies in -`node_modules`, and any linked executables in `node_modules/.bin` that the -package caused to be installed. - -An empty or missing package folder will return an empty array. Empty -directories _within_ package contents are listed, even if the `depth` -argument would cause them to be traversed into. - -## CAVEAT - -If using this module to generate a list of files that should be recursively -removed to clear away the package, note that this will leave empty -directories behind in certain cases: - -- If all child packages are bundled dependencies, then the - `node_modules` folder will remain. -- If all child packages within a given scope were bundled dependencies, - then the `node_modules/@scope` folder will remain. -- If all linked bin scripts were removed, then an empty `node_modules/.bin` - folder will remain. - -In the interest of speed and algorithmic complexity, this module does _not_ -do a subsequent readdir to see if it would remove all directory entries, -though it would be easier to look at if it returned `node_modules` or -`.bin` in that case rather than the contents. However, if the intent is to -pass these arguments to `rimraf`, it hardly makes sense to do _two_ -`readdir` calls just so that we can have the luxury of having to make a -third. - -Since the primary use case is to delete a package's contents so that they -can be re-filled with a new version of that package, this caveat does not -pose a problem. 
Empty directories are already ignored by both npm and git. diff --git a/node_modules/@npmcli/map-workspaces/CHANGELOG.md b/node_modules/@npmcli/map-workspaces/CHANGELOG.md deleted file mode 100644 index b890b58e1405a..0000000000000 --- a/node_modules/@npmcli/map-workspaces/CHANGELOG.md +++ /dev/null @@ -1,6 +0,0 @@ -# Changelog - -## 0.0.0-pre.0 - -- Initial pre-release. - diff --git a/node_modules/@npmcli/map-workspaces/README.md b/node_modules/@npmcli/map-workspaces/README.md deleted file mode 100644 index 52e4a42d37abe..0000000000000 --- a/node_modules/@npmcli/map-workspaces/README.md +++ /dev/null @@ -1,89 +0,0 @@ -# @npmcli/map-workspaces - -[![NPM version](https://img.shields.io/npm/v/@npmcli/map-workspaces)](https://www.npmjs.com/package/@npmcli/map-workspaces) -[![Build Status](https://img.shields.io/github/workflow/status/npm/map-workspaces/node-ci)](https://github.com/npm/map-workspaces) -[![License](https://img.shields.io/github/license/npm/map-workspaces)](https://github.com/npm/map-workspaces/blob/master/LICENSE) - -Retrieves a name:pathname Map for a given workspaces config. - -Long version: Reads the `workspaces` property from a valid **workspaces configuration** object and traverses the paths and globs defined there in order to find valid nested packages and return a **Map** of all found packages where keys are package names and values are folder locations. - -## Install - -`npm install map-workspaces` - -## Usage: - -```js -const mapWorkspaces = require('@npmcli/map-workspaces') -await mapWorkspaces({ - cwd, - pkg: { - workspaces: { - packages: [ - "a", - "b" - ] - } - } -}) -// -> -// Map { -// 'a': '<cwd>/a' -// 'b': '<cwd>/b' -// } -``` - -## Examples: - -### Glob usage: - -Given a folder structure such as: - -``` -├── package.json -└── apps - ├── a - │ └── package.json - ├── b - │ └── package.json - └── c - └── package.json -``` - -```js -const mapWorkspaces = require('@npmcli/map-workspaces') -await mapWorkspaces({ - cwd, - pkg: { - workspaces: [ - "apps/*" - ] - } -}) -// -> -// Map { -// 'a': '<cwd>/apps/a' -// 'b': '<cwd>/apps/b' -// 'c': '<cwd>/apps/c' -// } -``` - -## API: - -### `mapWorkspaces(opts) -> Promise<Map>` - -- `opts`: - - `pkg`: A valid `package.json` **Object** - - `cwd`: A **String** defining the base directory to use when reading globs and paths. - - `ignore`: An **Array** of paths to be ignored when using [globs](https://www.npmjs.com/package/glob) to look for nested package. - - ...[Also support all other glob options](https://www.npmjs.com/package/glob#options) - -#### Returns - -A **Map** in which keys are **package names** and values are the **pathnames** for each found **workspace**. - -## LICENSE - -[ISC](./LICENSE) - diff --git a/node_modules/@npmcli/metavuln-calculator/README.md b/node_modules/@npmcli/metavuln-calculator/README.md deleted file mode 100644 index 00f3064e117d0..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/README.md +++ /dev/null @@ -1,289 +0,0 @@ -# @npmcli/metavuln-calculator - -Calculate meta-vulnerabilities from package security advisories - -This is a pretty low-level package to abstract out the parts of -[@npmcli/arborist](http://npm.im/@npmcli/arborist) that calculate -metavulnerabilities from security advisories. If you just want to get an -audit for a package tree, probably what you want to use is -`arborist.audit()`. 
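For that simpler case, a rough sketch of the higher-level route, assuming the `@npmcli/arborist` API with its `audit()` method returning a Map-like report keyed by package name:

```js
const Arborist = require('@npmcli/arborist')

const run = async () => {
  const arb = new Arborist({ path: '/path/to/project' })
  // loads the tree, fetches advisories, and calculates metavulns internally
  const report = await arb.audit()
  console.log([...report.keys()])
}

run().catch(er => console.error(er))
```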
- -## USAGE - -```js -const Calculator = require('@npmcli/metavuln-calculator') -// pass in any options for cacache and pacote -// see those modules for option descriptions -const calculator = new Calculator(options) - -// get an advisory somehow, typically by POSTing a JSON payload like: -// {"pkgname":["1.2.3","4.3.5", ...versions], ...packages} -// to /-/npm/v1/security/advisories/bulk -// to get a payload response like: -// { -// "semver": [ -// { -// "id": 31, -// "url": "https://npmjs.com/advisories/31", -// "title": "Regular Expression Denial of Service", -// "severity": "moderate", -// "vulnerable_versions": "<4.3.2" -// } -// ], -// ...advisories -// } -const arb = new Aborist(options) -const tree = await arb.loadActual() -const advisories = await getBulkAdvisoryReportSomehow(tree) - -// then to get a comprehensive set of advisories including metavulns: -const set = new Set() -for (const [name, advisory] of Object.entries(advisories)) { - // make sure we have the advisories loaded with latest version lists - set.add(await calculator.calculate(name, {advisory})) -} - -for (const vuln of set) { - for (const node of tree.inventory.query('name', vuln.name)) { - // not vulnerable, just keep looking - if (!vuln.testVersion(node.version)) - continue - for (const { from: dep, spec } of node.edgesIn) { - const metaAdvisory = await calculator.calculate(dep.name, vuln) - if (metaAdvisory.testVersion(dep.version, spec)) { - set.add(metaAdvisory) - } - } - } -} -``` - -## API - -### Class: Advisory - -The `Calculator.calculate` method returns a Promise that resolves to a -`Advisory` object, filled in from the cache and updated if necessary with -the available advisory data. - -Do not instantiate `Advisory` objects directly. Use the `calculate()` -method to get one with appropriate data filled in. - -Do not mutate `Advisory` objects. Use the supplied methods only. - -#### Fields - -- `name` The name of the package that this vulnerability is about -- `id` The unique cache key for this vuln or metavuln. (See **Cache Keys** - below.) -- `dependency` For metavulns, the dependency that causes this package to be - have a vulnerability. For advisories, the same as `name`. -- `type` Either `'advisory'` or `'metavuln'`, depending on the type of - vulnerability that this object represents. -- `url` The url for the advisory (`null` for metavulns) -- `title` The text title of the advisory or metavuln -- `severity` The severity level info/low/medium/high/critical -- `range` The range that is vulnerable -- `versions` The set of available versions of the package -- `vulnerableVersions` The set of versions that are vulnerable -- `source` The numeric ID of the advisory, or the cache key of the - vulnerability that causes this metavuln -- `updated` Boolean indicating whether this vulnerability was updated since - being read from cache. -- `packument` The packument object for the package that this vulnerability - is about. - -#### `vuln.testVersion(version, [dependencySpecifier]) -> Boolean` - -Check to see if a given version is vulnerable. Returns `true` if the -version is vulnerable, and should be avoided. - -For metavulns, `dependencySpecifier` indicates the version range of the -source of the vulnerability, which the module depends on. If not provided, -will attempt to read from the packument. If not provided, and unable to -read from the packument, then `true` is returned, indicating that the (not -installable) package version should be avoided. 
- -#### Cache Keys - -The cache keys are calculated by hashing together the `source` and `name` -fields, prefixing with the string `'security-advisory:'` and the name of -the dependency that is vulnerable. - -So, a third-level metavulnerability might have a key like: - -``` -'security-advisory:foo:'+ hash(['foo', hash(['bar', hash(['baz', 123])])]) -``` - -Thus, the cached entry with this key would reflect the version of `foo` -that is vulnerable by virtue of dependending exclusively on versions of -`bar` which are vulnerable by virtue of depending exclusively on versions -of `baz` which are vulnerable by virtue of advisory ID `123`. - -Loading advisory data entirely from cache without hitting an npm registry -security advisory endpoint is not supported at this time, but technically -possible, and likely to come in a future version of this library. - -### `calculator = new Calculator(options)` - -Options object is used for `cacache` and `pacote` calls. - -### `calculator.calculate(name, source)` - -- `name` The name of the package that the advisory is about -- `source` Advisory object from the npm security endpoint, or a `Advisory` - object returned by a previous call to the `calculate()` method. - "Advisory" objects need to have: - - `id` id of the advisory or Advisory object - - `vulnerable_versions` range of versions affected - - `url` - - `title` - - `severity` - -Fetches the packument and returns a Promise that resolves to a -vulnerability object described above. - -Will perform required I/O to fetch package metadata from registry and -read from cache. Advisory information written back to cache. - -## Dependent Version Sampling - -Typically, dependency ranges don't change very frequently, and the most -recent version published on a given release line is most likely to contain -the fix for a given vulnerability. - -So, we see things like this: - -``` -3.0.4 - not vulnerable -3.0.3 - vulnerable -3.0.2 - vulnerable -3.0.1 - vulnerable -3.0.0 - vulnerable -2.3.107 - not vulnerable -2.3.106 - not vulnerable -2.3.105 - vulnerable -... 523 more vulnerable versions ... -2.0.0 - vulnerable -1.1.102 - not vulnerable -1.1.101 - vulnerable -... 387 more vulnerable versions ... -0.0.0 - vulnerable -``` - -In order to determine which versions of a package are affected by a -vulnerability in a dependency, this module uses the following algorithm to -minimize the number of tests required by performing a binary search on each -version set, and presuming that versions _between_ vulnerable versions -within a given set are also vulnerable. - -1. Sort list of available versions by SemVer precedence -2. Group versions into sets based on MAJOR/MINOR versions. - - 3.0.0 - 3.0.4 - 2.3.0 - 2.3.107 - 2.2.0 - 2.2.43 - 2.1.0 - 2.1.432 - 2.0.0 - 2.0.102 - 1.1.0 - 1.1.102 - 1.0.0 - 1.0.157 - 0.1.0 - 0.1.123 - 0.0.0 - 0.0.57 - -3. Test the highest and lowest in each MAJOR/MINOR set, and mark highest - and lowest with known-vulnerable status. (`(s)` means "safe" and `(v)` - means "vulnerable".) - - 3.0.0(v) - 3.0.4(s) - 2.3.0(v) - 2.3.107(s) - 2.2.0(v) - 2.2.43(v) - 2.1.0(v) - 2.1.432(v) - 2.0.0(v) - 2.0.102(v) - 1.1.0(v) - 1.1.102(s) - 1.0.0(v) - 1.0.157(v) - 0.1.0(v) - 0.1.123(v) - 0.0.0(v) - 0.0.57(v) - -4. For each set of package versions: - - 1. If highest and lowest both vulnerable, assume entire set is - vulnerable, and continue to next set. 
Ie, in the example, throw out - the following version sets: - - 2.2.0(v) - 2.2.43(v) - 2.1.0(v) - 2.1.432(v) - 2.0.0(v) - 2.0.102(v) - 1.0.0(v) - 1.0.157(v) - 0.1.0(v) - 0.1.123(v) - 0.0.0(v) - 0.0.57(v) - - 2. Test middle version MID in set, splitting into two sets. - - 3.0.0(v) - 3.0.2(v) - 3.0.4(s) - 2.3.0(v) - 2.3.54(v) - 2.3.107(s) - 1.1.0(v) - 1.1.51(v) - 1.1.102(s) - - 3. If any untested versions in Set(mid..highest) or Set(lowest..mid), - add to list of sets to test. - - 3.0.0(v) - 3.0.2(v) <-- thrown out on next iteration - 3.0.2(v) - 3.0.4(s) - 2.3.0(v) - 2.3.54(v) <-- thrown out on next iteration - 2.3.54(v) - 2.3.107(s) - 1.1.0(v) - 1.1.51(v) <-- thrown out on next iteration - 1.1.51(v) - 1.1.102(s) - -When the process finishes, all versions are either confirmed safe, or -confirmed/assumed vulnerable, and we avoid checking large sets of versions -where vulnerabilities went unfixed. - -### Testing Version for MetaVuln Status - -When the dependency is in `bundleDependencies`, we treat any dependent -version that _may_ be vulnerable as a vulnerability. If the dependency is -not in `bundleDependencies`, then we treat the dependent module as a -vulnerability if it can _only_ resolve to dependency versions that are -vulnerable. - -This relies on the reasonable assumption that the version of a bundled -dependency will be within the stated dependency range, and accounts for the -fact that we can't know ahead of time which version of a dependency may be -bundled. So, we avoid versions that _may_ bundle a vulnerable dependency. - -For example: - -Package `foo` depends on package `bar` at the following version ranges: - -``` -foo version bar version range -1.0.0 ^1.2.3 -1.0.1 ^1.2.4 -1.0.2 ^1.2.5 -1.1.0 ^1.3.1 -1.1.1 ^1.3.2 -1.1.2 ^1.3.3 -2.0.0 ^2.0.0 -2.0.1 ^2.0.1 -2.0.2 ^2.0.2 -``` - -There is an advisory for `bar@1.2.4 - 1.3.2`. So: - -``` -foo version vulnerable? -1.0.0 if bundled (can use 1.2.3, which is not vulnerable) -1.0.1 yes (must use ^1.2.4, entirely contained in vuln range) -1.0.2 yes (must use ^1.2.5, entirely contained in vuln range) -1.1.0 if bundled (can use 1.3.3, which is not vulnerable) -1.1.1 if bundled (can use 1.3.3, which is not vulnerable) -1.1.2 no (dep is outside of vuln range) -2.0.0 no (dep is outside of vuln range) -2.0.1 no (dep is outside of vuln range) -2.0.2 no (dep is outside of vuln range) -``` - -To test a package version for metaVulnerable status, we attempt to load the -manifest of the dependency, using the vulnerable version set as the `avoid` -versions. If we end up selecting a version that should be avoided, then -that means that the package is vulnerable by virtue of its dependency. diff --git a/node_modules/@npmcli/metavuln-calculator/lib/get-dep-spec.js b/node_modules/@npmcli/metavuln-calculator/lib/get-dep-spec.js index 35e83d02a1b63..8d1d72b8c46eb 100644 --- a/node_modules/@npmcli/metavuln-calculator/lib/get-dep-spec.js +++ b/node_modules/@npmcli/metavuln-calculator/lib/get-dep-spec.js @@ -8,8 +8,8 @@ module.exports = (mani, name) => { peerDependencies: peerDeps = {}, } = mani - return typeof deps[name] === 'string' ? deps[name] - : typeof optDeps[name] === 'string' ? optDeps[name] - : typeof peerDeps[name] === 'string' ? peerDeps[name] + return deps && typeof deps[name] === 'string' ? deps[name] + : optDeps && typeof optDeps[name] === 'string' ? optDeps[name] + : peerDeps && typeof peerDeps[name] === 'string' ? 
peerDeps[name] : null } diff --git a/node_modules/@npmcli/metavuln-calculator/package.json b/node_modules/@npmcli/metavuln-calculator/package.json index f7a4f5cc47a7e..4ad6193ae6aa8 100644 --- a/node_modules/@npmcli/metavuln-calculator/package.json +++ b/node_modules/@npmcli/metavuln-calculator/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/metavuln-calculator", - "version": "1.1.0", + "version": "1.1.1", "main": "lib/index.js", "files": [ "lib" diff --git a/node_modules/@npmcli/move-file/README.md b/node_modules/@npmcli/move-file/README.md deleted file mode 100644 index 8a5a57f0f8d92..0000000000000 --- a/node_modules/@npmcli/move-file/README.md +++ /dev/null @@ -1,69 +0,0 @@ -# @npmcli/move-file - -A fork of [move-file](https://github.com/sindresorhus/move-file) with -compatibility with all node 10.x versions. - -> Move a file (or directory) - -The built-in -[`fs.rename()`](https://nodejs.org/api/fs.html#fs_fs_rename_oldpath_newpath_callback) -is just a JavaScript wrapper for the C `rename(2)` function, which doesn't -support moving files across partitions or devices. This module is what you -would have expected `fs.rename()` to be. - -## Highlights - -- Promise API. -- Supports moving a file across partitions and devices. -- Optionally prevent overwriting an existing file. -- Creates non-existent destination directories for you. -- Support for Node versions that lack built-in recursive `fs.mkdir()` -- Automatically recurses when source is a directory. - -## Install - -``` -$ npm install @npmcli/move-file -``` - -## Usage - -```js -const moveFile = require('@npmcli/move-file'); - -(async () => { - await moveFile('source/unicorn.png', 'destination/unicorn.png'); - console.log('The file has been moved'); -})(); -``` - -## API - -### moveFile(source, destination, options?) - -Returns a `Promise` that resolves when the file has been moved. - -### moveFile.sync(source, destination, options?) - -#### source - -Type: `string` - -File, or directory, you want to move. - -#### destination - -Type: `string` - -Where you want the file or directory moved. - -#### options - -Type: `object` - -##### overwrite - -Type: `boolean`\ -Default: `true` - -Overwrite existing destination file(s). diff --git a/node_modules/@npmcli/name-from-folder/README.md b/node_modules/@npmcli/name-from-folder/README.md deleted file mode 100644 index 0735ca28a0aa3..0000000000000 --- a/node_modules/@npmcli/name-from-folder/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# @npmcli/name-from-folder - -Get the package name from a folder path, including the scope if the -basename of the dirname starts with `@`. - -For a path like `/x/y/z/@scope/pkg` it'll return `@scope/pkg`. If the path -name is something like `/x/y/z/pkg`, then it'll return `pkg`. - -## USAGE - -```js -const nameFromFolder = require('@npmcli/name-from-folder') -const name = nameFromFolder('/some/folder/path') -``` diff --git a/node_modules/@npmcli/node-gyp/README.md b/node_modules/@npmcli/node-gyp/README.md deleted file mode 100644 index a32fb2cb80c6b..0000000000000 --- a/node_modules/@npmcli/node-gyp/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# @npmcli/node-gyp - -This is the module npm uses to decide whether a package should be built -using [`node-gyp`](https://github.com/nodejs/node-gyp) by default. - -## API - -* `isNodeGypPackage(path)` - -Returns a Promise that resolves to `true` or `false` based on whether the -package at `path` has a `binding.gyp` file. 
- -* `defaultGypInstallScript` - -A string with the default string that should be used as the `install` -script for node-gyp packages. diff --git a/node_modules/@npmcli/package-json/LICENSE b/node_modules/@npmcli/package-json/LICENSE new file mode 100644 index 0000000000000..6a1f3708f6d70 --- /dev/null +++ b/node_modules/@npmcli/package-json/LICENSE @@ -0,0 +1,18 @@ +ISC License + +Copyright GitHub Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/package-json/lib/index.js b/node_modules/@npmcli/package-json/lib/index.js new file mode 100644 index 0000000000000..87c3a63093688 --- /dev/null +++ b/node_modules/@npmcli/package-json/lib/index.js @@ -0,0 +1,106 @@ +const fs = require('fs') +const promisify = require('util').promisify +const readFile = promisify(fs.readFile) +const writeFile = promisify(fs.writeFile) +const { resolve } = require('path') +const updateDeps = require('./update-dependencies.js') +const updateScripts = require('./update-scripts.js') +const updateWorkspaces = require('./update-workspaces.js') + +const parseJSON = require('json-parse-even-better-errors') + +const _filename = Symbol('filename') +const _manifest = Symbol('manifest') +const _readFileContent = Symbol('readFileContent') + +// a list of handy specialized helper functions that take +// care of special cases that are handled by the npm cli +const knownSteps = new Set([ + updateDeps, + updateScripts, + updateWorkspaces, +]) + +// list of all keys that are handled by "knownSteps" helpers +const knownKeys = new Set([ + ...updateDeps.knownKeys, + 'scripts', + 'workspaces', +]) + +class PackageJson { + static async load (path) { + return await new PackageJson(path).load() + } + + constructor (path) { + this[_filename] = resolve(path, 'package.json') + this[_manifest] = {} + this[_readFileContent] = '' + } + + async load () { + try { + this[_readFileContent] = + await readFile(this[_filename], 'utf8') + } catch (err) { + throw new Error('package.json not found') + } + + try { + this[_manifest] = + parseJSON(this[_readFileContent]) + } catch (err) { + throw new Error(`Invalid package.json: ${err}`) + } + + return this + } + + get content () { + return this[_manifest] + } + + update (content) { + // validates both current manifest and content param + const invalidContent = + typeof this[_manifest] !== 'object' + || typeof content !== 'object' + if (invalidContent) { + throw Object.assign( + new Error(`Can't update invalid package.json data`), + { code: 'EPACKAGEJSONUPDATE' } + ) + } + + for (const step of knownSteps) + this[_manifest] = step({ content, originalContent: this[_manifest] }) + + // unknown properties will just be overwitten + for (const [key, value] of Object.entries(content)) { + if (!knownKeys.has(key)) + this[_manifest][key] = value + } + + return this + } + + async save () { + const { + [Symbol.for('indent')]: indent, + 
[Symbol.for('newline')]: newline, + } = this[_manifest] + + const format = indent === undefined ? ' ' : indent + const eol = newline === undefined ? '\n' : newline + const fileContent = `${ + JSON.stringify(this[_manifest], null, format) + }\n` + .replace(/\n/g, eol) + + if (fileContent.trim() !== this[_readFileContent].trim()) + return await writeFile(this[_filename], fileContent) + } +} + +module.exports = PackageJson diff --git a/node_modules/@npmcli/package-json/lib/update-dependencies.js b/node_modules/@npmcli/package-json/lib/update-dependencies.js new file mode 100644 index 0000000000000..dac45a8bed7bf --- /dev/null +++ b/node_modules/@npmcli/package-json/lib/update-dependencies.js @@ -0,0 +1,72 @@ +const depTypes = new Set([ + 'dependencies', + 'optionalDependencies', + 'devDependencies', + 'peerDependencies', +]) + +// sort alphabetically all types of deps for a given package +const orderDeps = (content) => { + for (const type of depTypes) { + if (content && content[type]) { + content[type] = Object.keys(content[type]) + .sort((a, b) => a.localeCompare(b, 'en')) + .reduce((res, key) => { + res[key] = content[type][key] + return res + }, {}) + } + } + return content +} + +const updateDependencies = ({ content, originalContent }) => { + const pkg = orderDeps({ + ...content, + }) + + // optionalDependencies don't need to be repeated in two places + if (pkg.dependencies) { + if (pkg.optionalDependencies) { + for (const name of Object.keys(pkg.optionalDependencies)) + delete pkg.dependencies[name] + } + } + + const result = { ...originalContent } + + // loop through all types of dependencies and update package json pkg + for (const type of depTypes) { + if (pkg[type]) + result[type] = pkg[type] + + // prune empty type props from resulting object + const emptyDepType = + pkg[type] + && typeof pkg === 'object' + && Object.keys(pkg[type]).length === 0 + if (emptyDepType) + delete result[type] + } + + // if original package.json had dep in peerDeps AND deps, preserve that. 
+ const { dependencies: origProd, peerDependencies: origPeer } = + originalContent || {} + const { peerDependencies: newPeer } = result + if (origProd && origPeer && newPeer) { + // we have original prod/peer deps, and new peer deps + // copy over any that were in both in the original + for (const name of Object.keys(origPeer)) { + if (origProd[name] !== undefined && newPeer[name] !== undefined) { + result.dependencies = result.dependencies || {} + result.dependencies[name] = newPeer[name] + } + } + } + + return result +} + +updateDependencies.knownKeys = depTypes + +module.exports = updateDependencies diff --git a/node_modules/@npmcli/package-json/lib/update-scripts.js b/node_modules/@npmcli/package-json/lib/update-scripts.js new file mode 100644 index 0000000000000..3a88d3e9a17a8 --- /dev/null +++ b/node_modules/@npmcli/package-json/lib/update-scripts.js @@ -0,0 +1,28 @@ +const updateScripts = ({ content, originalContent = {} }) => { + const newScripts = content.scripts + + if (!newScripts) + return originalContent + + // validate scripts content being appended + const hasInvalidScripts = () => + Object.entries(newScripts) + .some(([key, value]) => + typeof key !== 'string' || typeof value !== 'string') + if (hasInvalidScripts()) { + throw Object.assign( + new TypeError( + 'package.json scripts should be a key-value pair of strings.'), + { code: 'ESCRIPTSINVALID' } + ) + } + + return { + ...originalContent, + scripts: { + ...newScripts, + }, + } +} + +module.exports = updateScripts diff --git a/node_modules/@npmcli/package-json/lib/update-workspaces.js b/node_modules/@npmcli/package-json/lib/update-workspaces.js new file mode 100644 index 0000000000000..207dd94a236d7 --- /dev/null +++ b/node_modules/@npmcli/package-json/lib/update-workspaces.js @@ -0,0 +1,25 @@ +const updateWorkspaces = ({ content, originalContent = {} }) => { + const newWorkspaces = content.workspaces + + if (!newWorkspaces) + return originalContent + + // validate workspaces content being appended + const hasInvalidWorkspaces = () => + newWorkspaces.some(w => !(typeof w === 'string')) + if (!newWorkspaces.length || hasInvalidWorkspaces()) { + throw Object.assign( + new TypeError('workspaces should be an array of strings.'), + { code: 'EWORKSPACESINVALID' } + ) + } + + return { + ...originalContent, + workspaces: [ + ...newWorkspaces, + ], + } +} + +module.exports = updateWorkspaces diff --git a/node_modules/@npmcli/package-json/package.json b/node_modules/@npmcli/package-json/package.json new file mode 100644 index 0000000000000..8708ec5eb6fb1 --- /dev/null +++ b/node_modules/@npmcli/package-json/package.json @@ -0,0 +1,34 @@ +{ + "name": "@npmcli/package-json", + "version": "1.0.1", + "description": "Programmatic API to update package.json", + "main": "lib/index.js", + "files": [ + "lib" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "snap": "tap", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "npm run npmclilint -- \"lib/*.*js\" \"test/*.*js\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint --", + "postsnap": "npm run lintfix --" + }, + "keywords": [ + "npm", + "oss" + ], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/lint": "^1.0.1", + "tap": "^15.0.9" + }, + "dependencies": { + "json-parse-even-better-errors": "^2.3.1" + } +} diff --git a/node_modules/@npmcli/promise-spawn/README.md b/node_modules/@npmcli/promise-spawn/README.md deleted file mode 100644 index 
b569948c9a111..0000000000000 --- a/node_modules/@npmcli/promise-spawn/README.md +++ /dev/null @@ -1,66 +0,0 @@ -# @npmcli/promise-spawn - -Spawn processes the way the npm cli likes to do. Give it some options, -it'll give you a Promise that resolves or rejects based on the results of -the execution. - -Note: When the current user is root, this will use -[`infer-owner`](http://npm.im/infer-owner) to find the owner of the current -working directory, and run with that effective uid/gid. Otherwise, it runs -as the current user always. (This helps prevent doing git checkouts and -such, and leaving root-owned files lying around in user-owned locations.) - -## USAGE - -```js -const promiseSpawn = require('@npmcli/promise-spawn') - -promiseSpawn('ls', [ '-laF', 'some/dir/*.js' ], { - cwd: '/tmp/some/path', // defaults to process.cwd() - stdioString: false, // stdout/stderr as strings rather than buffers - stdio: 'pipe', // any node spawn stdio arg is valid here - // any other arguments to node child_process.spawn can go here as well, - // but uid/gid will be ignored and set by infer-owner if relevant. -}, { - extra: 'things', - to: 'decorate', - the: 'result', -}).then(result => { - // {code === 0, signal === null, stdout, stderr, and all the extras} - console.log('ok!', result) -}).catch(er => { - // er has all the same properties as the result, set appropriately - console.error('failed!', er) -}) -``` - -## API - -### `promiseSpawn(cmd, args, opts, extra)` -> `Promise` - -Run the command, return a Promise that resolves/rejects based on the -process result. - -Result or error will be decorated with the properties in the `extra` -object. You can use this to attach some helpful info about _why_ the -command is being run, if it makes sense for your use case. - -If `stdio` is set to anything other than `'inherit'`, then the result/error -will be decorated with `stdout` and `stderr` values. If `stdioString` is -set to `true`, these will be strings. Otherwise they will be Buffer -objects. - -Returned promise is decorated with the `stdin` stream if the process is set -to pipe from `stdin`. Writing to this stream writes to the `stdin` of the -spawned process. - -#### Options - -- `stdioString` Boolean, default `false`. Return stdout/stderr output as - strings rather than buffers. -- `cwd` String, default `process.cwd()`. Current working directory for - running the script. Also the argument to `infer-owner` to determine - effective uid/gid when run as root on Unix systems. -- Any other options for `child_process.spawn` can be passed as well, but - note that `uid` and `gid` will be overridden by the owner of the cwd when - run as root on Unix systems, or `null` otherwise. 
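
The new `@npmcli/package-json` module added a few hunks above ships without a README in this changeset. As a rough usage sketch, based only on the `lib/index.js` and `update-dependencies.js` sources shown above (the package path and the dependency/script values here are hypothetical):

```js
// Hypothetical usage of the @npmcli/package-json module added above;
// the path and values are illustrative only.
const PackageJson = require('@npmcli/package-json')

const bumpDeps = async () => {
  // load() reads and parses <path>/package.json, throwing if the file is
  // missing or contains invalid JSON
  const pkgJson = await PackageJson.load('./some-package')

  // update() routes known keys (dependencies, scripts, workspaces) through
  // the specialized steps and shallow-merges everything else
  pkgJson.update({
    dependencies: { abbrev: '^1.1.1' },
    scripts: { test: 'tap' },
  })

  // save() rewrites the file only if the serialized content actually
  // changed, preserving the original indent and newline style
  await pkgJson.save()
}

bumpDeps().catch(console.error)
```

Per `update-dependencies.js` above, `update()` also sorts each dependency map alphabetically and drops entries that are duplicated between `dependencies` and `optionalDependencies`.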
diff --git a/node_modules/@npmcli/run-script/README.md b/node_modules/@npmcli/run-script/README.md deleted file mode 100644 index 59b473d94706c..0000000000000 --- a/node_modules/@npmcli/run-script/README.md +++ /dev/null @@ -1,149 +0,0 @@ -# @npmcli/run-script - -Run a lifecycle script for a package (descendant of npm-lifecycle) - -## USAGE - -```js -const runScript = require('@npmcli/run-script') - -runScript({ - // required, the script to run - event: 'install', - - // extra args to pass to the command, defaults to [] - args: [], - - // required, the folder where the package lives - path: '/path/to/package/folder', - - // optional, defaults to /bin/sh on unix, or cmd.exe on windows - scriptShell: '/bin/bash', - - // optional, defaults to false - // return stdout and stderr as strings rather than buffers - stdioString: true, - - // optional, additional environment variables to add - // note that process.env IS inherited by default - // Always set: - // - npm_package_json The package.json file in the folder - // - npm_lifecycle_event The event that this is being run for - // - npm_lifecycle_script The script being run - // The fields described in https://github.com/npm/rfcs/pull/183 - env: { - npm_package_from: 'foo@bar', - npm_package_resolved: 'https://registry.npmjs.org/foo/-/foo-1.2.3.tgz', - npm_package_integrity: 'sha512-foobarbaz', - }, - - // defaults to 'pipe'. Can also pass an array like you would to node's - // exec or spawn functions. Note that if it's anything other than - // 'pipe' then the stdout/stderr values on the result will be missing. - // npm cli sets this to 'inherit' for explicit run-scripts (test, etc.) - // but leaves it as 'pipe' for install scripts that run in parallel. - stdio: 'inherit', - - // print the package id and script, and the command to be run, like: - // > somepackage@1.2.3 postinstall - // > make all-the-things - // Defaults true when stdio:'inherit', otherwise suppressed - banner: true, -}) - .then(({ code, signal, stdout, stderr, pkgid, path, event, script }) => { - // do something with the results - }) - .catch(er => { - // command did not work. - // er is decorated with: - // - code - // - signal - // - stdout - // - stderr - // - path - // - pkgid (name@version string) - // - event - // - script - }) -``` - -## API - -Call the exported `runScript` function with an options object. - -Returns a promise that resolves to the result of the execution. Promise -rejects if the execution fails (exits non-zero) or has any other error. -Rejected errors are decorated with the same values as the result object. - -If the stdio options mean that it'll have a piped stdin, then the stdin is -ended immediately on the child process. If stdin is shared with the parent -terminal, then it is up to the user to end it, of course. - -### Results - -- `code` Process exit code -- `signal` Process exit signal -- `stdout` stdout data (Buffer, or String when `stdioString` set to true) -- `stderr` stderr data (Buffer, or String when `stdioString` set to true) -- `path` Path to the package executing its script -- `event` Lifecycle event being run -- `script` Command being run - -### Options - -- `path` Required. The path to the package having its script run. -- `event` Required. The event being executed. -- `args` Optional, default `[]`. Extra arguments to pass to the script. -- `env` Optional, object of fields to add to the environment of the - subprocess. 
Note that process.env IS inherited by default These are - always set: - - `npm_package_json` The package.json file in the folder - - `npm_lifecycle_event` The event that this is being run for - - `npm_lifecycle_script` The script being run - - The `package.json` fields described in - [RFC183](https://github.com/npm/rfcs/pull/183/files). -- `scriptShell` Optional, defaults to `/bin/sh` on Unix, defaults to - `env.comspec` or `cmd` on Windows. Custom script to use to execute the - command. -- `stdio` Optional, defaults to `'pipe'`. The same as the `stdio` argument - passed to `child_process` functions in Node.js. Note that if a stdio - output is set to anything other than `pipe`, it will not be present in - the result/error object. -- `cmd` Optional. Override the script from the `package.json` with - something else, which will be run in an otherwise matching environment. -- `stdioString` Optional, defaults to `false`. Return string values for - `stderr` and `stdout` rather than Buffers. -- `banner` Optional, defaults to `true`. If the `stdio` option is set to - `'inherit'`, then print a banner with the package name and version, event - name, and script command to be run. Set explicitly to `false` to disable - for inherited stdio. - -Note that this does _not_ run pre-event and post-event scripts. The -caller has to manage that process themselves. - -## Differences from [npm-lifecycle](https://github.com/npm/npm-lifecycle) - -This is an implementation to satisfy [RFC -90](https://github.com/npm/rfcs/pull/90), [RFC -77](https://github.com/npm/rfcs/pull/77), and [RFC -73](https://github.com/npm/rfcs/pull/73). - -Apart from those behavior changes in npm v7, this is also just refresh of -the codebase, with modern coding techniques and better test coverage. - -Functionally, this means: - -- Output is not dumped to the top level process's stdio by default. -- Less stuff is put into the environment. -- It is not opinionated about logging. (So, at least with the logging - framework in npm v7.0 and before, the caller has to call - `log.disableProgress()` and `log.enableProgress()` at the appropriate - times, if necessary.) -- The directory containing the `node` executable is _never_ added to the - `PATH` environment variable. (Ie, `--scripts-prepend-node-path` is - effectively always set to `false`.) Doing so causes more unintended side - effects than it ever prevented. -- Hook scripts are not run by this module. If the caller wishes to run - hook scripts, then they can override the default package script with an - explicit `cmd` option pointing to the `node_modules/.hook/${event}` - script. 
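
The `cmd` override described in the options list above has no example in the deleted README; a minimal sketch, with a made-up package path and command:

```js
// Hypothetical: run an arbitrary command with @npmcli/run-script's `cmd`
// override, in the same environment a lifecycle script would get.
const runScript = require('@npmcli/run-script')

runScript({
  event: 'postinstall',  // still sets npm_lifecycle_event and friends
  path: './pkg',         // made-up package folder
  cmd: 'node -e "console.log(process.env.npm_lifecycle_event)"',
  stdioString: true,     // stdout/stderr come back as strings
})
  .then(({ code, stdout }) => console.log(code, stdout.trim()))
  .catch(er => console.error('script failed:', er.code, er.stderr))
```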
diff --git a/node_modules/@npmcli/run-script/lib/make-spawn-args.js b/node_modules/@npmcli/run-script/lib/make-spawn-args.js index 4c38b9401ddf0..8f299954a7a80 100644 --- a/node_modules/@npmcli/run-script/lib/make-spawn-args.js +++ b/node_modules/@npmcli/run-script/lib/make-spawn-args.js @@ -3,30 +3,12 @@ const isWindows = require('./is-windows.js') const setPATH = require('./set-path.js') const {resolve} = require('path') const npm_config_node_gyp = require.resolve('node-gyp/bin/node-gyp.js') -const { quoteForShell, ShellString, ShellStringText, ShellStringUnquoted } = require('puka') - -const escapeCmd = cmd => { - const result = [] - const parsed = ShellString.sh([cmd]) - for (const child of parsed.children) { - if (child instanceof ShellStringText) { - const children = child.contents.filter(segment => segment !== null).map(segment => quoteForShell(segment, false, isWindows && 'win32')) - result.push(...children) - } else if (child instanceof ShellStringUnquoted) { - result.push(child.value) - } else { - result.push(isWindows ? '&' : ';') - } - } - - return result.join('') -} const makeSpawnArgs = options => { const { event, path, - scriptShell = isWindows ? process.env.comspec || 'cmd' : 'sh', + scriptShell = isWindows ? process.env.ComSpec || 'cmd' : 'sh', env = {}, stdio, cmd, @@ -34,7 +16,7 @@ const makeSpawnArgs = options => { } = options const isCmd = /(?:^|\\)cmd(?:\.exe)?$/i.test(scriptShell) - const args = isCmd ? ['/d', '/s', '/c', escapeCmd(cmd)] : ['-c', cmd] + const args = isCmd ? ['/d', '/s', '/c', cmd] : ['-c', cmd] const spawnOpts = { env: setPATH(path, { diff --git a/node_modules/@npmcli/run-script/package.json b/node_modules/@npmcli/run-script/package.json index 9df5b31178747..756f87f1d4d38 100644 --- a/node_modules/@npmcli/run-script/package.json +++ b/node_modules/@npmcli/run-script/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/run-script", - "version": "1.8.3", + "version": "1.8.5", "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)", "author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)", "license": "ISC", @@ -25,14 +25,13 @@ "eslint-plugin-standard": "^5.0.0", "minipass": "^3.1.1", "require-inject": "^1.4.4", - "tap": "^14.11.0" + "tap": "^15.0.4" }, "dependencies": { "@npmcli/node-gyp": "^1.0.2", "@npmcli/promise-spawn": "^1.3.2", "infer-owner": "^1.0.4", "node-gyp": "^7.1.0", - "puka": "^1.0.1", "read-package-json-fast": "^2.0.1" }, "files": [ diff --git a/node_modules/abbrev/README.md b/node_modules/abbrev/README.md deleted file mode 100644 index 99746fe67c462..0000000000000 --- a/node_modules/abbrev/README.md +++ /dev/null @@ -1,23 +0,0 @@ -# abbrev-js - -Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev). - -Usage: - - var abbrev = require("abbrev"); - abbrev("foo", "fool", "folding", "flop"); - - // returns: - { fl: 'flop' - , flo: 'flop' - , flop: 'flop' - , fol: 'folding' - , fold: 'folding' - , foldi: 'folding' - , foldin: 'folding' - , folding: 'folding' - , foo: 'foo' - , fool: 'fool' - } - -This is handy for command-line scripts, or other cases where you want to be able to accept shorthands. 
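
The deleted `abbrev` README above only shows the raw output shape; for the command-line use case it mentions, a short sketch (the command names are made up):

```js
// Hypothetical: resolving user-typed shorthands with abbrev, as suggested
// by the README above. The command list is illustrative.
const abbrev = require('abbrev')

const expand = abbrev(['install', 'init', 'test', 'publish'])

const resolve = (input) => {
  const cmd = expand[input]
  if (!cmd) throw new Error(`unknown or ambiguous command: ${input}`)
  return cmd
}

console.log(resolve('pub'))  // publish
console.log(resolve('te'))   // test
console.log(resolve('i'))    // throws: ambiguous between install and init
```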
diff --git a/node_modules/agent-base/README.md b/node_modules/agent-base/README.md deleted file mode 100644 index 256f1f32196c6..0000000000000 --- a/node_modules/agent-base/README.md +++ /dev/null @@ -1,145 +0,0 @@ -agent-base -========== -### Turn a function into an [`http.Agent`][http.Agent] instance -[![Build Status](https://github.com/TooTallNate/node-agent-base/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-agent-base/actions?workflow=Node+CI) - -This module provides an `http.Agent` generator. That is, you pass it an async -callback function, and it returns a new `http.Agent` instance that will invoke the -given callback function when sending outbound HTTP requests. - -#### Some subclasses: - -Here's some more interesting uses of `agent-base`. -Send a pull request to list yours! - - * [`http-proxy-agent`][http-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTP endpoints - * [`https-proxy-agent`][https-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTPS endpoints - * [`pac-proxy-agent`][pac-proxy-agent]: A PAC file proxy `http.Agent` implementation for HTTP and HTTPS - * [`socks-proxy-agent`][socks-proxy-agent]: A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS - - -Installation ------------- - -Install with `npm`: - -``` bash -$ npm install agent-base -``` - - -Example -------- - -Here's a minimal example that creates a new `net.Socket` connection to the server -for every HTTP request (i.e. the equivalent of `agent: false` option): - -```js -var net = require('net'); -var tls = require('tls'); -var url = require('url'); -var http = require('http'); -var agent = require('agent-base'); - -var endpoint = 'http://nodejs.org/api/'; -var parsed = url.parse(endpoint); - -// This is the important part! -parsed.agent = agent(function (req, opts) { - var socket; - // `secureEndpoint` is true when using the https module - if (opts.secureEndpoint) { - socket = tls.connect(opts); - } else { - socket = net.connect(opts); - } - return socket; -}); - -// Everything else works just like normal... -http.get(parsed, function (res) { - console.log('"response" event!', res.headers); - res.pipe(process.stdout); -}); -``` - -Returning a Promise or using an `async` function is also supported: - -```js -agent(async function (req, opts) { - await sleep(1000); - // etc… -}); -``` - -Return another `http.Agent` instance to "pass through" the responsibility -for that HTTP request to that agent: - -```js -agent(function (req, opts) { - return opts.secureEndpoint ? https.globalAgent : http.globalAgent; -}); -``` - - -API ---- - -## Agent(Function callback[, Object options]) → [http.Agent][] - -Creates a base `http.Agent` that will execute the callback function `callback` -for every HTTP request that it is used as the `agent` for. The callback function -is responsible for creating a `stream.Duplex` instance of some kind that will be -used as the underlying socket in the HTTP request. - -The `options` object accepts the following properties: - - * `timeout` - Number - Timeout for the `callback()` function in milliseconds. Defaults to Infinity (optional). - -The callback function should have the following signature: - -### callback(http.ClientRequest req, Object options, Function cb) → undefined - -The ClientRequest `req` can be accessed to read request headers and -and the path, etc. 
The `options` object contains the options passed -to the `http.request()`/`https.request()` function call, and is formatted -to be directly passed to `net.connect()`/`tls.connect()`, or however -else you want a Socket to be created. Pass the created socket to -the callback function `cb` once created, and the HTTP request will -continue to proceed. - -If the `https` module is used to invoke the HTTP request, then the -`secureEndpoint` property on `options` _will be set to `true`_. - - -License -------- - -(The MIT License) - -Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -[http-proxy-agent]: https://github.com/TooTallNate/node-http-proxy-agent -[https-proxy-agent]: https://github.com/TooTallNate/node-https-proxy-agent -[pac-proxy-agent]: https://github.com/TooTallNate/node-pac-proxy-agent -[socks-proxy-agent]: https://github.com/TooTallNate/node-socks-proxy-agent -[http.Agent]: https://nodejs.org/api/http.html#http_class_http_agent diff --git a/node_modules/agentkeepalive/README.md b/node_modules/agentkeepalive/README.md deleted file mode 100644 index 70e57bbf6dd45..0000000000000 --- a/node_modules/agentkeepalive/README.md +++ /dev/null @@ -1,241 +0,0 @@ -# agentkeepalive - -[![NPM version][npm-image]][npm-url] -[![Known Vulnerabilities][snyk-image]][snyk-url] -[![npm download][download-image]][download-url] - -[npm-image]: https://img.shields.io/npm/v/agentkeepalive.svg?style=flat -[npm-url]: https://npmjs.org/package/agentkeepalive -[snyk-image]: https://snyk.io/test/npm/agentkeepalive/badge.svg?style=flat-square -[snyk-url]: https://snyk.io/test/npm/agentkeepalive -[download-image]: https://img.shields.io/npm/dm/agentkeepalive.svg?style=flat-square -[download-url]: https://npmjs.org/package/agentkeepalive - -The enhancement features `keep alive` `http.Agent`. Support `http` and `https`. - -## What's different from original `http.Agent`? - -- `keepAlive=true` by default -- Disable Nagle's algorithm: `socket.setNoDelay(true)` -- Add free socket timeout: avoid long time inactivity socket leak in the free-sockets queue. -- Add active socket timeout: avoid long time inactivity socket leak in the active-sockets queue. -- TTL for active socket. - -## Node.js version required - -Support Node.js >= `8.0.0` - -## Install - -```bash -$ npm install agentkeepalive --save -``` - -## new Agent([options]) - -* `options` {Object} Set of configurable options to set on the agent. 
- Can have the following fields: - * `keepAlive` {Boolean} Keep sockets around in a pool to be used by - other requests in the future. Default = `true`. - * `keepAliveMsecs` {Number} When using the keepAlive option, specifies the initial delay - for TCP Keep-Alive packets. Ignored when the keepAlive option is false or undefined. Defaults to 1000. - Default = `1000`. Only relevant if `keepAlive` is set to `true`. - * `freeSocketTimeout`: {Number} Sets the free socket to timeout - after `freeSocketTimeout` milliseconds of inactivity on the free socket. - Default is `15000`. - Only relevant if `keepAlive` is set to `true`. - * `timeout`: {Number} Sets the working socket to timeout - after `timeout` milliseconds of inactivity on the working socket. - Default is `freeSocketTimeout * 2`. - * `maxSockets` {Number} Maximum number of sockets to allow per - host. Default = `Infinity`. - * `maxFreeSockets` {Number} Maximum number of sockets (per host) to leave open - in a free state. Only relevant if `keepAlive` is set to `true`. - Default = `256`. - * `socketActiveTTL` {Number} Sets the socket active time to live, even if it's in use. - If not set, the behaviour keeps the same (the socket will be released only when free) - Default = `null`. - -## Usage - -```js -const http = require('http'); -const Agent = require('agentkeepalive'); - -const keepaliveAgent = new Agent({ - maxSockets: 100, - maxFreeSockets: 10, - timeout: 60000, // active socket keepalive for 60 seconds - freeSocketTimeout: 30000, // free socket keepalive for 30 seconds -}); - -const options = { - host: 'cnodejs.org', - port: 80, - path: '/', - method: 'GET', - agent: keepaliveAgent, -}; - -const req = http.request(options, res => { - console.log('STATUS: ' + res.statusCode); - console.log('HEADERS: ' + JSON.stringify(res.headers)); - res.setEncoding('utf8'); - res.on('data', function (chunk) { - console.log('BODY: ' + chunk); - }); -}); -req.on('error', e => { - console.log('problem with request: ' + e.message); -}); -req.end(); - -setTimeout(() => { - if (keepaliveAgent.statusChanged) { - console.log('[%s] agent status changed: %j', Date(), keepaliveAgent.getCurrentStatus()); - } -}, 2000); - -``` - -### `getter agent.statusChanged` - -counters have change or not after last checkpoint. 
- -### `agent.getCurrentStatus()` - -`agent.getCurrentStatus()` will return a object to show the status of this agent: - -```js -{ - createSocketCount: 10, - closeSocketCount: 5, - timeoutSocketCount: 0, - requestCount: 5, - freeSockets: { 'localhost:57479:': 3 }, - sockets: { 'localhost:57479:': 5 }, - requests: {} -} -``` - -### Support `https` - -```js -const https = require('https'); -const HttpsAgent = require('agentkeepalive').HttpsAgent; - -const keepaliveAgent = new HttpsAgent(); -// https://www.google.com/search?q=nodejs&sugexp=chrome,mod=12&sourceid=chrome&ie=UTF-8 -const options = { - host: 'www.google.com', - port: 443, - path: '/search?q=nodejs&sugexp=chrome,mod=12&sourceid=chrome&ie=UTF-8', - method: 'GET', - agent: keepaliveAgent, -}; - -const req = https.request(options, res => { - console.log('STATUS: ' + res.statusCode); - console.log('HEADERS: ' + JSON.stringify(res.headers)); - res.setEncoding('utf8'); - res.on('data', chunk => { - console.log('BODY: ' + chunk); - }); -}); - -req.on('error', e => { - console.log('problem with request: ' + e.message); -}); -req.end(); - -setTimeout(() => { - console.log('agent status: %j', keepaliveAgent.getCurrentStatus()); -}, 2000); -``` - -### Support `req.reusedSocket` - -This agent implements the `req.reusedSocket` to determine whether a request is send through a reused socket. - -When server closes connection at unfortunate time ([keep-alive race](https://code-examples.net/en/q/28a8069)), the http client will throw a `ECONNRESET` error. Under this circumstance, `req.reusedSocket` is useful when we want to retry the request automatically. - -```js -const http = require('http'); -const Agent = require('agentkeepalive'); -const agent = new Agent(); - -const req = http - .get('http://localhost:3000', { agent }, (res) => { - // ... - }) - .on('error', (err) => { - if (req.reusedSocket && err.code === 'ECONNRESET') { - // retry the request or anything else... - } - }) -``` - -This behavior is consistent with Node.js core. But through `agentkeepalive`, you can use this feature in older Node.js version. 
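
The `req.reusedSocket` section above leaves the retry itself as a comment; one possible shape for that retry, assuming a local server on port 3000:

```js
// Hypothetical retry for the keep-alive race described above:
// retry once when a reused socket is reset by the server.
const http = require('http');
const Agent = require('agentkeepalive');
const agent = new Agent();

function getWithRetry(url, retried = false) {
  const req = http
    .get(url, { agent }, (res) => {
      console.log('status:', res.statusCode);
      res.resume();
    })
    .on('error', (err) => {
      if (req.reusedSocket && err.code === 'ECONNRESET' && !retried) {
        return getWithRetry(url, true); // the socket was stale, try again
      }
      console.error('request failed:', err);
    });
}

getWithRetry('http://localhost:3000');
```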
- -## [Benchmark](https://github.com/node-modules/agentkeepalive/tree/master/benchmark) - -run the benchmark: - -```bash -cd benchmark -sh start.sh -``` - -Intel(R) Core(TM)2 Duo CPU P8600 @ 2.40GHz - -node@v0.8.9 - -50 maxSockets, 60 concurrent, 1000 requests per concurrent, 5ms delay - -Keep alive agent (30 seconds): - -```js -Transactions: 60000 hits -Availability: 100.00 % -Elapsed time: 29.70 secs -Data transferred: 14.88 MB -Response time: 0.03 secs -Transaction rate: 2020.20 trans/sec -Throughput: 0.50 MB/sec -Concurrency: 59.84 -Successful transactions: 60000 -Failed transactions: 0 -Longest transaction: 0.15 -Shortest transaction: 0.01 -``` - -Normal agent: - -```js -Transactions: 60000 hits -Availability: 100.00 % -Elapsed time: 46.53 secs -Data transferred: 14.88 MB -Response time: 0.05 secs -Transaction rate: 1289.49 trans/sec -Throughput: 0.32 MB/sec -Concurrency: 59.81 -Successful transactions: 60000 -Failed transactions: 0 -Longest transaction: 0.45 -Shortest transaction: 0.00 -``` - -Socket created: - -```bash -[proxy.js:120000] keepalive, 50 created, 60000 requestFinished, 1200 req/socket, 0 requests, 0 sockets, 0 unusedSockets, 50 timeout -{" <10ms":662," <15ms":17825," <20ms":20552," <30ms":17646," <40ms":2315," <50ms":567," <100ms":377," <150ms":56," <200ms":0," >=200ms+":0} ----------------------------------------------------------------- -[proxy.js:120000] normal , 53866 created, 84260 requestFinished, 1.56 req/socket, 0 requests, 0 sockets -{" <10ms":75," <15ms":1112," <20ms":10947," <30ms":32130," <40ms":8228," <50ms":3002," <100ms":4274," <150ms":181," <200ms":18," >=200ms+":33} -``` - -## License - -[MIT](LICENSE) diff --git a/node_modules/ajv/README.md b/node_modules/ajv/README.md deleted file mode 100644 index 5aa2078d8920b..0000000000000 --- a/node_modules/ajv/README.md +++ /dev/null @@ -1,1497 +0,0 @@ -<img align="right" alt="Ajv logo" width="160" src="https://ajv.js.org/images/ajv_logo.png"> - -# Ajv: Another JSON Schema Validator - -The fastest JSON Schema validator for Node.js and browser. Supports draft-04/06/07. - -[![Build Status](https://travis-ci.org/ajv-validator/ajv.svg?branch=master)](https://travis-ci.org/ajv-validator/ajv) -[![npm](https://img.shields.io/npm/v/ajv.svg)](https://www.npmjs.com/package/ajv) -[![npm (beta)](https://img.shields.io/npm/v/ajv/beta)](https://www.npmjs.com/package/ajv/v/7.0.0-beta.0) -[![npm downloads](https://img.shields.io/npm/dm/ajv.svg)](https://www.npmjs.com/package/ajv) -[![Coverage Status](https://coveralls.io/repos/github/ajv-validator/ajv/badge.svg?branch=master)](https://coveralls.io/github/ajv-validator/ajv?branch=master) -[![Gitter](https://img.shields.io/gitter/room/ajv-validator/ajv.svg)](https://gitter.im/ajv-validator/ajv) -[![GitHub Sponsors](https://img.shields.io/badge/$-sponsors-brightgreen)](https://github.com/sponsors/epoberezkin) - - -## Ajv v7 beta is released - -[Ajv version 7.0.0-beta.0](https://github.com/ajv-validator/ajv/tree/v7-beta) is released with these changes: - -- to reduce the mistakes in JSON schemas and unexpected validation results, [strict mode](./docs/strict-mode.md) is added - it prohibits ignored or ambiguous JSON Schema elements. -- to make code injection from untrusted schemas impossible, [code generation](./docs/codegen.md) is fully re-written to be safe. -- to simplify Ajv extensions, the new keyword API that is used by pre-defined keywords is available to user-defined keywords - it is much easier to define any keywords now, especially with subschemas. 
-- schemas are compiled to ES6 code (ES5 code generation is supported with an option). -- to improve reliability and maintainability the code is migrated to TypeScript. - -**Please note**: - -- the support for JSON-Schema draft-04 is removed - if you have schemas using "id" attributes you have to replace them with "\$id" (or continue using version 6 that will be supported until 02/28/2021). -- all formats are separated to ajv-formats package - they have to be explicitely added if you use them. - -See [release notes](https://github.com/ajv-validator/ajv/releases/tag/v7.0.0-beta.0) for the details. - -To install the new version: - -```bash -npm install ajv@beta -``` - -See [Getting started with v7](https://github.com/ajv-validator/ajv/tree/v7-beta#usage) for code example. - - -## Mozilla MOSS grant and OpenJS Foundation - -[<img src="https://www.poberezkin.com/images/mozilla.png" width="240" height="68">](https://www.mozilla.org/en-US/moss/)     [<img src="https://www.poberezkin.com/images/openjs.png" width="220" height="68">](https://openjsf.org/blog/2020/08/14/ajv-joins-openjs-foundation-as-an-incubation-project/) - -Ajv has been awarded a grant from Mozilla’s [Open Source Support (MOSS) program](https://www.mozilla.org/en-US/moss/) in the “Foundational Technology” track! It will sponsor the development of Ajv support of [JSON Schema version 2019-09](https://tools.ietf.org/html/draft-handrews-json-schema-02) and of [JSON Type Definition](https://tools.ietf.org/html/draft-ucarion-json-type-definition-04). - -Ajv also joined [OpenJS Foundation](https://openjsf.org/) – having this support will help ensure the longevity and stability of Ajv for all its users. - -This [blog post](https://www.poberezkin.com/posts/2020-08-14-ajv-json-validator-mozilla-open-source-grant-openjs-foundation.html) has more details. - -I am looking for the long term maintainers of Ajv – working with [ReadySet](https://www.thereadyset.co/), also sponsored by Mozilla, to establish clear guidelines for the role of a "maintainer" and the contribution standards, and to encourage a wider, more inclusive, contribution from the community. - - -## Please [sponsor Ajv development](https://github.com/sponsors/epoberezkin) - -Since I asked to support Ajv development 40 people and 6 organizations contributed via GitHub and OpenCollective - this support helped receiving the MOSS grant! - -Your continuing support is very important - the funds will be used to develop and maintain Ajv once the next major version is released. - -Please sponsor Ajv via: -- [GitHub sponsors page](https://github.com/sponsors/epoberezkin) (GitHub will match it) -- [Ajv Open Collective️](https://opencollective.com/ajv) - -Thank you. 
- - -#### Open Collective sponsors - -<a href="https://opencollective.com/ajv"><img src="https://opencollective.com/ajv/individuals.svg?width=890"></a> - -<a href="https://opencollective.com/ajv/organization/0/website"><img src="https://opencollective.com/ajv/organization/0/avatar.svg"></a> -<a href="https://opencollective.com/ajv/organization/1/website"><img src="https://opencollective.com/ajv/organization/1/avatar.svg"></a> -<a href="https://opencollective.com/ajv/organization/2/website"><img src="https://opencollective.com/ajv/organization/2/avatar.svg"></a> -<a href="https://opencollective.com/ajv/organization/3/website"><img src="https://opencollective.com/ajv/organization/3/avatar.svg"></a> -<a href="https://opencollective.com/ajv/organization/4/website"><img src="https://opencollective.com/ajv/organization/4/avatar.svg"></a> -<a href="https://opencollective.com/ajv/organization/5/website"><img src="https://opencollective.com/ajv/organization/5/avatar.svg"></a> -<a href="https://opencollective.com/ajv/organization/6/website"><img src="https://opencollective.com/ajv/organization/6/avatar.svg"></a> -<a href="https://opencollective.com/ajv/organization/7/website"><img src="https://opencollective.com/ajv/organization/7/avatar.svg"></a> -<a href="https://opencollective.com/ajv/organization/8/website"><img src="https://opencollective.com/ajv/organization/8/avatar.svg"></a> -<a href="https://opencollective.com/ajv/organization/9/website"><img src="https://opencollective.com/ajv/organization/9/avatar.svg"></a> - - -## Using version 6 - -[JSON Schema draft-07](http://json-schema.org/latest/json-schema-validation.html) is published. - -[Ajv version 6.0.0](https://github.com/ajv-validator/ajv/releases/tag/v6.0.0) that supports draft-07 is released. It may require either migrating your schemas or updating your code (to continue using draft-04 and v5 schemas, draft-06 schemas will be supported without changes). 
- -__Please note__: To use Ajv with draft-06 schemas you need to explicitly add the meta-schema to the validator instance: - -```javascript -ajv.addMetaSchema(require('ajv/lib/refs/json-schema-draft-06.json')); -``` - -To use Ajv with draft-04 schemas in addition to explicitly adding meta-schema you also need to use option schemaId: - -```javascript -var ajv = new Ajv({schemaId: 'id'}); -// If you want to use both draft-04 and draft-06/07 schemas: -// var ajv = new Ajv({schemaId: 'auto'}); -ajv.addMetaSchema(require('ajv/lib/refs/json-schema-draft-04.json')); -``` - - -## Contents - -- [Performance](#performance) -- [Features](#features) -- [Getting started](#getting-started) -- [Frequently Asked Questions](https://github.com/ajv-validator/ajv/blob/master/FAQ.md) -- [Using in browser](#using-in-browser) - - [Ajv and Content Security Policies (CSP)](#ajv-and-content-security-policies-csp) -- [Command line interface](#command-line-interface) -- Validation - - [Keywords](#validation-keywords) - - [Annotation keywords](#annotation-keywords) - - [Formats](#formats) - - [Combining schemas with $ref](#ref) - - [$data reference](#data-reference) - - NEW: [$merge and $patch keywords](#merge-and-patch-keywords) - - [Defining custom keywords](#defining-custom-keywords) - - [Asynchronous schema compilation](#asynchronous-schema-compilation) - - [Asynchronous validation](#asynchronous-validation) -- [Security considerations](#security-considerations) - - [Security contact](#security-contact) - - [Untrusted schemas](#untrusted-schemas) - - [Circular references in objects](#circular-references-in-javascript-objects) - - [Trusted schemas](#security-risks-of-trusted-schemas) - - [ReDoS attack](#redos-attack) -- Modifying data during validation - - [Filtering data](#filtering-data) - - [Assigning defaults](#assigning-defaults) - - [Coercing data types](#coercing-data-types) -- API - - [Methods](#api) - - [Options](#options) - - [Validation errors](#validation-errors) -- [Plugins](#plugins) -- [Related packages](#related-packages) -- [Some packages using Ajv](#some-packages-using-ajv) -- [Tests, Contributing, Changes history](#tests) -- [Support, Code of conduct, License](#open-source-software-support) - - -## Performance - -Ajv generates code using [doT templates](https://github.com/olado/doT) to turn JSON Schemas into super-fast validation functions that are efficient for v8 optimization. 
- -Currently Ajv is the fastest and the most standard compliant validator according to these benchmarks: - -- [json-schema-benchmark](https://github.com/ebdrup/json-schema-benchmark) - 50% faster than the second place -- [jsck benchmark](https://github.com/pandastrike/jsck#benchmarks) - 20-190% faster -- [z-schema benchmark](https://rawgit.com/zaggino/z-schema/master/benchmark/results.html) -- [themis benchmark](https://cdn.rawgit.com/playlyfe/themis/master/benchmark/results.html) - - -Performance of different validators by [json-schema-benchmark](https://github.com/ebdrup/json-schema-benchmark): - -[![performance](https://chart.googleapis.com/chart?chxt=x,y&cht=bhs&chco=76A4FB&chls=2.0&chbh=32,4,1&chs=600x416&chxl=-1:|djv|ajv|json-schema-validator-generator|jsen|is-my-json-valid|themis|z-schema|jsck|skeemas|json-schema-library|tv4&chd=t:100,98,72.1,66.8,50.1,15.1,6.1,3.8,1.2,0.7,0.2)](https://github.com/ebdrup/json-schema-benchmark/blob/master/README.md#performance) - - -## Features - -- Ajv implements full JSON Schema [draft-06/07](http://json-schema.org/) and draft-04 standards: - - all validation keywords (see [JSON Schema validation keywords](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md)) - - full support of remote refs (remote schemas have to be added with `addSchema` or compiled to be available) - - support of circular references between schemas - - correct string lengths for strings with unicode pairs (can be turned off) - - [formats](#formats) defined by JSON Schema draft-07 standard and custom formats (can be turned off) - - [validates schemas against meta-schema](#api-validateschema) -- supports [browsers](#using-in-browser) and Node.js 0.10-14.x -- [asynchronous loading](#asynchronous-schema-compilation) of referenced schemas during compilation -- "All errors" validation mode with [option allErrors](#options) -- [error messages with parameters](#validation-errors) describing error reasons to allow creating custom error messages -- i18n error messages support with [ajv-i18n](https://github.com/ajv-validator/ajv-i18n) package -- [filtering data](#filtering-data) from additional properties -- [assigning defaults](#assigning-defaults) to missing properties and items -- [coercing data](#coercing-data-types) to the types specified in `type` keywords -- [custom keywords](#defining-custom-keywords) -- draft-06/07 keywords `const`, `contains`, `propertyNames` and `if/then/else` -- draft-06 boolean schemas (`true`/`false` as a schema to always pass/fail). -- keywords `switch`, `patternRequired`, `formatMaximum` / `formatMinimum` and `formatExclusiveMaximum` / `formatExclusiveMinimum` from [JSON Schema extension proposals](https://github.com/json-schema/json-schema/wiki/v5-Proposals) with [ajv-keywords](https://github.com/ajv-validator/ajv-keywords) package -- [$data reference](#data-reference) to use values from the validated data as values for the schema keywords -- [asynchronous validation](#asynchronous-validation) of custom formats and keywords - - -## Install - -``` -npm install ajv -``` - - -## <a name="usage"></a>Getting started - -Try it in the Node.js REPL: https://tonicdev.com/npm/ajv - - -The fastest validation call: - -```javascript -// Node.js require: -var Ajv = require('ajv'); -// or ESM/TypeScript import -import Ajv from 'ajv'; - -var ajv = new Ajv(); // options can be passed, e.g. {allErrors: true} -var validate = ajv.compile(schema); -var valid = validate(data); -if (!valid) console.log(validate.errors); -``` - -or with less code - -```javascript -// ... 
-var valid = ajv.validate(schema, data); -if (!valid) console.log(ajv.errors); -// ... -``` - -or - -```javascript -// ... -var valid = ajv.addSchema(schema, 'mySchema') - .validate('mySchema', data); -if (!valid) console.log(ajv.errorsText()); -// ... -``` - -See [API](#api) and [Options](#options) for more details. - -Ajv compiles schemas to functions and caches them in all cases (using schema serialized with [fast-json-stable-stringify](https://github.com/epoberezkin/fast-json-stable-stringify) or a custom function as a key), so that the next time the same schema is used (not necessarily the same object instance) it won't be compiled again. - -The best performance is achieved when using compiled functions returned by `compile` or `getSchema` methods (there is no additional function call). - -__Please note__: every time a validation function or `ajv.validate` are called `errors` property is overwritten. You need to copy `errors` array reference to another variable if you want to use it later (e.g., in the callback). See [Validation errors](#validation-errors) - -__Note for TypeScript users__: `ajv` provides its own TypeScript declarations -out of the box, so you don't need to install the deprecated `@types/ajv` -module. - - -## Using in browser - -You can require Ajv directly from the code you browserify - in this case Ajv will be a part of your bundle. - -If you need to use Ajv in several bundles you can create a separate UMD bundle using `npm run bundle` script (thanks to [siddo420](https://github.com/siddo420)). - -Then you need to load Ajv in the browser: -```html -<script src="ajv.min.js"></script> -``` - -This bundle can be used with different module systems; it creates global `Ajv` if no module system is found. - -The browser bundle is available on [cdnjs](https://cdnjs.com/libraries/ajv). - -Ajv is tested with these browsers: - -[![Sauce Test Status](https://saucelabs.com/browser-matrix/epoberezkin.svg)](https://saucelabs.com/u/epoberezkin) - -__Please note__: some frameworks, e.g. Dojo, may redefine global require in such way that is not compatible with CommonJS module format. In such case Ajv bundle has to be loaded before the framework and then you can use global Ajv (see issue [#234](https://github.com/ajv-validator/ajv/issues/234)). - - -### Ajv and Content Security Policies (CSP) - -If you're using Ajv to compile a schema (the typical use) in a browser document that is loaded with a Content Security Policy (CSP), that policy will require a `script-src` directive that includes the value `'unsafe-eval'`. -:warning: NOTE, however, that `unsafe-eval` is NOT recommended in a secure CSP[[1]](https://developer.chrome.com/extensions/contentSecurityPolicy#relaxing-eval), as it has the potential to open the document to cross-site scripting (XSS) attacks. - -In order to make use of Ajv without easing your CSP, you can [pre-compile a schema using the CLI](https://github.com/ajv-validator/ajv-cli#compile-schemas). This will transpile the schema JSON into a JavaScript file that exports a `validate` function that works simlarly to a schema compiled at runtime. - -Note that pre-compilation of schemas is performed using [ajv-pack](https://github.com/ajv-validator/ajv-pack) and there are [some limitations to the schema features it can compile](https://github.com/ajv-validator/ajv-pack#limitations). A successfully pre-compiled schema is equivalent to the same schema compiled at runtime. 
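
For the CSP-friendly pre-compilation route described above, the generated file is consumed without Ajv at runtime; a sketch with a made-up output file name, assuming the exported function behaves like the result of `ajv.compile`:

```javascript
// Hypothetical: using a validation module pre-compiled with ajv-cli,
// as described above. "./validate-user.js" is a made-up output file.
var validate = require('./validate-user.js');

var valid = validate({ name: 'joe', age: 42 });
if (!valid) console.log(validate.errors);
```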
- - -## Command line interface - -CLI is available as a separate npm package [ajv-cli](https://github.com/ajv-validator/ajv-cli). It supports: - -- compiling JSON Schemas to test their validity -- BETA: generating standalone module exporting a validation function to be used without Ajv (using [ajv-pack](https://github.com/ajv-validator/ajv-pack)) -- migrate schemas to draft-07 (using [json-schema-migrate](https://github.com/epoberezkin/json-schema-migrate)) -- validating data file(s) against JSON Schema -- testing expected validity of data against JSON Schema -- referenced schemas -- custom meta-schemas -- files in JSON, JSON5, YAML, and JavaScript format -- all Ajv options -- reporting changes in data after validation in [JSON-patch](https://tools.ietf.org/html/rfc6902) format - - -## Validation keywords - -Ajv supports all validation keywords from draft-07 of JSON Schema standard: - -- [type](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md#type) -- [for numbers](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md#keywords-for-numbers) - maximum, minimum, exclusiveMaximum, exclusiveMinimum, multipleOf -- [for strings](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md#keywords-for-strings) - maxLength, minLength, pattern, format -- [for arrays](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md#keywords-for-arrays) - maxItems, minItems, uniqueItems, items, additionalItems, [contains](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md#contains) -- [for objects](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md#keywords-for-objects) - maxProperties, minProperties, required, properties, patternProperties, additionalProperties, dependencies, [propertyNames](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md#propertynames) -- [for all types](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md#keywords-for-all-types) - enum, [const](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md#const) -- [compound keywords](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md#compound-keywords) - not, oneOf, anyOf, allOf, [if/then/else](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md#ifthenelse) - -With [ajv-keywords](https://github.com/ajv-validator/ajv-keywords) package Ajv also supports validation keywords from [JSON Schema extension proposals](https://github.com/json-schema/json-schema/wiki/v5-Proposals) for JSON Schema standard: - -- [patternRequired](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md#patternrequired-proposed) - like `required` but with patterns that some property should match. -- [formatMaximum, formatMinimum, formatExclusiveMaximum, formatExclusiveMinimum](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md#formatmaximum--formatminimum-and-exclusiveformatmaximum--exclusiveformatminimum-proposed) - setting limits for date, time, etc. - -See [JSON Schema validation keywords](https://github.com/ajv-validator/ajv/blob/master/KEYWORDS.md) for more details. - - -## Annotation keywords - -JSON Schema specification defines several annotation keywords that describe schema itself but do not perform any validation. - -- `title` and `description`: information about the data represented by that schema -- `$comment` (NEW in draft-07): information for developers. With option `$comment` Ajv logs or passes the comment string to the user-supplied function. See [Options](#options). 
-- `default`: a default value of the data instance, see [Assigning defaults](#assigning-defaults). -- `examples` (NEW in draft-06): an array of data instances. Ajv does not check the validity of these instances against the schema. -- `readOnly` and `writeOnly` (NEW in draft-07): marks data-instance as read-only or write-only in relation to the source of the data (database, api, etc.). -- `contentEncoding`: [RFC 2045](https://tools.ietf.org/html/rfc2045#section-6.1 ), e.g., "base64". -- `contentMediaType`: [RFC 2046](https://tools.ietf.org/html/rfc2046), e.g., "image/png". - -__Please note__: Ajv does not implement validation of the keywords `examples`, `contentEncoding` and `contentMediaType` but it reserves them. If you want to create a plugin that implements some of them, it should remove these keywords from the instance. - - -## Formats - -Ajv implements formats defined by JSON Schema specification and several other formats. It is recommended NOT to use "format" keyword implementations with untrusted data, as they use potentially unsafe regular expressions - see [ReDoS attack](#redos-attack). - -__Please note__: if you need to use "format" keyword to validate untrusted data, you MUST assess their suitability and safety for your validation scenarios. - -The following formats are implemented for string validation with "format" keyword: - -- _date_: full-date according to [RFC3339](http://tools.ietf.org/html/rfc3339#section-5.6). -- _time_: time with optional time-zone. -- _date-time_: date-time from the same source (time-zone is mandatory). `date`, `time` and `date-time` validate ranges in `full` mode and only regexp in `fast` mode (see [options](#options)). -- _uri_: full URI. -- _uri-reference_: URI reference, including full and relative URIs. -- _uri-template_: URI template according to [RFC6570](https://tools.ietf.org/html/rfc6570) -- _url_ (deprecated): [URL record](https://url.spec.whatwg.org/#concept-url). -- _email_: email address. -- _hostname_: host name according to [RFC1034](http://tools.ietf.org/html/rfc1034#section-3.5). -- _ipv4_: IP address v4. -- _ipv6_: IP address v6. -- _regex_: tests whether a string is a valid regular expression by passing it to RegExp constructor. -- _uuid_: Universally Unique IDentifier according to [RFC4122](http://tools.ietf.org/html/rfc4122). -- _json-pointer_: JSON-pointer according to [RFC6901](https://tools.ietf.org/html/rfc6901). -- _relative-json-pointer_: relative JSON-pointer according to [this draft](http://tools.ietf.org/html/draft-luff-relative-json-pointer-00). - -__Please note__: JSON Schema draft-07 also defines formats `iri`, `iri-reference`, `idn-hostname` and `idn-email` for URLs, hostnames and emails with international characters. Ajv does not implement these formats. If you create Ajv plugin that implements them please make a PR to mention this plugin here. - -There are two modes of format validation: `fast` and `full`. This mode affects formats `date`, `time`, `date-time`, `uri`, `uri-reference`, and `email`. See [Options](#options) for details. - -You can add additional formats and replace any of the formats above using [addFormat](#api-addformat) method. - -The option `unknownFormats` allows changing the default behaviour when an unknown format is encountered. In this case Ajv can either fail schema compilation (default) or ignore it (default in versions before 5.0.0). You also can allow specific format(s) that will be ignored. See [Options](#options) for details. 
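
The `addFormat` method mentioned above takes a format name and, in the simplest case, a regular expression; a small sketch with a made-up format name:

```javascript
// Hypothetical custom format registered via addFormat, as mentioned above.
var Ajv = require('ajv');
var ajv = new Ajv();

// a deliberately simplified "three numbers" version pattern
ajv.addFormat('semver-ish', /^\d+\.\d+\.\d+$/);

var validate = ajv.compile({ type: 'string', format: 'semver-ish' });
console.log(validate('1.8.5')); // true
console.log(validate('1.8'));   // false
```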
- -You can find regular expressions used for format validation and the sources that were used in [formats.js](https://github.com/ajv-validator/ajv/blob/master/lib/compile/formats.js). - - -## <a name="ref"></a>Combining schemas with $ref - -You can structure your validation logic across multiple schema files and have schemas reference each other using `$ref` keyword. - -Example: - -```javascript -var schema = { - "$id": "http://example.com/schemas/schema.json", - "type": "object", - "properties": { - "foo": { "$ref": "defs.json#/definitions/int" }, - "bar": { "$ref": "defs.json#/definitions/str" } - } -}; - -var defsSchema = { - "$id": "http://example.com/schemas/defs.json", - "definitions": { - "int": { "type": "integer" }, - "str": { "type": "string" } - } -}; -``` - -Now to compile your schema you can either pass all schemas to Ajv instance: - -```javascript -var ajv = new Ajv({schemas: [schema, defsSchema]}); -var validate = ajv.getSchema('http://example.com/schemas/schema.json'); -``` - -or use `addSchema` method: - -```javascript -var ajv = new Ajv; -var validate = ajv.addSchema(defsSchema) - .compile(schema); -``` - -See [Options](#options) and [addSchema](#api) method. - -__Please note__: -- `$ref` is resolved as the uri-reference using schema $id as the base URI (see the example). -- References can be recursive (and mutually recursive) to implement the schemas for different data structures (such as linked lists, trees, graphs, etc.). -- You don't have to host your schema files at the URIs that you use as schema $id. These URIs are only used to identify the schemas, and according to JSON Schema specification validators should not expect to be able to download the schemas from these URIs. -- The actual location of the schema file in the file system is not used. -- You can pass the identifier of the schema as the second parameter of `addSchema` method or as a property name in `schemas` option. This identifier can be used instead of (or in addition to) schema $id. -- You cannot have the same $id (or the schema identifier) used for more than one schema - the exception will be thrown. -- You can implement dynamic resolution of the referenced schemas using `compileAsync` method. In this way you can store schemas in any system (files, web, database, etc.) and reference them without explicitly adding to Ajv instance. See [Asynchronous schema compilation](#asynchronous-schema-compilation). - - -## $data reference - -With `$data` option you can use values from the validated data as the values for the schema keywords. See [proposal](https://github.com/json-schema-org/json-schema-spec/issues/51) for more information about how it works. - -`$data` reference is supported in the keywords: const, enum, format, maximum/minimum, exclusiveMaximum / exclusiveMinimum, maxLength / minLength, maxItems / minItems, maxProperties / minProperties, formatMaximum / formatMinimum, formatExclusiveMaximum / formatExclusiveMinimum, multipleOf, pattern, required, uniqueItems. - -The value of "$data" should be a [JSON-pointer](https://tools.ietf.org/html/rfc6901) to the data (the root is always the top level data object, even if the $data reference is inside a referenced subschema) or a [relative JSON-pointer](http://tools.ietf.org/html/draft-luff-relative-json-pointer-00) (it is relative to the current point in data; if the $data reference is inside a referenced subschema it cannot point to the data outside of the root level for this subschema). - -Examples. 
- -This schema requires that the value in property `smaller` is less or equal than the value in the property larger: - -```javascript -var ajv = new Ajv({$data: true}); - -var schema = { - "properties": { - "smaller": { - "type": "number", - "maximum": { "$data": "1/larger" } - }, - "larger": { "type": "number" } - } -}; - -var validData = { - smaller: 5, - larger: 7 -}; - -ajv.validate(schema, validData); // true -``` - -This schema requires that the properties have the same format as their field names: - -```javascript -var schema = { - "additionalProperties": { - "type": "string", - "format": { "$data": "0#" } - } -}; - -var validData = { - 'date-time': '1963-06-19T08:30:06.283185Z', - email: 'joe.bloggs@example.com' -} -``` - -`$data` reference is resolved safely - it won't throw even if some property is undefined. If `$data` resolves to `undefined` the validation succeeds (with the exclusion of `const` keyword). If `$data` resolves to incorrect type (e.g. not "number" for maximum keyword) the validation fails. - - -## $merge and $patch keywords - -With the package [ajv-merge-patch](https://github.com/ajv-validator/ajv-merge-patch) you can use the keywords `$merge` and `$patch` that allow extending JSON Schemas with patches using formats [JSON Merge Patch (RFC 7396)](https://tools.ietf.org/html/rfc7396) and [JSON Patch (RFC 6902)](https://tools.ietf.org/html/rfc6902). - -To add keywords `$merge` and `$patch` to Ajv instance use this code: - -```javascript -require('ajv-merge-patch')(ajv); -``` - -Examples. - -Using `$merge`: - -```json -{ - "$merge": { - "source": { - "type": "object", - "properties": { "p": { "type": "string" } }, - "additionalProperties": false - }, - "with": { - "properties": { "q": { "type": "number" } } - } - } -} -``` - -Using `$patch`: - -```json -{ - "$patch": { - "source": { - "type": "object", - "properties": { "p": { "type": "string" } }, - "additionalProperties": false - }, - "with": [ - { "op": "add", "path": "/properties/q", "value": { "type": "number" } } - ] - } -} -``` - -The schemas above are equivalent to this schema: - -```json -{ - "type": "object", - "properties": { - "p": { "type": "string" }, - "q": { "type": "number" } - }, - "additionalProperties": false -} -``` - -The properties `source` and `with` in the keywords `$merge` and `$patch` can use absolute or relative `$ref` to point to other schemas previously added to the Ajv instance or to the fragments of the current schema. - -See the package [ajv-merge-patch](https://github.com/ajv-validator/ajv-merge-patch) for more information. - - -## Defining custom keywords - -The advantages of using custom keywords are: - -- allow creating validation scenarios that cannot be expressed using JSON Schema -- simplify your schemas -- help bringing a bigger part of the validation logic to your schemas -- make your schemas more expressive, less verbose and closer to your application domain -- implement custom data processors that modify your data (`modifying` option MUST be used in keyword definition) and/or create side effects while the data is being validated - -If a keyword is used only for side-effects and its validation result is pre-defined, use option `valid: true/false` in keyword definition to simplify both generated code (no error handling in case of `valid: true`) and your keyword functions (no need to return any validation result). - -The concerns you have to be aware of when extending JSON Schema standard with custom keywords are the portability and understanding of your schemas. 
You will have to support these custom keywords on other platforms and to properly document these keywords so that everybody can understand them in your schemas. - -You can define custom keywords with [addKeyword](#api-addkeyword) method. Keywords are defined on the `ajv` instance level - new instances will not have previously defined keywords. - -Ajv allows defining keywords with: -- validation function -- compilation function -- macro function -- inline compilation function that should return code (as string) that will be inlined in the currently compiled schema. - -Example. `range` and `exclusiveRange` keywords using compiled schema: - -```javascript -ajv.addKeyword('range', { - type: 'number', - compile: function (sch, parentSchema) { - var min = sch[0]; - var max = sch[1]; - - return parentSchema.exclusiveRange === true - ? function (data) { return data > min && data < max; } - : function (data) { return data >= min && data <= max; } - } -}); - -var schema = { "range": [2, 4], "exclusiveRange": true }; -var validate = ajv.compile(schema); -console.log(validate(2.01)); // true -console.log(validate(3.99)); // true -console.log(validate(2)); // false -console.log(validate(4)); // false -``` - -Several custom keywords (typeof, instanceof, range and propertyNames) are defined in [ajv-keywords](https://github.com/ajv-validator/ajv-keywords) package - they can be used for your schemas and as a starting point for your own custom keywords. - -See [Defining custom keywords](https://github.com/ajv-validator/ajv/blob/master/CUSTOM.md) for more details. - - -## Asynchronous schema compilation - -During asynchronous compilation remote references are loaded using supplied function. See `compileAsync` [method](#api-compileAsync) and `loadSchema` [option](#options). - -Example: - -```javascript -var ajv = new Ajv({ loadSchema: loadSchema }); - -ajv.compileAsync(schema).then(function (validate) { - var valid = validate(data); - // ... -}); - -function loadSchema(uri) { - return request.json(uri).then(function (res) { - if (res.statusCode >= 400) - throw new Error('Loading error: ' + res.statusCode); - return res.body; - }); -} -``` - -__Please note__: [Option](#options) `missingRefs` should NOT be set to `"ignore"` or `"fail"` for asynchronous compilation to work. - - -## Asynchronous validation - -Example in Node.js REPL: https://tonicdev.com/esp/ajv-asynchronous-validation - -You can define custom formats and keywords that perform validation asynchronously by accessing database or some other service. You should add `async: true` in the keyword or format definition (see [addFormat](#api-addformat), [addKeyword](#api-addkeyword) and [Defining custom keywords](#defining-custom-keywords)). - -If your schema uses asynchronous formats/keywords or refers to some schema that contains them it should have `"$async": true` keyword so that Ajv can compile it correctly. If asynchronous format/keyword or reference to asynchronous schema is used in the schema without `$async` keyword Ajv will throw an exception during schema compilation. - -__Please note__: all asynchronous subschemas that are referenced from the current or other schemas should have `"$async": true` keyword as well, otherwise the schema compilation will fail. - -Validation function for an asynchronous custom format/keyword should return a promise that resolves with `true` or `false` (or rejects with `new Ajv.ValidationError(errors)` if you want to return custom errors from the keyword function). 
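For example, an asynchronous format could be defined like this (a minimal sketch; the format name `email-not-registered` and the `checkEmailNotRegistered` lookup are hypothetical placeholders for a real database or service call):

```javascript
var Ajv = require('ajv');
var ajv = new Ajv();

// hypothetical placeholder for a real asynchronous lookup
function checkEmailNotRegistered(email) {
  return Promise.resolve(email !== 'taken@example.com');
}

ajv.addFormat('email-not-registered', {
  async: true,
  validate: checkEmailNotRegistered // must return a promise that resolves with true/false
});

var schema = {
  "$async": true,
  "type": "string",
  "format": "email-not-registered"
};

var validate = ajv.compile(schema);

validate('new@example.com')
  .then(function (data) { console.log('valid:', data); })
  .catch(function (err) {
    if (!(err instanceof Ajv.ValidationError)) throw err;
    console.log('invalid:', err.errors);
  });
```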
- -Ajv compiles asynchronous schemas to [es7 async functions](http://tc39.github.io/ecmascript-asyncawait/) that can optionally be transpiled with [nodent](https://github.com/MatAtBread/nodent). Async functions are supported in Node.js 7+ and all modern browsers. You can also supply any other transpiler as a function via `processCode` option. See [Options](#options). - -The compiled validation function has `$async: true` property (if the schema is asynchronous), so you can differentiate these functions if you are using both synchronous and asynchronous schemas. - -Validation result will be a promise that resolves with validated data or rejects with an exception `Ajv.ValidationError` that contains the array of validation errors in `errors` property. - - -Example: - -```javascript -var ajv = new Ajv; -// require('ajv-async')(ajv); - -ajv.addKeyword('idExists', { - async: true, - type: 'number', - validate: checkIdExists -}); - - -function checkIdExists(schema, data) { - return knex(schema.table) - .select('id') - .where('id', data) - .then(function (rows) { - return !!rows.length; // true if record is found - }); -} - -var schema = { - "$async": true, - "properties": { - "userId": { - "type": "integer", - "idExists": { "table": "users" } - }, - "postId": { - "type": "integer", - "idExists": { "table": "posts" } - } - } -}; - -var validate = ajv.compile(schema); - -validate({ userId: 1, postId: 19 }) -.then(function (data) { - console.log('Data is valid', data); // { userId: 1, postId: 19 } -}) -.catch(function (err) { - if (!(err instanceof Ajv.ValidationError)) throw err; - // data is invalid - console.log('Validation errors:', err.errors); -}); -``` - -### Using transpilers with asynchronous validation functions. - -[ajv-async](https://github.com/ajv-validator/ajv-async) uses [nodent](https://github.com/MatAtBread/nodent) to transpile async functions. To use another transpiler you should separately install it (or load its bundle in the browser). - - -#### Using nodent - -```javascript -var ajv = new Ajv; -require('ajv-async')(ajv); -// in the browser if you want to load ajv-async bundle separately you can: -// window.ajvAsync(ajv); -var validate = ajv.compile(schema); // transpiled es7 async function -validate(data).then(successFunc).catch(errorFunc); -``` - - -#### Using other transpilers - -```javascript -var ajv = new Ajv({ processCode: transpileFunc }); -var validate = ajv.compile(schema); // transpiled es7 async function -validate(data).then(successFunc).catch(errorFunc); -``` - -See [Options](#options). - - -## Security considerations - -JSON Schema, if properly used, can replace data sanitisation. It doesn't replace other API security considerations. It also introduces additional security aspects to consider. - - -##### Security contact - -To report a security vulnerability, please use the -[Tidelift security contact](https://tidelift.com/security). -Tidelift will coordinate the fix and disclosure. Please do NOT report security vulnerabilities via GitHub issues. - - -##### Untrusted schemas - -Ajv treats JSON schemas as trusted as your application code. This security model is based on the most common use case, when the schemas are static and bundled together with the application. - -If your schemas are received from untrusted sources (or generated from untrusted data) there are several scenarios you need to prevent: -- compiling schemas can cause stack overflow (if they are too deep) -- compiling schemas can be slow (e.g. 
[#557](https://github.com/ajv-validator/ajv/issues/557)) -- validating certain data can be slow - -It is difficult to predict all the scenarios, but at the very least it may help to limit the size of untrusted schemas (e.g. limit JSON string length) and also the maximum schema object depth (that can be high for relatively small JSON strings). You also may want to mitigate slow regular expressions in `pattern` and `patternProperties` keywords. - -Regardless the measures you take, using untrusted schemas increases security risks. - - -##### Circular references in JavaScript objects - -Ajv does not support schemas and validated data that have circular references in objects. See [issue #802](https://github.com/ajv-validator/ajv/issues/802). - -An attempt to compile such schemas or validate such data would cause stack overflow (or will not complete in case of asynchronous validation). Depending on the parser you use, untrusted data can lead to circular references. - - -##### Security risks of trusted schemas - -Some keywords in JSON Schemas can lead to very slow validation for certain data. These keywords include (but may be not limited to): - -- `pattern` and `format` for large strings - in some cases using `maxLength` can help mitigate it, but certain regular expressions can lead to exponential validation time even with relatively short strings (see [ReDoS attack](#redos-attack)). -- `patternProperties` for large property names - use `propertyNames` to mitigate, but some regular expressions can have exponential evaluation time as well. -- `uniqueItems` for large non-scalar arrays - use `maxItems` to mitigate - -__Please note__: The suggestions above to prevent slow validation would only work if you do NOT use `allErrors: true` in production code (using it would continue validation after validation errors). - -You can validate your JSON schemas against [this meta-schema](https://github.com/ajv-validator/ajv/blob/master/lib/refs/json-schema-secure.json) to check that these recommendations are followed: - -```javascript -const isSchemaSecure = ajv.compile(require('ajv/lib/refs/json-schema-secure.json')); - -const schema1 = {format: 'email'}; -isSchemaSecure(schema1); // false - -const schema2 = {format: 'email', maxLength: MAX_LENGTH}; -isSchemaSecure(schema2); // true -``` - -__Please note__: following all these recommendation is not a guarantee that validation of untrusted data is safe - it can still lead to some undesirable results. - - -##### Content Security Policies (CSP) -See [Ajv and Content Security Policies (CSP)](#ajv-and-content-security-policies-csp) - - -## ReDoS attack - -Certain regular expressions can lead to the exponential evaluation time even with relatively short strings. - -Please assess the regular expressions you use in the schemas on their vulnerability to this attack - see [safe-regex](https://github.com/substack/safe-regex), for example. - -__Please note__: some formats that Ajv implements use [regular expressions](https://github.com/ajv-validator/ajv/blob/master/lib/compile/formats.js) that can be vulnerable to ReDoS attack, so if you use Ajv to validate data from untrusted sources __it is strongly recommended__ to consider the following: - -- making assessment of "format" implementations in Ajv. -- using `format: 'fast'` option that simplifies some of the regular expressions (although it does not guarantee that they are safe). 
-- replacing format implementations provided by Ajv with your own implementations of "format" keyword that either uses different regular expressions or another approach to format validation. Please see [addFormat](#api-addformat) method. -- disabling format validation by ignoring "format" keyword with option `format: false` - -Whatever mitigation you choose, please assume all formats provided by Ajv as potentially unsafe and make your own assessment of their suitability for your validation scenarios. - - -## Filtering data - -With [option `removeAdditional`](#options) (added by [andyscott](https://github.com/andyscott)) you can filter data during the validation. - -This option modifies original data. - -Example: - -```javascript -var ajv = new Ajv({ removeAdditional: true }); -var schema = { - "additionalProperties": false, - "properties": { - "foo": { "type": "number" }, - "bar": { - "additionalProperties": { "type": "number" }, - "properties": { - "baz": { "type": "string" } - } - } - } -} - -var data = { - "foo": 0, - "additional1": 1, // will be removed; `additionalProperties` == false - "bar": { - "baz": "abc", - "additional2": 2 // will NOT be removed; `additionalProperties` != false - }, -} - -var validate = ajv.compile(schema); - -console.log(validate(data)); // true -console.log(data); // { "foo": 0, "bar": { "baz": "abc", "additional2": 2 } -``` - -If `removeAdditional` option in the example above were `"all"` then both `additional1` and `additional2` properties would have been removed. - -If the option were `"failing"` then property `additional1` would have been removed regardless of its value and property `additional2` would have been removed only if its value were failing the schema in the inner `additionalProperties` (so in the example above it would have stayed because it passes the schema, but any non-number would have been removed). - -__Please note__: If you use `removeAdditional` option with `additionalProperties` keyword inside `anyOf`/`oneOf` keywords your validation can fail with this schema, for example: - -```json -{ - "type": "object", - "oneOf": [ - { - "properties": { - "foo": { "type": "string" } - }, - "required": [ "foo" ], - "additionalProperties": false - }, - { - "properties": { - "bar": { "type": "integer" } - }, - "required": [ "bar" ], - "additionalProperties": false - } - ] -} -``` - -The intention of the schema above is to allow objects with either the string property "foo" or the integer property "bar", but not with both and not with any other properties. - -With the option `removeAdditional: true` the validation will pass for the object `{ "foo": "abc"}` but will fail for the object `{"bar": 1}`. It happens because while the first subschema in `oneOf` is validated, the property `bar` is removed because it is an additional property according to the standard (because it is not included in `properties` keyword in the same schema). - -While this behaviour is unexpected (issues [#129](https://github.com/ajv-validator/ajv/issues/129), [#134](https://github.com/ajv-validator/ajv/issues/134)), it is correct. 
To have the expected behaviour (both objects are allowed and additional properties are removed) the schema has to be refactored in this way: - -```json -{ - "type": "object", - "properties": { - "foo": { "type": "string" }, - "bar": { "type": "integer" } - }, - "additionalProperties": false, - "oneOf": [ - { "required": [ "foo" ] }, - { "required": [ "bar" ] } - ] -} -``` - -The schema above is also more efficient - it will compile into a faster function. - - -## Assigning defaults - -With [option `useDefaults`](#options) Ajv will assign values from `default` keyword in the schemas of `properties` and `items` (when it is the array of schemas) to the missing properties and items. - -With the option value `"empty"` properties and items equal to `null` or `""` (empty string) will be considered missing and assigned defaults. - -This option modifies original data. - -__Please note__: the default value is inserted in the generated validation code as a literal, so the value inserted in the data will be the deep clone of the default in the schema. - - -Example 1 (`default` in `properties`): - -```javascript -var ajv = new Ajv({ useDefaults: true }); -var schema = { - "type": "object", - "properties": { - "foo": { "type": "number" }, - "bar": { "type": "string", "default": "baz" } - }, - "required": [ "foo", "bar" ] -}; - -var data = { "foo": 1 }; - -var validate = ajv.compile(schema); - -console.log(validate(data)); // true -console.log(data); // { "foo": 1, "bar": "baz" } -``` - -Example 2 (`default` in `items`): - -```javascript -var schema = { - "type": "array", - "items": [ - { "type": "number" }, - { "type": "string", "default": "foo" } - ] -} - -var data = [ 1 ]; - -var validate = ajv.compile(schema); - -console.log(validate(data)); // true -console.log(data); // [ 1, "foo" ] -``` - -`default` keywords in other cases are ignored: - -- not in `properties` or `items` subschemas -- in schemas inside `anyOf`, `oneOf` and `not` (see [#42](https://github.com/ajv-validator/ajv/issues/42)) -- in `if` subschema of `switch` keyword -- in schemas generated by custom macro keywords - -The [`strictDefaults` option](#options) customizes Ajv's behavior for the defaults that Ajv ignores (`true` raises an error, and `"log"` outputs a warning). - - -## Coercing data types - -When you are validating user inputs all your data properties are usually strings. The option `coerceTypes` allows you to have your data types coerced to the types specified in your schema `type` keywords, both to pass the validation and to use the correctly typed data afterwards. - -This option modifies original data. - -__Please note__: if you pass a scalar value to the validating function its type will be coerced and it will pass the validation, but the value of the variable you pass won't be updated because scalars are passed by value. 
- - -Example 1: - -```javascript -var ajv = new Ajv({ coerceTypes: true }); -var schema = { - "type": "object", - "properties": { - "foo": { "type": "number" }, - "bar": { "type": "boolean" } - }, - "required": [ "foo", "bar" ] -}; - -var data = { "foo": "1", "bar": "false" }; - -var validate = ajv.compile(schema); - -console.log(validate(data)); // true -console.log(data); // { "foo": 1, "bar": false } -``` - -Example 2 (array coercions): - -```javascript -var ajv = new Ajv({ coerceTypes: 'array' }); -var schema = { - "properties": { - "foo": { "type": "array", "items": { "type": "number" } }, - "bar": { "type": "boolean" } - } -}; - -var data = { "foo": "1", "bar": ["false"] }; - -var validate = ajv.compile(schema); - -console.log(validate(data)); // true -console.log(data); // { "foo": [1], "bar": false } -``` - -The coercion rules, as you can see from the example, are different from JavaScript both to validate user input as expected and to have the coercion reversible (to correctly validate cases where different types are defined in subschemas of "anyOf" and other compound keywords). - -See [Coercion rules](https://github.com/ajv-validator/ajv/blob/master/COERCION.md) for details. - - -## API - -##### new Ajv(Object options) -> Object - -Create Ajv instance. - - -##### .compile(Object schema) -> Function<Object data> - -Generate validating function and cache the compiled schema for future use. - -Validating function returns a boolean value. This function has properties `errors` and `schema`. Errors encountered during the last validation are assigned to `errors` property (it is assigned `null` if there was no errors). `schema` property contains the reference to the original schema. - -The schema passed to this method will be validated against meta-schema unless `validateSchema` option is false. If schema is invalid, an error will be thrown. See [options](#options). - - -##### <a name="api-compileAsync"></a>.compileAsync(Object schema [, Boolean meta] [, Function callback]) -> Promise - -Asynchronous version of `compile` method that loads missing remote schemas using asynchronous function in `options.loadSchema`. This function returns a Promise that resolves to a validation function. An optional callback passed to `compileAsync` will be called with 2 parameters: error (or null) and validating function. The returned promise will reject (and the callback will be called with an error) when: - -- missing schema can't be loaded (`loadSchema` returns a Promise that rejects). -- a schema containing a missing reference is loaded, but the reference cannot be resolved. -- schema (or some loaded/referenced schema) is invalid. - -The function compiles schema and loads the first missing schema (or meta-schema) until all missing schemas are loaded. - -You can asynchronously compile meta-schema by passing `true` as the second parameter. - -See example in [Asynchronous compilation](#asynchronous-schema-compilation). - - -##### .validate(Object schema|String key|String ref, data) -> Boolean - -Validate data using passed schema (it will be compiled and cached). - -Instead of the schema you can use the key that was previously passed to `addSchema`, the schema id if it was present in the schema or any previously resolved reference. - -Validation errors will be available in the `errors` property of Ajv instance (`null` if there were no errors). - -__Please note__: every time this method is called the errors are overwritten so you need to copy them to another variable if you want to use them later. 
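A small sketch of the note above - copy `ajv.errors` before the next call overwrites it (the schema here is only an illustration):

```javascript
var Ajv = require('ajv');
var ajv = new Ajv();

var schema = { "type": "object", "required": ["foo"] };

ajv.validate(schema, {});              // false
var savedErrors = ajv.errors.slice();  // copy the errors before validating again

ajv.validate(schema, { foo: 1 });      // true - ajv.errors is now null
console.log(savedErrors[0].keyword);   // 'required'
```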
- -If the schema is asynchronous (has `$async` keyword on the top level) this method returns a Promise. See [Asynchronous validation](#asynchronous-validation). - - -##### .addSchema(Array<Object>|Object schema [, String key]) -> Ajv - -Add schema(s) to validator instance. This method does not compile schemas (but it still validates them). Because of that dependencies can be added in any order and circular dependencies are supported. It also prevents unnecessary compilation of schemas that are containers for other schemas but not used as a whole. - -Array of schemas can be passed (schemas should have ids), the second parameter will be ignored. - -Key can be passed that can be used to reference the schema and will be used as the schema id if there is no id inside the schema. If the key is not passed, the schema id will be used as the key. - - -Once the schema is added, it (and all the references inside it) can be referenced in other schemas and used to validate data. - -Although `addSchema` does not compile schemas, explicit compilation is not required - the schema will be compiled when it is used first time. - -By default the schema is validated against meta-schema before it is added, and if the schema does not pass validation the exception is thrown. This behaviour is controlled by `validateSchema` option. - -__Please note__: Ajv uses the [method chaining syntax](https://en.wikipedia.org/wiki/Method_chaining) for all methods with the prefix `add*` and `remove*`. -This allows you to do nice things like the following. - -```javascript -var validate = new Ajv().addSchema(schema).addFormat(name, regex).getSchema(uri); -``` - -##### .addMetaSchema(Array<Object>|Object schema [, String key]) -> Ajv - -Adds meta schema(s) that can be used to validate other schemas. That function should be used instead of `addSchema` because there may be instance options that would compile a meta schema incorrectly (at the moment it is `removeAdditional` option). - -There is no need to explicitly add draft-07 meta schema (http://json-schema.org/draft-07/schema) - it is added by default, unless option `meta` is set to `false`. You only need to use it if you have a changed meta-schema that you want to use to validate your schemas. See `validateSchema`. - - -##### <a name="api-validateschema"></a>.validateSchema(Object schema) -> Boolean - -Validates schema. This method should be used to validate schemas rather than `validate` due to the inconsistency of `uri` format in JSON Schema standard. - -By default this method is called automatically when the schema is added, so you rarely need to use it directly. - -If schema doesn't have `$schema` property, it is validated against draft 6 meta-schema (option `meta` should not be false). - -If schema has `$schema` property, then the schema with this id (that should be previously added) is used to validate passed schema. - -Errors will be available at `ajv.errors`. - - -##### .getSchema(String key) -> Function<Object data> - -Retrieve compiled schema previously added with `addSchema` by the key passed to `addSchema` or by its full reference (id). The returned validating function has `schema` property with the reference to the original schema. - - -##### .removeSchema([Object schema|String key|String ref|RegExp pattern]) -> Ajv - -Remove added/cached schema. Even if schema is referenced by other schemas it can be safely removed as dependent schemas have local references. 
- -Schema can be removed using: -- key passed to `addSchema` -- it's full reference (id) -- RegExp that should match schema id or key (meta-schemas won't be removed) -- actual schema object that will be stable-stringified to remove schema from cache - -If no parameter is passed all schemas but meta-schemas will be removed and the cache will be cleared. - - -##### <a name="api-addformat"></a>.addFormat(String name, String|RegExp|Function|Object format) -> Ajv - -Add custom format to validate strings or numbers. It can also be used to replace pre-defined formats for Ajv instance. - -Strings are converted to RegExp. - -Function should return validation result as `true` or `false`. - -If object is passed it should have properties `validate`, `compare` and `async`: - -- _validate_: a string, RegExp or a function as described above. -- _compare_: an optional comparison function that accepts two strings and compares them according to the format meaning. This function is used with keywords `formatMaximum`/`formatMinimum` (defined in [ajv-keywords](https://github.com/ajv-validator/ajv-keywords) package). It should return `1` if the first value is bigger than the second value, `-1` if it is smaller and `0` if it is equal. -- _async_: an optional `true` value if `validate` is an asynchronous function; in this case it should return a promise that resolves with a value `true` or `false`. -- _type_: an optional type of data that the format applies to. It can be `"string"` (default) or `"number"` (see https://github.com/ajv-validator/ajv/issues/291#issuecomment-259923858). If the type of data is different, the validation will pass. - -Custom formats can be also added via `formats` option. - - -##### <a name="api-addkeyword"></a>.addKeyword(String keyword, Object definition) -> Ajv - -Add custom validation keyword to Ajv instance. - -Keyword should be different from all standard JSON Schema keywords and different from previously defined keywords. There is no way to redefine keywords or to remove keyword definition from the instance. - -Keyword must start with a letter, `_` or `$`, and may continue with letters, numbers, `_`, `$`, or `-`. -It is recommended to use an application-specific prefix for keywords to avoid current and future name collisions. - -Example Keywords: -- `"xyz-example"`: valid, and uses prefix for the xyz project to avoid name collisions. -- `"example"`: valid, but not recommended as it could collide with future versions of JSON Schema etc. -- `"3-example"`: invalid as numbers are not allowed to be the first character in a keyword - -Keyword definition is an object with the following properties: - -- _type_: optional string or array of strings with data type(s) that the keyword applies to. If not present, the keyword will apply to all types. -- _validate_: validating function -- _compile_: compiling function -- _macro_: macro function -- _inline_: compiling function that returns code (as string) -- _schema_: an optional `false` value used with "validate" keyword to not pass schema -- _metaSchema_: an optional meta-schema for keyword schema -- _dependencies_: an optional list of properties that must be present in the parent schema - it will be checked during schema compilation -- _modifying_: `true` MUST be passed if keyword modifies data -- _statements_: `true` can be passed in case inline keyword generates statements (as opposed to expression) -- _valid_: pass `true`/`false` to pre-define validation result, the result returned from validation function will be ignored. 
This option cannot be used with macro keywords. -- _$data_: an optional `true` value to support [$data reference](#data-reference) as the value of custom keyword. The reference will be resolved at validation time. If the keyword has meta-schema it would be extended to allow $data and it will be used to validate the resolved value. Supporting $data reference requires that keyword has validating function (as the only option or in addition to compile, macro or inline function). -- _async_: an optional `true` value if the validation function is asynchronous (whether it is compiled or passed in _validate_ property); in this case it should return a promise that resolves with a value `true` or `false`. This option is ignored in case of "macro" and "inline" keywords. -- _errors_: an optional boolean or string `"full"` indicating whether keyword returns errors. If this property is not set Ajv will determine if the errors were set in case of failed validation. - -_compile_, _macro_ and _inline_ are mutually exclusive, only one should be used at a time. _validate_ can be used separately or in addition to them to support $data reference. - -__Please note__: If the keyword is validating data type that is different from the type(s) in its definition, the validation function will not be called (and expanded macro will not be used), so there is no need to check for data type inside validation function or inside schema returned by macro function (unless you want to enforce a specific type and for some reason do not want to use a separate `type` keyword for that). In the same way as standard keywords work, if the keyword does not apply to the data type being validated, the validation of this keyword will succeed. - -See [Defining custom keywords](#defining-custom-keywords) for more details. - - -##### .getKeyword(String keyword) -> Object|Boolean - -Returns custom keyword definition, `true` for pre-defined keywords and `false` if the keyword is unknown. - - -##### .removeKeyword(String keyword) -> Ajv - -Removes custom or pre-defined keyword so you can redefine them. - -While this method can be used to extend pre-defined keywords, it can also be used to completely change their meaning - it may lead to unexpected results. - -__Please note__: schemas compiled before the keyword is removed will continue to work without changes. To recompile schemas use `removeSchema` method and compile them again. - - -##### .errorsText([Array<Object> errors [, Object options]]) -> String - -Returns the text with all errors in a String. - -Options can have properties `separator` (string used to separate errors, ", " by default) and `dataVar` (the variable name that dataPaths are prefixed with, "data" by default). 
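For example (a sketch; the schema and the `dataVar` value are illustrative only):

```javascript
var Ajv = require('ajv');
var ajv = new Ajv({ allErrors: true });

var validate = ajv.compile({
  "type": "object",
  "properties": {
    "foo": { "type": "number" },
    "bar": { "type": "string" }
  },
  "required": ["foo", "bar"]
});

if (!validate({})) {
  // joins all error messages into a single string
  console.log(ajv.errorsText(validate.errors, { separator: '; ', dataVar: 'config' }));
}
```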
- - -## Options - -Defaults: - -```javascript -{ - // validation and reporting options: - $data: false, - allErrors: false, - verbose: false, - $comment: false, // NEW in Ajv version 6.0 - jsonPointers: false, - uniqueItems: true, - unicode: true, - nullable: false, - format: 'fast', - formats: {}, - unknownFormats: true, - schemas: {}, - logger: undefined, - // referenced schema options: - schemaId: '$id', - missingRefs: true, - extendRefs: 'ignore', // recommended 'fail' - loadSchema: undefined, // function(uri: string): Promise {} - // options to modify validated data: - removeAdditional: false, - useDefaults: false, - coerceTypes: false, - // strict mode options - strictDefaults: false, - strictKeywords: false, - strictNumbers: false, - // asynchronous validation options: - transpile: undefined, // requires ajv-async package - // advanced options: - meta: true, - validateSchema: true, - addUsedSchema: true, - inlineRefs: true, - passContext: false, - loopRequired: Infinity, - ownProperties: false, - multipleOfPrecision: false, - errorDataPath: 'object', // deprecated - messages: true, - sourceCode: false, - processCode: undefined, // function (str: string, schema: object): string {} - cache: new Cache, - serialize: undefined -} -``` - -##### Validation and reporting options - -- _$data_: support [$data references](#data-reference). Draft 6 meta-schema that is added by default will be extended to allow them. If you want to use another meta-schema you need to use $dataMetaSchema method to add support for $data reference. See [API](#api). -- _allErrors_: check all rules collecting all errors. Default is to return after the first error. -- _verbose_: include the reference to the part of the schema (`schema` and `parentSchema`) and validated data in errors (false by default). -- _$comment_ (NEW in Ajv version 6.0): log or pass the value of `$comment` keyword to a function. Option values: - - `false` (default): ignore $comment keyword. - - `true`: log the keyword value to console. - - function: pass the keyword value, its schema path and root schema to the specified function -- _jsonPointers_: set `dataPath` property of errors using [JSON Pointers](https://tools.ietf.org/html/rfc6901) instead of JavaScript property access notation. -- _uniqueItems_: validate `uniqueItems` keyword (true by default). -- _unicode_: calculate correct length of strings with unicode pairs (true by default). Pass `false` to use `.length` of strings that is faster, but gives "incorrect" lengths of strings with unicode pairs - each unicode pair is counted as two characters. -- _nullable_: support keyword "nullable" from [Open API 3 specification](https://swagger.io/docs/specification/data-models/data-types/). -- _format_: formats validation mode. Option values: - - `"fast"` (default) - simplified and fast validation (see [Formats](#formats) for details of which formats are available and affected by this option). - - `"full"` - more restrictive and slow validation. E.g., 25:00:00 and 2015/14/33 will be invalid time and date in 'full' mode but it will be valid in 'fast' mode. - - `false` - ignore all format keywords. -- _formats_: an object with custom formats. Keys and values will be passed to `addFormat` method. -- _keywords_: an object with custom keywords. Keys and values will be passed to `addKeyword` method. -- _unknownFormats_: handling of unknown formats. Option values: - - `true` (default) - if an unknown format is encountered the exception is thrown during schema compilation. 
If `format` keyword value is [$data reference](#data-reference) and it is unknown the validation will fail. - - `[String]` - an array of unknown format names that will be ignored. This option can be used to allow usage of third party schemas with format(s) for which you don't have definitions, but still fail if another unknown format is used. If `format` keyword value is [$data reference](#data-reference) and it is not in this array the validation will fail. - - `"ignore"` - to log warning during schema compilation and always pass validation (the default behaviour in versions before 5.0.0). This option is not recommended, as it allows to mistype format name and it won't be validated without any error message. This behaviour is required by JSON Schema specification. -- _schemas_: an array or object of schemas that will be added to the instance. In case you pass the array the schemas must have IDs in them. When the object is passed the method `addSchema(value, key)` will be called for each schema in this object. -- _logger_: sets the logging method. Default is the global `console` object that should have methods `log`, `warn` and `error`. See [Error logging](#error-logging). Option values: - - custom logger - it should have methods `log`, `warn` and `error`. If any of these methods is missing an exception will be thrown. - - `false` - logging is disabled. - - -##### Referenced schema options - -- _schemaId_: this option defines which keywords are used as schema URI. Option value: - - `"$id"` (default) - only use `$id` keyword as schema URI (as specified in JSON Schema draft-06/07), ignore `id` keyword (if it is present a warning will be logged). - - `"id"` - only use `id` keyword as schema URI (as specified in JSON Schema draft-04), ignore `$id` keyword (if it is present a warning will be logged). - - `"auto"` - use both `$id` and `id` keywords as schema URI. If both are present (in the same schema object) and different the exception will be thrown during schema compilation. -- _missingRefs_: handling of missing referenced schemas. Option values: - - `true` (default) - if the reference cannot be resolved during compilation the exception is thrown. The thrown error has properties `missingRef` (with hash fragment) and `missingSchema` (without it). Both properties are resolved relative to the current base id (usually schema id, unless it was substituted). - - `"ignore"` - to log error during compilation and always pass validation. - - `"fail"` - to log error and successfully compile schema but fail validation if this rule is checked. -- _extendRefs_: validation of other keywords when `$ref` is present in the schema. Option values: - - `"ignore"` (default) - when `$ref` is used other keywords are ignored (as per [JSON Reference](https://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03#section-3) standard). A warning will be logged during the schema compilation. - - `"fail"` (recommended) - if other validation keywords are used together with `$ref` the exception will be thrown when the schema is compiled. This option is recommended to make sure schema has no keywords that are ignored, which can be confusing. - - `true` - validate all keywords in the schemas with `$ref` (the default behaviour in versions before 5.0.0). -- _loadSchema_: asynchronous function that will be used to load remote schemas when `compileAsync` [method](#api-compileAsync) is used and some reference is missing (option `missingRefs` should NOT be 'fail' or 'ignore'). 
This function should accept remote schema uri as a parameter and return a Promise that resolves to a schema. See example in [Asynchronous compilation](#asynchronous-schema-compilation). - - -##### Options to modify validated data - -- _removeAdditional_: remove additional properties - see example in [Filtering data](#filtering-data). This option is not used if schema is added with `addMetaSchema` method. Option values: - - `false` (default) - not to remove additional properties - - `"all"` - all additional properties are removed, regardless of `additionalProperties` keyword in schema (and no validation is made for them). - - `true` - only additional properties with `additionalProperties` keyword equal to `false` are removed. - - `"failing"` - additional properties that fail schema validation will be removed (where `additionalProperties` keyword is `false` or schema). -- _useDefaults_: replace missing or undefined properties and items with the values from corresponding `default` keywords. Default behaviour is to ignore `default` keywords. This option is not used if schema is added with `addMetaSchema` method. See examples in [Assigning defaults](#assigning-defaults). Option values: - - `false` (default) - do not use defaults - - `true` - insert defaults by value (object literal is used). - - `"empty"` - in addition to missing or undefined, use defaults for properties and items that are equal to `null` or `""` (an empty string). - - `"shared"` (deprecated) - insert defaults by reference. If the default is an object, it will be shared by all instances of validated data. If you modify the inserted default in the validated data, it will be modified in the schema as well. -- _coerceTypes_: change data type of data to match `type` keyword. See the example in [Coercing data types](#coercing-data-types) and [coercion rules](https://github.com/ajv-validator/ajv/blob/master/COERCION.md). Option values: - - `false` (default) - no type coercion. - - `true` - coerce scalar data types. - - `"array"` - in addition to coercions between scalar types, coerce scalar data to an array with one element and vice versa (as required by the schema). - - -##### Strict mode options - -- _strictDefaults_: report ignored `default` keywords in schemas. Option values: - - `false` (default) - ignored defaults are not reported - - `true` - if an ignored default is present, throw an error - - `"log"` - if an ignored default is present, log warning -- _strictKeywords_: report unknown keywords in schemas. Option values: - - `false` (default) - unknown keywords are not reported - - `true` - if an unknown keyword is present, throw an error - - `"log"` - if an unknown keyword is present, log warning -- _strictNumbers_: validate numbers strictly, failing validation for NaN and Infinity. Option values: - - `false` (default) - NaN or Infinity will pass validation for numeric types - - `true` - NaN or Infinity will not pass validation for numeric types - -##### Asynchronous validation options - -- _transpile_: Requires [ajv-async](https://github.com/ajv-validator/ajv-async) package. It determines whether Ajv transpiles compiled asynchronous validation function. Option values: - - `undefined` (default) - transpile with [nodent](https://github.com/MatAtBread/nodent) if async functions are not supported. - - `true` - always transpile with nodent. - - `false` - do not transpile; if async functions are not supported an exception will be thrown. 
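Regardless of which groups of options you use, they are all passed together to the constructor. For example (a sketch combining only options documented in this section):

```javascript
var Ajv = require('ajv');

var ajv = new Ajv({
  allErrors: true,         // collect all errors instead of returning after the first one
  jsonPointers: true,      // report dataPath of errors as JSON Pointers
  removeAdditional: true,  // remove properties failing `additionalProperties: false`
  useDefaults: true,       // assign values from `default` keywords to missing properties
  coerceTypes: true        // coerce data types to the schema `type` keywords
});
```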
- - -##### Advanced options - -- _meta_: add [meta-schema](http://json-schema.org/documentation.html) so it can be used by other schemas (true by default). If an object is passed, it will be used as the default meta-schema for schemas that have no `$schema` keyword. This default meta-schema MUST have `$schema` keyword. -- _validateSchema_: validate added/compiled schemas against meta-schema (true by default). `$schema` property in the schema can be http://json-schema.org/draft-07/schema or absent (draft-07 meta-schema will be used) or can be a reference to the schema previously added with `addMetaSchema` method. Option values: - - `true` (default) - if the validation fails, throw the exception. - - `"log"` - if the validation fails, log error. - - `false` - skip schema validation. -- _addUsedSchema_: by default methods `compile` and `validate` add schemas to the instance if they have `$id` (or `id`) property that doesn't start with "#". If `$id` is present and it is not unique the exception will be thrown. Set this option to `false` to skip adding schemas to the instance and the `$id` uniqueness check when these methods are used. This option does not affect `addSchema` method. -- _inlineRefs_: Affects compilation of referenced schemas. Option values: - - `true` (default) - the referenced schemas that don't have refs in them are inlined, regardless of their size - that substantially improves performance at the cost of the bigger size of compiled schema functions. - - `false` - to not inline referenced schemas (they will be compiled as separate functions). - - integer number - to limit the maximum number of keywords of the schema that will be inlined. -- _passContext_: pass validation context to custom keyword functions. If this option is `true` and you pass some context to the compiled validation function with `validate.call(context, data)`, the `context` will be available as `this` in your custom keywords. By default `this` is Ajv instance. -- _loopRequired_: by default `required` keyword is compiled into a single expression (or a sequence of statements in `allErrors` mode). In case of a very large number of properties in this keyword it may result in a very big validation function. Pass integer to set the number of properties above which `required` keyword will be validated in a loop - smaller validation function size but also worse performance. -- _ownProperties_: by default Ajv iterates over all enumerable object properties; when this option is `true` only own enumerable object properties (i.e. found directly on the object rather than on its prototype) are iterated. Contributed by @mbroadst. -- _multipleOfPrecision_: by default `multipleOf` keyword is validated by comparing the result of division with parseInt() of that result. It works for dividers that are bigger than 1. For small dividers such as 0.01 the result of the division is usually not integer (even when it should be integer, see issue [#84](https://github.com/ajv-validator/ajv/issues/84)). If you need to use fractional dividers set this option to some positive integer N to have `multipleOf` validated using this formula: `Math.abs(Math.round(division) - division) < 1e-N` (it is slower but allows for float arithmetics deviations). -- _errorDataPath_ (deprecated): set `dataPath` to point to 'object' (default) or to 'property' when validating keywords `required`, `additionalProperties` and `dependencies`. -- _messages_: Include human-readable messages in errors. `true` by default. 
`false` can be passed when custom messages are used (e.g. with [ajv-i18n](https://github.com/ajv-validator/ajv-i18n)). -- _sourceCode_: add `sourceCode` property to validating function (for debugging; this code can be different from the result of toString call). -- _processCode_: an optional function to process generated code before it is passed to Function constructor. It can be used to either beautify (the validating function is generated without line-breaks) or to transpile code. Starting from version 5.0.0 this option replaced options: - - `beautify` that formatted the generated function using [js-beautify](https://github.com/beautify-web/js-beautify). If you want to beautify the generated code pass a function calling `require('js-beautify').js_beautify` as `processCode: code => js_beautify(code)`. - - `transpile` that transpiled asynchronous validation function. You can still use `transpile` option with [ajv-async](https://github.com/ajv-validator/ajv-async) package. See [Asynchronous validation](#asynchronous-validation) for more information. -- _cache_: an optional instance of cache to store compiled schemas using stable-stringified schema as a key. For example, set-associative cache [sacjs](https://github.com/epoberezkin/sacjs) can be used. If not passed then a simple hash is used which is good enough for the common use case (a limited number of statically defined schemas). Cache should have methods `put(key, value)`, `get(key)`, `del(key)` and `clear()`. -- _serialize_: an optional function to serialize schema to cache key. Pass `false` to use schema itself as a key (e.g., if WeakMap used as a cache). By default [fast-json-stable-stringify](https://github.com/epoberezkin/fast-json-stable-stringify) is used. - - -## Validation errors - -In case of validation failure, Ajv assigns the array of errors to `errors` property of validation function (or to `errors` property of Ajv instance when `validate` or `validateSchema` methods were called). In case of [asynchronous validation](#asynchronous-validation), the returned promise is rejected with exception `Ajv.ValidationError` that has `errors` property. - - -### Error objects - -Each error is an object with the following properties: - -- _keyword_: validation keyword. -- _dataPath_: the path to the part of the data that was validated. By default `dataPath` uses JavaScript property access notation (e.g., `".prop[1].subProp"`). When the option `jsonPointers` is true (see [Options](#options)) `dataPath` will be set using JSON pointer standard (e.g., `"/prop/1/subProp"`). -- _schemaPath_: the path (JSON-pointer as a URI fragment) to the schema of the keyword that failed validation. -- _params_: the object with the additional information about error that can be used to create custom error messages (e.g., using [ajv-i18n](https://github.com/ajv-validator/ajv-i18n) package). See below for parameters set by all keywords. -- _message_: the standard error message (can be excluded with option `messages` set to false). -- _schema_: the schema of the keyword (added with `verbose` option). -- _parentSchema_: the schema containing the keyword (added with `verbose` option) -- _data_: the data validated by the keyword (added with `verbose` option). - -__Please note__: `propertyNames` keyword schema validation errors have an additional property `propertyName`, `dataPath` points to the object. After schema validation for each property name, if it is invalid an additional error is added with the property `keyword` equal to `"propertyNames"`. 
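For example, a failing validation might produce an error object with these properties (a sketch; the exact `message` strings are not shown as they may vary between versions):

```javascript
var Ajv = require('ajv');
var ajv = new Ajv({ jsonPointers: true });

var validate = ajv.compile({
  "type": "object",
  "properties": {
    "prop": { "type": "array", "items": { "type": "number" } }
  }
});

validate({ prop: [1, 'x'] }); // false

var err = validate.errors[0];
console.log(err.keyword);    // 'type'
console.log(err.dataPath);   // '/prop/1' (JSON Pointer because of jsonPointers: true)
console.log(err.schemaPath); // '#/properties/prop/items/type'
console.log(err.params);     // { type: 'number' }
```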
- - -### Error parameters - -Properties of `params` object in errors depend on the keyword that failed validation. - -- `maxItems`, `minItems`, `maxLength`, `minLength`, `maxProperties`, `minProperties` - property `limit` (number, the schema of the keyword). -- `additionalItems` - property `limit` (the maximum number of allowed items in case when `items` keyword is an array of schemas and `additionalItems` is false). -- `additionalProperties` - property `additionalProperty` (the property not used in `properties` and `patternProperties` keywords). -- `dependencies` - properties: - - `property` (dependent property), - - `missingProperty` (required missing dependency - only the first one is reported currently) - - `deps` (required dependencies, comma separated list as a string), - - `depsCount` (the number of required dependencies). -- `format` - property `format` (the schema of the keyword). -- `maximum`, `minimum` - properties: - - `limit` (number, the schema of the keyword), - - `exclusive` (boolean, the schema of `exclusiveMaximum` or `exclusiveMinimum`), - - `comparison` (string, comparison operation to compare the data to the limit, with the data on the left and the limit on the right; can be "<", "<=", ">", ">=") -- `multipleOf` - property `multipleOf` (the schema of the keyword) -- `pattern` - property `pattern` (the schema of the keyword) -- `required` - property `missingProperty` (required property that is missing). -- `propertyNames` - property `propertyName` (an invalid property name). -- `patternRequired` (in ajv-keywords) - property `missingPattern` (required pattern that did not match any property). -- `type` - property `type` (required type(s), a string, can be a comma-separated list) -- `uniqueItems` - properties `i` and `j` (indices of duplicate items). -- `const` - property `allowedValue` pointing to the value (the schema of the keyword). -- `enum` - property `allowedValues` pointing to the array of values (the schema of the keyword). -- `$ref` - property `ref` with the referenced schema URI. -- `oneOf` - property `passingSchemas` (array of indices of passing schemas, null if no schema passes). -- custom keywords (in case keyword definition doesn't create errors) - property `keyword` (the keyword name). - - -### Error logging - -Using the `logger` option when initiallizing Ajv will allow you to define custom logging. Here you can build upon the exisiting logging. The use of other logging packages is supported as long as the package or its associated wrapper exposes the required methods. If any of the required methods are missing an exception will be thrown. -- **Required Methods**: `log`, `warn`, `error` - -```javascript -var otherLogger = new OtherLogger(); -var ajv = new Ajv({ - logger: { - log: console.log.bind(console), - warn: function warn() { - otherLogger.logWarn.apply(otherLogger, arguments); - }, - error: function error() { - otherLogger.logError.apply(otherLogger, arguments); - console.error.apply(console, arguments); - } - } -}); -``` - - -## Plugins - -Ajv can be extended with plugins that add custom keywords, formats or functions to process generated code. When such plugin is published as npm package it is recommended that it follows these conventions: - -- it exports a function -- this function accepts ajv instance as the first parameter and returns the same instance to allow chaining -- this function can accept an optional configuration as the second parameter - -If you have published a useful plugin please submit a PR to add it to the next section. 
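A minimal plugin skeleton following these conventions might look like this (the package name and the keyword it adds are purely illustrative):

```javascript
// my-ajv-plugin.js (hypothetical package name)
module.exports = function myPlugin(ajv, options) {
  options = options || {};

  // add an illustrative custom keyword
  ajv.addKeyword('even', {
    type: 'number',
    validate: function (schema, data) {
      return schema ? data % 2 === 0 : true;
    }
  });

  return ajv; // return the instance to allow chaining
};
```

Usage would then be `require('my-ajv-plugin')(ajv, { /* optional config */ })`.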
- - -## Related packages - -- [ajv-async](https://github.com/ajv-validator/ajv-async) - plugin to configure async validation mode -- [ajv-bsontype](https://github.com/BoLaMN/ajv-bsontype) - plugin to validate mongodb's bsonType formats -- [ajv-cli](https://github.com/jessedc/ajv-cli) - command line interface -- [ajv-errors](https://github.com/ajv-validator/ajv-errors) - plugin for custom error messages -- [ajv-i18n](https://github.com/ajv-validator/ajv-i18n) - internationalised error messages -- [ajv-istanbul](https://github.com/ajv-validator/ajv-istanbul) - plugin to instrument generated validation code to measure test coverage of your schemas -- [ajv-keywords](https://github.com/ajv-validator/ajv-keywords) - plugin with custom validation keywords (select, typeof, etc.) -- [ajv-merge-patch](https://github.com/ajv-validator/ajv-merge-patch) - plugin with keywords $merge and $patch -- [ajv-pack](https://github.com/ajv-validator/ajv-pack) - produces a compact module exporting validation functions -- [ajv-formats-draft2019](https://github.com/luzlab/ajv-formats-draft2019) - format validators for draft2019 that aren't already included in ajv (ie. `idn-hostname`, `idn-email`, `iri`, `iri-reference` and `duration`). - -## Some packages using Ajv - -- [webpack](https://github.com/webpack/webpack) - a module bundler. Its main purpose is to bundle JavaScript files for usage in a browser -- [jsonscript-js](https://github.com/JSONScript/jsonscript-js) - the interpreter for [JSONScript](http://www.jsonscript.org) - scripted processing of existing endpoints and services -- [osprey-method-handler](https://github.com/mulesoft-labs/osprey-method-handler) - Express middleware for validating requests and responses based on a RAML method object, used in [osprey](https://github.com/mulesoft/osprey) - validating API proxy generated from a RAML definition -- [har-validator](https://github.com/ahmadnassri/har-validator) - HTTP Archive (HAR) validator -- [jsoneditor](https://github.com/josdejong/jsoneditor) - a web-based tool to view, edit, format, and validate JSON http://jsoneditoronline.org -- [JSON Schema Lint](https://github.com/nickcmaynard/jsonschemalint) - a web tool to validate JSON/YAML document against a single JSON Schema http://jsonschemalint.com -- [objection](https://github.com/vincit/objection.js) - SQL-friendly ORM for Node.js -- [table](https://github.com/gajus/table) - formats data into a string table -- [ripple-lib](https://github.com/ripple/ripple-lib) - a JavaScript API for interacting with [Ripple](https://ripple.com) in Node.js and the browser -- [restbase](https://github.com/wikimedia/restbase) - distributed storage with REST API & dispatcher for backend services built to provide a low-latency & high-throughput API for Wikipedia / Wikimedia content -- [hippie-swagger](https://github.com/CacheControl/hippie-swagger) - [Hippie](https://github.com/vesln/hippie) wrapper that provides end to end API testing with swagger validation -- [react-form-controlled](https://github.com/seeden/react-form-controlled) - React controlled form components with validation -- [rabbitmq-schema](https://github.com/tjmehta/rabbitmq-schema) - a schema definition module for RabbitMQ graphs and messages -- [@query/schema](https://www.npmjs.com/package/@query/schema) - stream filtering with a URI-safe query syntax parsing to JSON Schema -- [chai-ajv-json-schema](https://github.com/peon374/chai-ajv-json-schema) - chai plugin to us JSON Schema with expect in mocha tests -- 
[grunt-jsonschema-ajv](https://github.com/SignpostMarv/grunt-jsonschema-ajv) - Grunt plugin for validating files against JSON Schema -- [extract-text-webpack-plugin](https://github.com/webpack-contrib/extract-text-webpack-plugin) - extract text from bundle into a file -- [electron-builder](https://github.com/electron-userland/electron-builder) - a solution to package and build a ready for distribution Electron app -- [addons-linter](https://github.com/mozilla/addons-linter) - Mozilla Add-ons Linter -- [gh-pages-generator](https://github.com/epoberezkin/gh-pages-generator) - multi-page site generator converting markdown files to GitHub pages -- [ESLint](https://github.com/eslint/eslint) - the pluggable linting utility for JavaScript and JSX - - -## Tests - -``` -npm install -git submodule update --init -npm test -``` - -## Contributing - -All validation functions are generated using doT templates in [dot](https://github.com/ajv-validator/ajv/tree/master/lib/dot) folder. Templates are precompiled so doT is not a run-time dependency. - -`npm run build` - compiles templates to [dotjs](https://github.com/ajv-validator/ajv/tree/master/lib/dotjs) folder. - -`npm run watch` - automatically compiles templates when files in dot folder change - -Please see [Contributing guidelines](https://github.com/ajv-validator/ajv/blob/master/CONTRIBUTING.md) - - -## Changes history - -See https://github.com/ajv-validator/ajv/releases - -__Please note__: [Changes in version 7.0.0-beta](https://github.com/ajv-validator/ajv/releases/tag/v7.0.0-beta.0) - -[Version 6.0.0](https://github.com/ajv-validator/ajv/releases/tag/v6.0.0). - -## Code of conduct - -Please review and follow the [Code of conduct](https://github.com/ajv-validator/ajv/blob/master/CODE_OF_CONDUCT.md). - -Please report any unacceptable behaviour to ajv.validator@gmail.com - it will be reviewed by the project team. - - -## Open-source software support - -Ajv is a part of [Tidelift subscription](https://tidelift.com/subscription/pkg/npm-ajv?utm_source=npm-ajv&utm_medium=referral&utm_campaign=readme) - it provides a centralised support to open-source software users, in addition to the support provided by software maintainers. - - -## License - -[MIT](https://github.com/ajv-validator/ajv/blob/master/LICENSE) diff --git a/node_modules/ajv/lib/dotjs/README.md b/node_modules/ajv/lib/dotjs/README.md deleted file mode 100644 index 4d994846c81f6..0000000000000 --- a/node_modules/ajv/lib/dotjs/README.md +++ /dev/null @@ -1,3 +0,0 @@ -These files are compiled dot templates from dot folder. - -Do NOT edit them directly, edit the templates and run `npm run build` from main ajv folder. diff --git a/node_modules/ajv/scripts/.eslintrc.yml b/node_modules/ajv/scripts/.eslintrc.yml deleted file mode 100644 index 493d7d312d429..0000000000000 --- a/node_modules/ajv/scripts/.eslintrc.yml +++ /dev/null @@ -1,3 +0,0 @@ -rules: - no-console: 0 - no-empty: [2, allowEmptyCatch: true] diff --git a/node_modules/ansicolors/README.md b/node_modules/ansicolors/README.md deleted file mode 100644 index f3e9d070b25a9..0000000000000 --- a/node_modules/ansicolors/README.md +++ /dev/null @@ -1,62 +0,0 @@ -# ansicolors [![build status](https://secure.travis-ci.org/thlorenz/ansicolors.png)](http://next.travis-ci.org/thlorenz/ansicolors) - -Functions that surround a string with ansicolor codes so it prints in color. - -In case you need styles, like `bold`, have a look at [ansistyles](https://github.com/thlorenz/ansistyles). 
- -## Installation - - npm install ansicolors - -## Usage - -```js -var colors = require('ansicolors'); - -// foreground colors -var redHerring = colors.red('herring'); -var blueMoon = colors.blue('moon'); -var brighBlueMoon = colors.brightBlue('moon'); - -console.log(redHerring); // this will print 'herring' in red -console.log(blueMoon); // this 'moon' in blue -console.log(brightBlueMoon); // I think you got the idea - -// background colors -console.log(colors.bgYellow('printed on yellow background')); -console.log(colors.bgBrightBlue('printed on bright blue background')); - -// mixing background and foreground colors -// below two lines have same result (order in which bg and fg are combined doesn't matter) -console.log(colors.bgYellow(colors.blue('printed on yellow background in blue'))); -console.log(colors.blue(colors.bgYellow('printed on yellow background in blue'))); -``` - -## Advanced API - -**ansicolors** allows you to access opening and closing escape sequences separately. - -```js -var colors = require('ansicolors'); - -function inspect(obj, depth) { - return require('util').inspect(obj, false, depth || 5, true); -} - -console.log('open blue', inspect(colors.open.blue)); -console.log('close bgBlack', inspect(colors.close.bgBlack)); - -// => open blue '\u001b[34m' -// close bgBlack '\u001b[49m' -``` - -## Tests - -Look at the [tests](https://github.com/thlorenz/ansicolors/blob/master/test/ansicolors.js) to see more examples and/or run them via: - - npm explore ansicolors && npm test - -## Alternatives - -**ansicolors** tries to meet simple use cases with a very simple API. However, if you need a more powerful ansi formatting tool, -I'd suggest to look at the [features](https://github.com/TooTallNate/ansi.js#features) of the [ansi module](https://github.com/TooTallNate/ansi.js). diff --git a/node_modules/ansistyles/README.md b/node_modules/ansistyles/README.md deleted file mode 100644 index e39b8dfb6d827..0000000000000 --- a/node_modules/ansistyles/README.md +++ /dev/null @@ -1,71 +0,0 @@ -# ansistyles [![build status](https://secure.travis-ci.org/thlorenz/ansistyles.png)](http://next.travis-ci.org/thlorenz/ansistyles) - -Functions that surround a string with ansistyle codes so it prints in style. - -In case you need colors, like `red`, have a look at [ansicolors](https://github.com/thlorenz/ansicolors). - -## Installation - - npm install ansistyles - -## Usage - -```js -var styles = require('ansistyles'); - -console.log(styles.bright('hello world')); // prints hello world in 'bright' white -console.log(styles.underline('hello world')); // prints hello world underlined -console.log(styles.inverse('hello world')); // prints hello world black on white -``` - -## Combining with ansicolors - -Get the ansicolors module: - - npm install ansicolors - -```js -var styles = require('ansistyles') - , colors = require('ansicolors'); - - console.log( - // prints hello world underlined in blue on a green background - colors.bgGreen(colors.blue(styles.underline('hello world'))) - ); -``` - -## Tests - -Look at the [tests](https://github.com/thlorenz/ansistyles/blob/master/test/ansistyles.js) to see more examples and/or run them via: - - npm explore ansistyles && npm test - -## More Styles - -As you can see from [here](https://github.com/thlorenz/ansistyles/blob/master/ansistyles.js#L4-L15), more styles are available, -but didn't have any effect on the terminals that I tested on Mac Lion and Ubuntu Linux. 
- -I included them for completeness, but didn't show them in the examples because they seem to have no effect. - -### reset - -A style reset function is also included, please note however that this is not nestable. - -Therefore the below only underlines `hell` only, but not `world`. - -```js -console.log(styles.underline('hell' + styles.reset('o') + ' world')); -``` - -It is essentially the same as: - -```js -console.log(styles.underline('hell') + styles.reset('') + 'o world'); -``` - - - -## Alternatives - -**ansistyles** tries to meet simple use cases with a very simple API. However, if you need a more powerful ansi formatting tool, -I'd suggest to look at the [features](https://github.com/TooTallNate/ansi.js#features) of the [ansi module](https://github.com/TooTallNate/ansi.js). diff --git a/node_modules/aproba/CHANGELOG.md b/node_modules/aproba/CHANGELOG.md deleted file mode 100644 index bab30ecb7e625..0000000000000 --- a/node_modules/aproba/CHANGELOG.md +++ /dev/null @@ -1,4 +0,0 @@ -2.0.0 - * Drop support for 0.10 and 0.12. They haven't been in travis but still, - since we _know_ we'll break with them now it's only polite to do a - major bump. diff --git a/node_modules/aproba/README.md b/node_modules/aproba/README.md deleted file mode 100644 index 0bfc594c56a37..0000000000000 --- a/node_modules/aproba/README.md +++ /dev/null @@ -1,94 +0,0 @@ -aproba -====== - -A ridiculously light-weight function argument validator - -``` -var validate = require("aproba") - -function myfunc(a, b, c) { - // `a` must be a string, `b` a number, `c` a function - validate('SNF', arguments) // [a,b,c] is also valid -} - -myfunc('test', 23, function () {}) // ok -myfunc(123, 23, function () {}) // type error -myfunc('test', 23) // missing arg error -myfunc('test', 23, function () {}, true) // too many args error - -``` - -Valid types are: - -| type | description -| :--: | :---------- -| * | matches any type -| A | `Array.isArray` OR an `arguments` object -| S | typeof == string -| N | typeof == number -| F | typeof == function -| O | typeof == object and not type A and not type E -| B | typeof == boolean -| E | `instanceof Error` OR `null` **(special: see below)** -| Z | == `null` - -Validation failures throw one of three exception types, distinguished by a -`code` property of `EMISSINGARG`, `EINVALIDTYPE` or `ETOOMANYARGS`. - -If you pass in an invalid type then it will throw with a code of -`EUNKNOWNTYPE`. - -If an **error** argument is found and is not null then the remaining -arguments are optional. That is, if you say `ESO` then that's like using a -non-magical `E` in: `E|ESO|ZSO`. - -### But I have optional arguments?! - -You can provide more than one signature by separating them with pipes `|`. -If any signature matches the arguments then they'll be considered valid. - -So for example, say you wanted to write a signature for -`fs.createWriteStream`. The docs for it describe it thusly: - -``` -fs.createWriteStream(path[, options]) -``` - -This would be a signature of `SO|S`. That is, a string and and object, or -just a string. - -Now, if you read the full `fs` docs, you'll see that actually path can ALSO -be a buffer. And options can be a string, that is: -``` -path <String> | <Buffer> -options <String> | <Object> -``` - -To reproduce this you have to fully enumerate all of the possible -combinations and that implies a signature of `SO|SS|OO|OS|S|O`. The -awkwardness is a feature: It reminds you of the complexity you're adding to -your API when you do this sort of thing. 
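A minimal sketch of how the multi-signature form above might be used (the `createWriteStream` wrapper is purely illustrative, not part of aproba):

```
var validate = require("aproba")

// path may be a string or a Buffer ("O"), options may be a string or an object
function createWriteStream(path, options) {
  validate('SO|SS|OO|OS|S|O', arguments)
  // ... real work would go here
}

createWriteStream('out.txt')                 // ok, matches 'S'
createWriteStream('out.txt', { flags: 'w' }) // ok, matches 'SO'
createWriteStream(42)                        // throws: 42 matches none of the signatures
```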
- - -### Browser support - -This has no dependencies and should work in browsers, though you'll have -noisier stack traces. - -### Why this exists - -I wanted a very simple argument validator. It needed to do two things: - -1. Be more concise and easier to use than assertions - -2. Not encourage an infinite bikeshed of DSLs - -This is why types are specified by a single character and there's no such -thing as an optional argument. - -This is not intended to validate user data. This is specifically about -asserting the interface of your functions. - -If you need greater validation, I encourage you to write them by hand or -look elsewhere. - diff --git a/node_modules/archy/.travis.yml b/node_modules/archy/.travis.yml deleted file mode 100644 index 895dbd3623421..0000000000000 --- a/node_modules/archy/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -language: node_js -node_js: - - 0.6 - - 0.8 diff --git a/node_modules/archy/README.markdown b/node_modules/archy/README.markdown deleted file mode 100644 index ef7a5cf34be1f..0000000000000 --- a/node_modules/archy/README.markdown +++ /dev/null @@ -1,88 +0,0 @@ -# archy - -Render nested hierarchies `npm ls` style with unicode pipes. - -[![browser support](http://ci.testling.com/substack/node-archy.png)](http://ci.testling.com/substack/node-archy) - -[![build status](https://secure.travis-ci.org/substack/node-archy.png)](http://travis-ci.org/substack/node-archy) - -# example - -``` js -var archy = require('archy'); -var s = archy({ - label : 'beep', - nodes : [ - 'ity', - { - label : 'boop', - nodes : [ - { - label : 'o_O', - nodes : [ - { - label : 'oh', - nodes : [ 'hello', 'puny' ] - }, - 'human' - ] - }, - 'party\ntime!' - ] - } - ] -}); -console.log(s); -``` - -output - -``` -beep -├── ity -└─┬ boop - ├─┬ o_O - │ ├─┬ oh - │ │ ├── hello - │ │ └── puny - │ └── human - └── party - time! -``` - -# methods - -var archy = require('archy') - -## archy(obj, prefix='', opts={}) - -Return a string representation of `obj` with unicode pipe characters like how -`npm ls` looks. - -`obj` should be a tree of nested objects with `'label'` and `'nodes'` fields. -`'label'` is a string of text to display at a node level and `'nodes'` is an -array of the descendents of the current node. - -If a node is a string, that string will be used as the `'label'` and an empty -array of `'nodes'` will be used. - -`prefix` gets prepended to all the lines and is used by the algorithm to -recursively update. - -If `'label'` has newlines they will be indented at the present indentation level -with the current prefix. - -To disable unicode results in favor of all-ansi output set `opts.unicode` to -`false`. - -# install - -With [npm](http://npmjs.org) do: - -``` -npm install archy -``` - -# license - -MIT diff --git a/node_modules/are-we-there-yet/README.md b/node_modules/are-we-there-yet/README.md deleted file mode 100644 index 7e2b42d866bd5..0000000000000 --- a/node_modules/are-we-there-yet/README.md +++ /dev/null @@ -1,195 +0,0 @@ -are-we-there-yet ----------------- - -Track complex hiearchies of asynchronous task completion statuses. This is -intended to give you a way of recording and reporting the progress of the big -recursive fan-out and gather type workflows that are so common in async. - -What you do with this completion data is up to you, but the most common use case is to -feed it to one of the many progress bar modules. - -Most progress bar modules include a rudamentary version of this, but my -needs were more complex. 
- -Usage -===== - -```javascript -var TrackerGroup = require("are-we-there-yet").TrackerGroup - -var top = new TrackerGroup("program") - -var single = top.newItem("one thing", 100) -single.completeWork(20) - -console.log(top.completed()) // 0.2 - -fs.stat("file", function(er, stat) { - if (er) throw er - var stream = top.newStream("file", stat.size) - console.log(top.completed()) // now 0.1 as single is 50% of the job and is 20% complete - // and 50% * 20% == 10% - fs.createReadStream("file").pipe(stream).on("data", function (chunk) { - // do stuff with chunk - }) - top.on("change", function (name) { - // called each time a chunk is read from "file" - // top.completed() will start at 0.1 and fill up to 0.6 as the file is read - }) -}) -``` - -Shared Methods -============== - -* var completed = tracker.completed() - -Implemented in: `Tracker`, `TrackerGroup`, `TrackerStream` - -Returns the ratio of completed work to work to be done. Range of 0 to 1. - -* tracker.finish() - -Implemented in: `Tracker`, `TrackerGroup` - -Marks the tracker as completed. With a TrackerGroup this marks all of its -components as completed. - -Marks all of the components of this tracker as finished, which in turn means -that `tracker.completed()` for this will now be 1. - -This will result in one or more `change` events being emitted. - -Events -====== - -All tracker objects emit `change` events with the following arguments: - -``` -function (name, completed, tracker) -``` - -`name` is the name of the tracker that originally emitted the event, -or if it didn't have one, the first containing tracker group that had one. - -`completed` is the percent complete (as returned by `tracker.completed()` method). - -`tracker` is the tracker object that you are listening for events on. - -TrackerGroup -============ - -* var tracker = new TrackerGroup(**name**) - - * **name** *(optional)* - The name of this tracker group, used in change - notifications if the component updating didn't have a name. Defaults to undefined. - -Creates a new empty tracker aggregation group. These are trackers whose -completion status is determined by the completion status of other trackers. - -* tracker.addUnit(**otherTracker**, **weight**) - - * **otherTracker** - Any of the other are-we-there-yet tracker objects - * **weight** *(optional)* - The weight to give the tracker, defaults to 1. - -Adds the **otherTracker** to this aggregation group. The weight determines -how long you expect this tracker to take to complete in proportion to other -units. So for instance, if you add one tracker with a weight of 1 and -another with a weight of 2, you're saying the second will take twice as long -to complete as the first. As such, the first will account for 33% of the -completion of this tracker and the second will account for the other 67%. - -Returns **otherTracker**. 
- -* var subGroup = tracker.newGroup(**name**, **weight**) - -The above is exactly equivalent to: - -```javascript - var subGroup = tracker.addUnit(new TrackerGroup(name), weight) -``` - -* var subItem = tracker.newItem(**name**, **todo**, **weight**) - -The above is exactly equivalent to: - -```javascript - var subItem = tracker.addUnit(new Tracker(name, todo), weight) -``` - -* var subStream = tracker.newStream(**name**, **todo**, **weight**) - -The above is exactly equivalent to: - -```javascript - var subStream = tracker.addUnit(new TrackerStream(name, todo), weight) -``` - -* console.log( tracker.debug() ) - -Returns a tree showing the completion of this tracker group and all of its -children, including recursively entering all of the children. - -Tracker -======= - -* var tracker = new Tracker(**name**, **todo**) - - * **name** *(optional)* The name of this counter to report in change - events. Defaults to undefined. - * **todo** *(optional)* The amount of work todo (a number). Defaults to 0. - -Ordinarily these are constructed as a part of a tracker group (via -`newItem`). - -* var completed = tracker.completed() - -Returns the ratio of completed work to work to be done. Range of 0 to 1. If -total work to be done is 0 then it will return 0. - -* tracker.addWork(**todo**) - - * **todo** A number to add to the amount of work to be done. - -Increases the amount of work to be done, thus decreasing the completion -percentage. Triggers a `change` event. - -* tracker.completeWork(**completed**) - - * **completed** A number to add to the work complete - -Increase the amount of work complete, thus increasing the completion percentage. -Will never increase the work completed past the amount of work todo. That is, -percentages > 100% are not allowed. Triggers a `change` event. - -* tracker.finish() - -Marks this tracker as finished, tracker.completed() will now be 1. Triggers -a `change` event. - -TrackerStream -============= - -* var tracker = new TrackerStream(**name**, **size**, **options**) - - * **name** *(optional)* The name of this counter to report in change - events. Defaults to undefined. - * **size** *(optional)* The number of bytes being sent through this stream. - * **options** *(optional)* A hash of stream options - -The tracker stream object is a pass through stream that updates an internal -tracker object each time a block passes through. It's intended to track -downloads, file extraction and other related activities. You use it by piping -your data source into it and then using it as your data source. - -If your data has a length attribute then that's used as the amount of work -completed when the chunk is passed through. If it does not (eg, object -streams) then each chunk counts as completing 1 unit of work, so your size -should be the total number of objects being streamed. - -* tracker.addWork(**todo**) - - * **todo** Increase the expected overall size by **todo** bytes. - -Increases the amount of work to be done, thus decreasing the completion -percentage. Triggers a `change` event. 
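Putting the pieces above together, a short sketch of a weighted tracker group (the job names and work amounts are invented for illustration):

```javascript
var awty = require("are-we-there-yet")

var top = new awty.TrackerGroup("copy job")

// extract is expected to take twice as long as download,
// so it accounts for 2/3 of the group's completion
var download = top.addUnit(new awty.Tracker("download", 100), 1)
var extract = top.addUnit(new awty.Tracker("extract", 100), 2)

top.on("change", function (name, completed) {
  console.log(name, Math.round(completed * 100) + "%")
})

download.completeWork(100) // group is now ~33% complete
extract.completeWork(50)   // group is now ~67% complete
extract.finish()           // group reaches 100%
```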
diff --git a/node_modules/asap/README.md b/node_modules/asap/README.md deleted file mode 100644 index 452fd8c203709..0000000000000 --- a/node_modules/asap/README.md +++ /dev/null @@ -1,237 +0,0 @@ -# ASAP - -[![Build Status](https://travis-ci.org/kriskowal/asap.png?branch=master)](https://travis-ci.org/kriskowal/asap) - -Promise and asynchronous observer libraries, as well as hand-rolled callback -programs and libraries, often need a mechanism to postpone the execution of a -callback until the next available event. -(See [Designing API’s for Asynchrony][Zalgo].) -The `asap` function executes a task **as soon as possible** but not before it -returns, waiting only for the completion of the current event and previously -scheduled tasks. - -```javascript -asap(function () { - // ... -}); -``` - -[Zalgo]: http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony - -This CommonJS package provides an `asap` module that exports a function that -executes a task function *as soon as possible*. - -ASAP strives to schedule events to occur before yielding for IO, reflow, -or redrawing. -Each event receives an independent stack, with only platform code in parent -frames and the events run in the order they are scheduled. - -ASAP provides a fast event queue that will execute tasks until it is -empty before yielding to the JavaScript engine's underlying event-loop. -When a task gets added to a previously empty event queue, ASAP schedules a flush -event, preferring for that event to occur before the JavaScript engine has an -opportunity to perform IO tasks or rendering, thus making the first task and -subsequent tasks semantically indistinguishable. -ASAP uses a variety of techniques to preserve this invariant on different -versions of browsers and Node.js. - -By design, ASAP prevents input events from being handled until the task -queue is empty. -If the process is busy enough, this may cause incoming connection requests to be -dropped, and may cause existing connections to inform the sender to reduce the -transmission rate or stall. -ASAP allows this on the theory that, if there is enough work to do, there is no -sense in looking for trouble. -As a consequence, ASAP can interfere with smooth animation. -If your task should be tied to the rendering loop, consider using -`requestAnimationFrame` instead. -A long sequence of tasks can also effect the long running script dialog. -If this is a problem, you may be able to use ASAP’s cousin `setImmediate` to -break long processes into shorter intervals and periodically allow the browser -to breathe. -`setImmediate` will yield for IO, reflow, and repaint events. -It also returns a handler and can be canceled. -For a `setImmediate` shim, consider [YuzuJS setImmediate][setImmediate]. - -[setImmediate]: https://github.com/YuzuJS/setImmediate - -Take care. -ASAP can sustain infinite recursive calls without warning. -It will not halt from a stack overflow, and it will not consume unbounded -memory. -This is behaviorally equivalent to an infinite loop. -Just as with infinite loops, you can monitor a Node.js process for this behavior -with a heart-beat signal. -As with infinite loops, a very small amount of caution goes a long way to -avoiding problems. - -```javascript -function loop() { - asap(loop); -} -loop(); -``` - -In browsers, if a task throws an exception, it will not interrupt the flushing -of high-priority tasks. -The exception will be postponed to a later, low-priority event to avoid -slow-downs. 
-In Node.js, if a task throws an exception, ASAP will resume flushing only if—and -only after—the error is handled by `domain.on("error")` or -`process.on("uncaughtException")`. - -## Raw ASAP - -Checking for exceptions comes at a cost. -The package also provides an `asap/raw` module that exports the underlying -implementation which is faster but stalls if a task throws an exception. -This internal version of the ASAP function does not check for errors. -If a task does throw an error, it will stall the event queue unless you manually -call `rawAsap.requestFlush()` before throwing the error, or any time after. - -In Node.js, `asap/raw` also runs all tasks outside any domain. -If you need a task to be bound to your domain, you will have to do it manually. - -```js -if (process.domain) { - task = process.domain.bind(task); -} -rawAsap(task); -``` - -## Tasks - -A task may be any object that implements `call()`. -A function will suffice, but closures tend not to be reusable and can cause -garbage collector churn. -Both `asap` and `rawAsap` accept task objects to give you the option of -recycling task objects or using higher callable object abstractions. -See the `asap` source for an illustration. - - -## Compatibility - -ASAP is tested on Node.js v0.10 and in a broad spectrum of web browsers. -The following charts capture the browser test results for the most recent -release. -The first chart shows test results for ASAP running in the main window context. -The second chart shows test results for ASAP running in a web worker context. -Test results are inconclusive (grey) on browsers that do not support web -workers. -These data are captured automatically by [Continuous -Integration][]. - -[Continuous Integration]: https://github.com/kriskowal/asap/blob/master/CONTRIBUTING.md - -![Browser Compatibility](http://kriskowal-asap.s3-website-us-west-2.amazonaws.com/train/integration-2/saucelabs-results-matrix.svg) - -![Compatibility in Web Workers](http://kriskowal-asap.s3-website-us-west-2.amazonaws.com/train/integration-2/saucelabs-worker-results-matrix.svg) - -## Caveats - -When a task is added to an empty event queue, it is not always possible to -guarantee that the task queue will begin flushing immediately after the current -event. -However, once the task queue begins flushing, it will not yield until the queue -is empty, even if the queue grows while executing tasks. - -The following browsers allow the use of [DOM mutation observers][] to access -the HTML [microtask queue][], and thus begin flushing ASAP's task queue -immediately at the end of the current event loop turn, before any rendering or -IO: - -[microtask queue]: http://www.whatwg.org/specs/web-apps/current-work/multipage/webappapis.html#microtask-queue -[DOM mutation observers]: http://dom.spec.whatwg.org/#mutation-observers - -- Android 4–4.3 -- Chrome 26–34 -- Firefox 14–29 -- Internet Explorer 11 -- iPad Safari 6–7.1 -- iPhone Safari 7–7.1 -- Safari 6–7 - -In the absense of mutation observers, there are a few browsers, and situations -like web workers in some of the above browsers, where [message channels][] -would be a useful way to avoid falling back to timers. -Message channels give direct access to the HTML [task queue][], so the ASAP -task queue would flush after any already queued rendering and IO tasks, but -without having the minimum delay imposed by timers. -However, among these browsers, Internet Explorer 10 and Safari do not reliably -dispatch messages, so they are not worth the trouble to implement. 
- -[message channels]: http://www.whatwg.org/specs/web-apps/current-work/multipage/web-messaging.html#message-channels -[task queue]: http://www.whatwg.org/specs/web-apps/current-work/multipage/webappapis.html#concept-task - -- Internet Explorer 10 -- Safair 5.0-1 -- Opera 11-12 - -In the absense of mutation observers, these browsers and the following browsers -all fall back to using `setTimeout` and `setInterval` to ensure that a `flush` -occurs. -The implementation uses both and cancels whatever handler loses the race, since -`setTimeout` tends to occasionally skip tasks in unisolated circumstances. -Timers generally delay the flushing of ASAP's task queue for four milliseconds. - -- Firefox 3–13 -- Internet Explorer 6–10 -- iPad Safari 4.3 -- Lynx 2.8.7 - - -## Heritage - -ASAP has been factored out of the [Q][] asynchronous promise library. -It originally had a naïve implementation in terms of `setTimeout`, but -[Malte Ubl][NonBlocking] provided an insight that `postMessage` might be -useful for creating a high-priority, no-delay event dispatch hack. -Since then, Internet Explorer proposed and implemented `setImmediate`. -Robert Katić began contributing to Q by measuring the performance of -the internal implementation of `asap`, paying particular attention to -error recovery. -Domenic, Robert, and Kris Kowal collectively settled on the current strategy of -unrolling the high-priority event queue internally regardless of what strategy -we used to dispatch the potentially lower-priority flush event. -Domenic went on to make ASAP cooperate with Node.js domains. - -[Q]: https://github.com/kriskowal/q -[NonBlocking]: http://www.nonblocking.io/2011/06/windownexttick.html - -For further reading, Nicholas Zakas provided a thorough article on [The -Case for setImmediate][NCZ]. - -[NCZ]: http://www.nczonline.net/blog/2013/07/09/the-case-for-setimmediate/ - -Ember’s RSVP promise implementation later [adopted][RSVP ASAP] the name ASAP but -further developed the implentation. -Particularly, The `MessagePort` implementation was abandoned due to interaction -[problems with Mobile Internet Explorer][IE Problems] in favor of an -implementation backed on the newer and more reliable DOM `MutationObserver` -interface. -These changes were back-ported into this library. - -[IE Problems]: https://github.com/cujojs/when/issues/197 -[RSVP ASAP]: https://github.com/tildeio/rsvp.js/blob/cddf7232546a9cf858524b75cde6f9edf72620a7/lib/rsvp/asap.js - -In addition, ASAP factored into `asap` and `asap/raw`, such that `asap` remained -exception-safe, but `asap/raw` provided a tight kernel that could be used for -tasks that guaranteed that they would not throw exceptions. -This core is useful for promise implementations that capture thrown errors in -rejected promises and do not need a second safety net. -At the same time, the exception handling in `asap` was factored into separate -implementations for Node.js and browsers, using the the [Browserify][Browser -Config] `browser` property in `package.json` to instruct browser module loaders -and bundlers, including [Browserify][], [Mr][], and [Mop][], to use the -browser-only implementation. 
- -[Browser Config]: https://gist.github.com/defunctzombie/4339901 -[Browserify]: https://github.com/substack/node-browserify -[Mr]: https://github.com/montagejs/mr -[Mop]: https://github.com/montagejs/mop - -## License - -Copyright 2009-2014 by Contributors -MIT License (enclosed) - diff --git a/node_modules/asn1/README.md b/node_modules/asn1/README.md deleted file mode 100644 index 2208210a33bd8..0000000000000 --- a/node_modules/asn1/README.md +++ /dev/null @@ -1,50 +0,0 @@ -node-asn1 is a library for encoding and decoding ASN.1 datatypes in pure JS. -Currently BER encoding is supported; at some point I'll likely have to do DER. - -## Usage - -Mostly, if you're *actually* needing to read and write ASN.1, you probably don't -need this readme to explain what and why. If you have no idea what ASN.1 is, -see this: ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc - -The source is pretty much self-explanatory, and has read/write methods for the -common types out there. - -### Decoding - -The following reads an ASN.1 sequence with a boolean. - - var Ber = require('asn1').Ber; - - var reader = new Ber.Reader(Buffer.from([0x30, 0x03, 0x01, 0x01, 0xff])); - - reader.readSequence(); - console.log('Sequence len: ' + reader.length); - if (reader.peek() === Ber.Boolean) - console.log(reader.readBoolean()); - -### Encoding - -The following generates the same payload as above. - - var Ber = require('asn1').Ber; - - var writer = new Ber.Writer(); - - writer.startSequence(); - writer.writeBoolean(true); - writer.endSequence(); - - console.log(writer.buffer); - -## Installation - - npm install asn1 - -## License - -MIT. - -## Bugs - -See <https://github.com/joyent/node-asn1/issues>. diff --git a/node_modules/assert-plus/README.md b/node_modules/assert-plus/README.md deleted file mode 100644 index ec200d161efc9..0000000000000 --- a/node_modules/assert-plus/README.md +++ /dev/null @@ -1,162 +0,0 @@ -# assert-plus - -This library is a super small wrapper over node's assert module that has two -things: (1) the ability to disable assertions with the environment variable -NODE\_NDEBUG, and (2) some API wrappers for argument testing. Like -`assert.string(myArg, 'myArg')`. As a simple example, most of my code looks -like this: - -```javascript - var assert = require('assert-plus'); - - function fooAccount(options, callback) { - assert.object(options, 'options'); - assert.number(options.id, 'options.id'); - assert.bool(options.isManager, 'options.isManager'); - assert.string(options.name, 'options.name'); - assert.arrayOfString(options.email, 'options.email'); - assert.func(callback, 'callback'); - - // Do stuff - callback(null, {}); - } -``` - -# API - -All methods that *aren't* part of node's core assert API are simply assumed to -take an argument, and then a string 'name' that's not a message; `AssertionError` -will be thrown if the assertion fails with a message like: - - AssertionError: foo (string) is required - at test (/home/mark/work/foo/foo.js:3:9) - at Object.<anonymous> (/home/mark/work/foo/foo.js:15:1) - at Module._compile (module.js:446:26) - at Object..js (module.js:464:10) - at Module.load (module.js:353:31) - at Function._load (module.js:311:12) - at Array.0 (module.js:484:10) - at EventEmitter._tickCallback (node.js:190:38) - -from: - -```javascript - function test(foo) { - assert.string(foo, 'foo'); - } -``` - -There you go. 
You can check that arrays are of a homogeneous type with `Arrayof$Type`: - -```javascript - function test(foo) { - assert.arrayOfString(foo, 'foo'); - } -``` - -You can assert IFF an argument is not `undefined` (i.e., an optional arg): - -```javascript - assert.optionalString(foo, 'foo'); -``` - -Lastly, you can opt-out of assertion checking altogether by setting the -environment variable `NODE_NDEBUG=1`. This is pseudo-useful if you have -lots of assertions, and don't want to pay `typeof ()` taxes to v8 in -production. Be advised: The standard functions re-exported from `assert` are -also disabled in assert-plus if NDEBUG is specified. Using them directly from -the `assert` module avoids this behavior. - -The complete list of APIs is: - -* assert.array -* assert.bool -* assert.buffer -* assert.func -* assert.number -* assert.finite -* assert.object -* assert.string -* assert.stream -* assert.date -* assert.regexp -* assert.uuid -* assert.arrayOfArray -* assert.arrayOfBool -* assert.arrayOfBuffer -* assert.arrayOfFunc -* assert.arrayOfNumber -* assert.arrayOfFinite -* assert.arrayOfObject -* assert.arrayOfString -* assert.arrayOfStream -* assert.arrayOfDate -* assert.arrayOfRegexp -* assert.arrayOfUuid -* assert.optionalArray -* assert.optionalBool -* assert.optionalBuffer -* assert.optionalFunc -* assert.optionalNumber -* assert.optionalFinite -* assert.optionalObject -* assert.optionalString -* assert.optionalStream -* assert.optionalDate -* assert.optionalRegexp -* assert.optionalUuid -* assert.optionalArrayOfArray -* assert.optionalArrayOfBool -* assert.optionalArrayOfBuffer -* assert.optionalArrayOfFunc -* assert.optionalArrayOfNumber -* assert.optionalArrayOfFinite -* assert.optionalArrayOfObject -* assert.optionalArrayOfString -* assert.optionalArrayOfStream -* assert.optionalArrayOfDate -* assert.optionalArrayOfRegexp -* assert.optionalArrayOfUuid -* assert.AssertionError -* assert.fail -* assert.ok -* assert.equal -* assert.notEqual -* assert.deepEqual -* assert.notDeepEqual -* assert.strictEqual -* assert.notStrictEqual -* assert.throws -* assert.doesNotThrow -* assert.ifError - -# Installation - - npm install assert-plus - -## License - -The MIT License (MIT) -Copyright (c) 2012 Mark Cavage - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - -## Bugs - -See <https://github.com/mcavage/node-assert-plus/issues>. 
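To make the NODE_NDEBUG opt-out described above concrete, a small sketch (the `greet` function is hypothetical):

```javascript
var assert = require('assert-plus');

function greet(name) {
  assert.string(name, 'name'); // throws "name (string) is required" on bad input
  return 'hello ' + name;
}

greet('world'); // ok
greet(42);      // AssertionError, unless the process was started with NODE_NDEBUG=1,
                // in which case assert-plus checks are no-ops and this returns 'hello 42'
```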
diff --git a/node_modules/asynckit/README.md b/node_modules/asynckit/README.md deleted file mode 100644 index ddcc7e6b95ca9..0000000000000 --- a/node_modules/asynckit/README.md +++ /dev/null @@ -1,233 +0,0 @@ -# asynckit [![NPM Module](https://img.shields.io/npm/v/asynckit.svg?style=flat)](https://www.npmjs.com/package/asynckit) - -Minimal async jobs utility library, with streams support. - -[![PhantomJS Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=browser&style=flat)](https://travis-ci.org/alexindigo/asynckit) -[![Linux Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=linux:0.12-6.x&style=flat)](https://travis-ci.org/alexindigo/asynckit) -[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/asynckit/v0.4.0.svg?label=windows:0.12-6.x&style=flat)](https://ci.appveyor.com/project/alexindigo/asynckit) - -[![Coverage Status](https://img.shields.io/coveralls/alexindigo/asynckit/v0.4.0.svg?label=code+coverage&style=flat)](https://coveralls.io/github/alexindigo/asynckit?branch=master) -[![Dependency Status](https://img.shields.io/david/alexindigo/asynckit/v0.4.0.svg?style=flat)](https://david-dm.org/alexindigo/asynckit) -[![bitHound Overall Score](https://www.bithound.io/github/alexindigo/asynckit/badges/score.svg)](https://www.bithound.io/github/alexindigo/asynckit) - -<!-- [![Readme](https://img.shields.io/badge/readme-tested-brightgreen.svg?style=flat)](https://www.npmjs.com/package/reamde) --> - -AsyncKit provides harness for `parallel` and `serial` iterators over list of items represented by arrays or objects. -Optionally it accepts abort function (should be synchronously return by iterator for each item), and terminates left over jobs upon an error event. For specific iteration order built-in (`ascending` and `descending`) and custom sort helpers also supported, via `asynckit.serialOrdered` method. - -It ensures async operations to keep behavior more stable and prevent `Maximum call stack size exceeded` errors, from sync iterators. - -| compression | size | -| :----------------- | -------: | -| asynckit.js | 12.34 kB | -| asynckit.min.js | 4.11 kB | -| asynckit.min.js.gz | 1.47 kB | - - -## Install - -```sh -$ npm install --save asynckit -``` - -## Examples - -### Parallel Jobs - -Runs iterator over provided array in parallel. Stores output in the `result` array, -on the matching positions. In unlikely event of an error from one of the jobs, -will terminate rest of the active jobs (if abort function is provided) -and return error along with salvaged data to the main callback function. 
- -#### Input Array - -```javascript -var parallel = require('asynckit').parallel - , assert = require('assert') - ; - -var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] - , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] - , expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ] - , target = [] - ; - -parallel(source, asyncJob, function(err, result) -{ - assert.deepEqual(result, expectedResult); - assert.deepEqual(target, expectedTarget); -}); - -// async job accepts one element from the array -// and a callback function -function asyncJob(item, cb) -{ - // different delays (in ms) per item - var delay = item * 25; - - // pretend different jobs take different time to finish - // and not in consequential order - var timeoutId = setTimeout(function() { - target.push(item); - cb(null, item * 2); - }, delay); - - // allow to cancel "leftover" jobs upon error - // return function, invoking of which will abort this job - return clearTimeout.bind(null, timeoutId); -} -``` - -More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js). - -#### Input Object - -Also it supports named jobs, listed via object. - -```javascript -var parallel = require('asynckit/parallel') - , assert = require('assert') - ; - -var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 } - , expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 } - , expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ] - , expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ] - , target = [] - , keys = [] - ; - -parallel(source, asyncJob, function(err, result) -{ - assert.deepEqual(result, expectedResult); - assert.deepEqual(target, expectedTarget); - assert.deepEqual(keys, expectedKeys); -}); - -// supports full value, key, callback (shortcut) interface -function asyncJob(item, key, cb) -{ - // different delays (in ms) per item - var delay = item * 25; - - // pretend different jobs take different time to finish - // and not in consequential order - var timeoutId = setTimeout(function() { - keys.push(key); - target.push(item); - cb(null, item * 2); - }, delay); - - // allow to cancel "leftover" jobs upon error - // return function, invoking of which will abort this job - return clearTimeout.bind(null, timeoutId); -} -``` - -More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js). - -### Serial Jobs - -Runs iterator over provided array sequentially. Stores output in the `result` array, -on the matching positions. In unlikely event of an error from one of the jobs, -will not proceed to the rest of the items in the list -and return error along with salvaged data to the main callback function. - -#### Input Array - -```javascript -var serial = require('asynckit/serial') - , assert = require('assert') - ; - -var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] - , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] - , expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ] - , target = [] - ; - -serial(source, asyncJob, function(err, result) -{ - assert.deepEqual(result, expectedResult); - assert.deepEqual(target, expectedTarget); -}); - -// extended interface (item, key, callback) -// also supported for arrays -function asyncJob(item, key, cb) -{ - target.push(key); - - // it will be automatically made async - // even it iterator "returns" in the same event loop - cb(null, item * 2); -} -``` - -More examples could be found in [test/test-serial-array.js](test/test-serial-array.js). 
- -#### Input Object - -Also it supports named jobs, listed via object. - -```javascript -var serial = require('asynckit').serial - , assert = require('assert') - ; - -var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] - , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] - , expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ] - , target = [] - ; - -var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 } - , expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 } - , expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ] - , target = [] - ; - - -serial(source, asyncJob, function(err, result) -{ - assert.deepEqual(result, expectedResult); - assert.deepEqual(target, expectedTarget); -}); - -// shortcut interface (item, callback) -// works for object as well as for the arrays -function asyncJob(item, cb) -{ - target.push(item); - - // it will be automatically made async - // even it iterator "returns" in the same event loop - cb(null, item * 2); -} -``` - -More examples could be found in [test/test-serial-object.js](test/test-serial-object.js). - -_Note: Since _object_ is an _unordered_ collection of properties, -it may produce unexpected results with sequential iterations. -Whenever order of the jobs' execution is important please use `serialOrdered` method._ - -### Ordered Serial Iterations - -TBD - -For example [compare-property](compare-property) package. - -### Streaming interface - -TBD - -## Want to Know More? - -More examples can be found in [test folder](test/). - -Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions. - -## License - -AsyncKit is licensed under the MIT license. diff --git a/node_modules/aws-sign2/README.md b/node_modules/aws-sign2/README.md deleted file mode 100644 index 763564e0aa5b8..0000000000000 --- a/node_modules/aws-sign2/README.md +++ /dev/null @@ -1,4 +0,0 @@ -aws-sign -======== - -AWS signing. Originally pulled from LearnBoost/knox, maintained as vendor in request, now a standalone module. diff --git a/node_modules/aws4/.github/FUNDING.yml b/node_modules/aws4/.github/FUNDING.yml deleted file mode 100644 index b7fdd9747f71d..0000000000000 --- a/node_modules/aws4/.github/FUNDING.yml +++ /dev/null @@ -1,3 +0,0 @@ -# These are supported funding model platforms - -github: mhart diff --git a/node_modules/aws4/.travis.yml b/node_modules/aws4/.travis.yml deleted file mode 100644 index 178bf31ed7186..0000000000000 --- a/node_modules/aws4/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: node_js -node_js: - - "0.10" - - "0.12" - - "4" - - "6" - - "8" - - "10" - - "12" diff --git a/node_modules/aws4/README.md b/node_modules/aws4/README.md deleted file mode 100644 index 7202e452f8c43..0000000000000 --- a/node_modules/aws4/README.md +++ /dev/null @@ -1,183 +0,0 @@ -aws4 ----- - -[![Build Status](https://api.travis-ci.org/mhart/aws4.png?branch=master)](https://travis-ci.org/github/mhart/aws4) - -A small utility to sign vanilla Node.js http(s) request options using Amazon's -[AWS Signature Version 4](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html). - -If you want to sign and send AWS requests in a modern browser, or an environment like [Cloudflare Workers](https://developers.cloudflare.com/workers/), then check out [aws4fetch](https://github.com/mhart/aws4fetch) – otherwise you can also bundle this library for use [in older browsers](./browser). 
- -The only AWS service that *doesn't* support v4 as of 2020-05-22 is -[SimpleDB](https://docs.aws.amazon.com/AmazonSimpleDB/latest/DeveloperGuide/SDB_API.html) -(it only supports [AWS Signature Version 2](https://github.com/mhart/aws2)). - -It also provides defaults for a number of core AWS headers and -request parameters, making it very easy to query AWS services, or -build out a fully-featured AWS library. - -Example -------- - -```javascript -var https = require('https') -var aws4 = require('aws4') - -// to illustrate usage, we'll create a utility function to request and pipe to stdout -function request(opts) { https.request(opts, function(res) { res.pipe(process.stdout) }).end(opts.body || '') } - -// aws4 will sign an options object as you'd pass to http.request, with an AWS service and region -var opts = { host: 'my-bucket.s3.us-west-1.amazonaws.com', path: '/my-object', service: 's3', region: 'us-west-1' } - -// aws4.sign() will sign and modify these options, ready to pass to http.request -aws4.sign(opts, { accessKeyId: '', secretAccessKey: '' }) - -// or it can get credentials from process.env.AWS_ACCESS_KEY_ID, etc -aws4.sign(opts) - -// for most AWS services, aws4 can figure out the service and region if you pass a host -opts = { host: 'my-bucket.s3.us-west-1.amazonaws.com', path: '/my-object' } - -// usually it will add/modify request headers, but you can also sign the query: -opts = { host: 'my-bucket.s3.amazonaws.com', path: '/?X-Amz-Expires=12345', signQuery: true } - -// and for services with simple hosts, aws4 can infer the host from service and region: -opts = { service: 'sqs', region: 'us-east-1', path: '/?Action=ListQueues' } - -// and if you're using us-east-1, it's the default: -opts = { service: 'sqs', path: '/?Action=ListQueues' } - -aws4.sign(opts) -console.log(opts) -/* -{ - host: 'sqs.us-east-1.amazonaws.com', - path: '/?Action=ListQueues', - headers: { - Host: 'sqs.us-east-1.amazonaws.com', - 'X-Amz-Date': '20121226T061030Z', - Authorization: 'AWS4-HMAC-SHA256 Credential=ABCDEF/20121226/us-east-1/sqs/aws4_request, ...' - } -} -*/ - -// we can now use this to query AWS -request(opts) -/* -<?xml version="1.0"?> -<ListQueuesResponse xmlns="https://queue.amazonaws.com/doc/2012-11-05/"> -... -*/ - -// aws4 can infer the HTTP method if a body is passed in -// method will be POST and Content-Type: 'application/x-www-form-urlencoded; charset=utf-8' -request(aws4.sign({ service: 'iam', body: 'Action=ListGroups&Version=2010-05-08' })) -/* -<ListGroupsResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/"> -... -*/ - -// you can specify any custom option or header as per usual -request(aws4.sign({ - service: 'dynamodb', - region: 'ap-southeast-2', - method: 'POST', - path: '/', - headers: { - 'Content-Type': 'application/x-amz-json-1.0', - 'X-Amz-Target': 'DynamoDB_20120810.ListTables' - }, - body: '{}' -})) -/* -{"TableNames":[]} -... -*/ - -// The raw RequestSigner can be used to generate CodeCommit Git passwords -var signer = new aws4.RequestSigner({ - service: 'codecommit', - host: 'git-codecommit.us-east-1.amazonaws.com', - method: 'GIT', - path: '/v1/repos/MyAwesomeRepo', -}) -var password = signer.getDateTime() + 'Z' + signer.signature() - -// see example.js for examples with other services -``` - -API ---- - -### aws4.sign(requestOptions, [credentials]) - -Calculates and populates any necessary AWS headers and/or request -options on `requestOptions`. Returns `requestOptions` as a convenience for chaining. 
- -`requestOptions` is an object holding the same options that the Node.js -[http.request](https://nodejs.org/docs/latest/api/http.html#http_http_request_options_callback) -function takes. - -The following properties of `requestOptions` are used in the signing or -populated if they don't already exist: - -- `hostname` or `host` (will try to be determined from `service` and `region` if not given) -- `method` (will use `'GET'` if not given or `'POST'` if there is a `body`) -- `path` (will use `'/'` if not given) -- `body` (will use `''` if not given) -- `service` (will try to be calculated from `hostname` or `host` if not given) -- `region` (will try to be calculated from `hostname` or `host` or use `'us-east-1'` if not given) -- `signQuery` (to sign the query instead of adding an `Authorization` header, defaults to false) -- `headers['Host']` (will use `hostname` or `host` or be calculated if not given) -- `headers['Content-Type']` (will use `'application/x-www-form-urlencoded; charset=utf-8'` - if not given and there is a `body`) -- `headers['Date']` (used to calculate the signature date if given, otherwise `new Date` is used) - -Your AWS credentials (which can be found in your -[AWS console](https://portal.aws.amazon.com/gp/aws/securityCredentials)) -can be specified in one of two ways: - -- As the second argument, like this: - -```javascript -aws4.sign(requestOptions, { - secretAccessKey: "<your-secret-access-key>", - accessKeyId: "<your-access-key-id>", - sessionToken: "<your-session-token>" -}) -``` - -- From `process.env`, such as this: - -``` -export AWS_ACCESS_KEY_ID="<your-access-key-id>" -export AWS_SECRET_ACCESS_KEY="<your-secret-access-key>" -export AWS_SESSION_TOKEN="<your-session-token>" -``` - -(will also use `AWS_ACCESS_KEY` and `AWS_SECRET_KEY` if available) - -The `sessionToken` property and `AWS_SESSION_TOKEN` environment variable are optional for signing -with [IAM STS temporary credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_use-resources.html). - -Installation ------------- - -With [npm](https://www.npmjs.com/) do: - -``` -npm install aws4 -``` - -Can also be used [in the browser](./browser). - -Thanks ------- - -Thanks to [@jed](https://github.com/jed) for his -[dynamo-client](https://github.com/jed/dynamo-client) lib where I first -committed and subsequently extracted this code. - -Also thanks to the -[official Node.js AWS SDK](https://github.com/aws/aws-sdk-js) for giving -me a start on implementing the v4 signature. diff --git a/node_modules/balanced-match/.npmignore b/node_modules/balanced-match/.npmignore deleted file mode 100644 index ae5d8c36ac652..0000000000000 --- a/node_modules/balanced-match/.npmignore +++ /dev/null @@ -1,5 +0,0 @@ -test -.gitignore -.travis.yml -Makefile -example.js diff --git a/node_modules/balanced-match/README.md b/node_modules/balanced-match/README.md deleted file mode 100644 index 08e918c0db9a6..0000000000000 --- a/node_modules/balanced-match/README.md +++ /dev/null @@ -1,91 +0,0 @@ -# balanced-match - -Match balanced string pairs, like `{` and `}` or `<b>` and `</b>`. Supports regular expressions as well! 
- -[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match) -[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match) - -[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match) - -## Example - -Get the first matching pair of braces: - -```js -var balanced = require('balanced-match'); - -console.log(balanced('{', '}', 'pre{in{nested}}post')); -console.log(balanced('{', '}', 'pre{first}between{second}post')); -console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post')); -``` - -The matches are: - -```bash -$ node example.js -{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' } -{ start: 3, - end: 9, - pre: 'pre', - body: 'first', - post: 'between{second}post' } -{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' } -``` - -## API - -### var m = balanced(a, b, str) - -For the first non-nested matching pair of `a` and `b` in `str`, return an -object with those keys: - -* **start** the index of the first match of `a` -* **end** the index of the matching `b` -* **pre** the preamble, `a` and `b` not included -* **body** the match, `a` and `b` not included -* **post** the postscript, `a` and `b` not included - -If there's no match, `undefined` will be returned. - -If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`. - -### var r = balanced.range(a, b, str) - -For the first non-nested matching pair of `a` and `b` in `str`, return an -array with indexes: `[ <a index>, <b index> ]`. - -If there's no match, `undefined` will be returned. - -If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. - -## Installation - -With [npm](https://npmjs.org) do: - -```bash -npm install balanced-match -``` - -## License - -(MIT) - -Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
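For completeness, a small sketch of the `balanced.range` variant documented above, using values from the README's own examples:

```js
var balanced = require('balanced-match');

// indexes of the first non-nested matching pair
console.log(balanced.range('{', '}', 'pre{in{nested}}post')); // [ 3, 14 ]

// with unmatched pairs, the first pair that was closed wins
console.log(balanced.range('{', '}', '{{a}')); // [ 1, 3 ]
console.log(balanced.range('{', '}', '{a}}')); // [ 0, 2 ]
```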
diff --git a/node_modules/balanced-match/index.js b/node_modules/balanced-match/index.js index 1685a76293255..c67a64608df7f 100644 --- a/node_modules/balanced-match/index.js +++ b/node_modules/balanced-match/index.js @@ -28,6 +28,9 @@ function range(a, b, str) { var i = ai; if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } begs = []; left = str.length; diff --git a/node_modules/balanced-match/package.json b/node_modules/balanced-match/package.json index 61349c6edad62..ce6073e0403b5 100644 --- a/node_modules/balanced-match/package.json +++ b/node_modules/balanced-match/package.json @@ -1,7 +1,7 @@ { "name": "balanced-match", "description": "Match balanced character pairs, like \"{\" and \"}\"", - "version": "1.0.0", + "version": "1.0.2", "repository": { "type": "git", "url": "git://github.com/juliangruber/balanced-match.git" @@ -9,10 +9,9 @@ "homepage": "https://github.com/juliangruber/balanced-match", "main": "index.js", "scripts": { - "test": "make test", - "bench": "make bench" + "test": "tape test/test.js", + "bench": "matcha test/bench.js" }, - "dependencies": {}, "devDependencies": { "matcha": "^0.7.0", "tape": "^4.6.0" diff --git a/node_modules/bcrypt-pbkdf/README.md b/node_modules/bcrypt-pbkdf/README.md deleted file mode 100644 index 7551f335cc0bc..0000000000000 --- a/node_modules/bcrypt-pbkdf/README.md +++ /dev/null @@ -1,45 +0,0 @@ -Port of the OpenBSD `bcrypt_pbkdf` function to pure Javascript. `npm`-ified -version of [Devi Mandiri's port](https://github.com/devi/tmp/blob/master/js/bcrypt_pbkdf.js), -with some minor performance improvements. The code is copied verbatim (and -un-styled) from Devi's work. - -This product includes software developed by Niels Provos. - -## API - -### `bcrypt_pbkdf.pbkdf(pass, passlen, salt, saltlen, key, keylen, rounds)` - -Derive a cryptographic key of arbitrary length from a given password and salt, -using the OpenBSD `bcrypt_pbkdf` function. This is a combination of Blowfish and -SHA-512. - -See [this article](http://www.tedunangst.com/flak/post/bcrypt-pbkdf) for -further information. - -Parameters: - - * `pass`, a Uint8Array of length `passlen` - * `passlen`, an integer Number - * `salt`, a Uint8Array of length `saltlen` - * `saltlen`, an integer Number - * `key`, a Uint8Array of length `keylen`, will be filled with output - * `keylen`, an integer Number - * `rounds`, an integer Number, number of rounds of the PBKDF to run - -### `bcrypt_pbkdf.hash(sha2pass, sha2salt, out)` - -Calculate a Blowfish hash, given SHA2-512 output of a password and salt. Used as -part of the inner round function in the PBKDF. - -Parameters: - - * `sha2pass`, a Uint8Array of length 64 - * `sha2salt`, a Uint8Array of length 64 - * `out`, a Uint8Array of length 32, will be filled with output - -## License - -This source form is a 1:1 port from the OpenBSD `blowfish.c` and `bcrypt_pbkdf.c`. -As a result, it retains the original copyright and license. The two files are -under slightly different (but compatible) licenses, and are here combined in -one file. For each of the full license texts see `LICENSE`. 
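The bcrypt-pbkdf README above documents the `pbkdf` parameters without a usage snippet; a minimal sketch following that signature (the password, salt and key sizes here are arbitrary):

```js
var bcrypt_pbkdf = require('bcrypt-pbkdf')
var crypto = require('crypto')

var pass = new Uint8Array(Buffer.from('correct horse battery staple', 'utf8'))
var salt = new Uint8Array(crypto.randomBytes(16)) // 16 random salt bytes
var key = new Uint8Array(48)                      // will be filled with the derived key

// 16 rounds of the OpenBSD bcrypt_pbkdf function
bcrypt_pbkdf.pbkdf(pass, pass.length, salt, salt.length, key, key.length, 16)

console.log(Buffer.from(key).toString('hex'))
```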
diff --git a/node_modules/bin-links/CHANGELOG.md b/node_modules/bin-links/CHANGELOG.md deleted file mode 100644 index 0531b01ca47f5..0000000000000 --- a/node_modules/bin-links/CHANGELOG.md +++ /dev/null @@ -1,89 +0,0 @@ -# Change Log - -## 2.0.0 - -* Rewrite to promisify and remove dependence on gentle-fs - -<a name="1.1.7"></a> -## [1.1.7](https://github.com/npm/bin-links/compare/v1.1.6...v1.1.7) (2019-12-26) - - -### Bug Fixes - -* resolve folder that is passed in ([0bbd303](https://github.com/npm/bin-links/commit/0bbd303)) - - - -<a name="1.1.6"></a> -## [1.1.6](https://github.com/npm/bin-links/compare/v1.1.5...v1.1.6) (2019-12-11) - - -### Bug Fixes - -* prevent improper clobbering of man/bin links ([642cd18](https://github.com/npm/bin-links/commit/642cd18)), closes [#11](https://github.com/npm/bin-links/issues/11) [#12](https://github.com/npm/bin-links/issues/12) - - - -<a name="1.1.5"></a> -## [1.1.5](https://github.com/npm/bin-links/compare/v1.1.4...v1.1.5) (2019-12-10) - - -### Bug Fixes - -* don't filter out ./ man references ([b3cfd2e](https://github.com/npm/bin-links/commit/b3cfd2e)) - - - -<a name="1.1.4"></a> -## [1.1.4](https://github.com/npm/bin-links/compare/v1.1.3...v1.1.4) (2019-12-09) - - -### Bug Fixes - -* sanitize and validate bin and man link targets ([25a34f9](https://github.com/npm/bin-links/commit/25a34f9)) - - - -<a name="1.1.3"></a> -## [1.1.3](https://github.com/npm/bin-links/compare/v1.1.2...v1.1.3) (2019-08-14) - - - -<a name="1.1.2"></a> -## [1.1.2](https://github.com/npm/bin-links/compare/v1.1.1...v1.1.2) (2018-03-22) - - -### Bug Fixes - -* **linkMans:** return the promise! ([5eccc7f](https://github.com/npm/bin-links/commit/5eccc7f)) - - - -<a name="1.1.1"></a> -## [1.1.1](https://github.com/npm/bin-links/compare/v1.1.0...v1.1.1) (2018-03-07) - - -### Bug Fixes - -* **shebangs:** only convert CR when doing CRLF -> LF ([#2](https://github.com/npm/bin-links/issues/2)) ([43bf857](https://github.com/npm/bin-links/commit/43bf857)) - - - -<a name="1.1.0"></a> -# [1.1.0](https://github.com/npm/bin-links/compare/v1.0.0...v1.1.0) (2017-11-20) - - -### Features - -* **dos2unix:** Log the fact line endings are being changed upon install. 
([e9f8a6f](https://github.com/npm/bin-links/commit/e9f8a6f)) - - - -<a name="1.0.0"></a> -# 1.0.0 (2017-10-07) - - -### Features - -* **import:** initial extraction from npm ([6ed0bfb](https://github.com/npm/bin-links/commit/6ed0bfb)) -* **initial commit:** README ([3fc9cf0](https://github.com/npm/bin-links/commit/3fc9cf0)) diff --git a/node_modules/bin-links/README.md b/node_modules/bin-links/README.md deleted file mode 100644 index fb9d902109eb6..0000000000000 --- a/node_modules/bin-links/README.md +++ /dev/null @@ -1,90 +0,0 @@ -# bin-links [![npm version](https://img.shields.io/npm/v/bin-links.svg)](https://npm.im/bin-links) [![license](https://img.shields.io/npm/l/bin-links.svg)](https://npm.im/bin-links) [![Travis](https://img.shields.io/travis/npm/bin-links.svg)](https://travis-ci.org/npm/bin-links) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/bin-links?svg=true)](https://ci.appveyor.com/project/npm/bin-links) [![Coverage Status](https://coveralls.io/repos/github/npm/bin-links/badge.svg?branch=latest)](https://coveralls.io/github/npm/bin-links?branch=latest) - -[`bin-links`](https://github.com/npm/bin-links) is a standalone library that links -binaries and man pages for Javascript packages - -## Install - -`$ npm install bin-links` - -## Table of Contents - -* [Example](#example) -* [Features](#features) -* [Contributing](#contributing) -* [API](#api) - * [`binLinks`](#binLinks) - * [`binLinks.getPaths()`](#getPaths) - * [`binLinks.checkBins()`](#checkBins) - -### Example - -```javascript -const binLinks = require('bin-links') -const readPackageJson = require('read-package-json-fast') -binLinks({ - path: '/path/to/node_modules/some-package', - pkg: readPackageJson('/path/to/node_modules/some-package/package.json'), - - // true if it's a global install, false for local. default: false - global: true, - - // true if it's the top level package being installed, false otherwise - top: true, - - // true if you'd like to recklessly overwrite files. - force: true, -}) -``` - -### Features - -* Links bin files listed under the `bin` property of pkg to the - `node_modules/.bin` directory of the installing environment. (Or - `${prefix}/bin` for top level global packages on unix, and `${prefix}` - for top level global packages on Windows.) -* Links man files listed under the `man` property of pkg to the share/man - directory. (This is only done for top-level global packages on Unix - systems.) - -### Contributing - -The npm team enthusiastically welcomes contributions and project participation! -There's a bunch of things you can do if you want to contribute! The [Contributor -Guide](CONTRIBUTING.md) has all the information you need for everything from -reporting bugs to contributing entire new features. Please don't hesitate to -jump in if you'd like to, or even ask us questions if something isn't clear. - -### API - -#### <a name="binLinks"></a> `> binLinks({path, pkg, force, global, top})` - -Returns a Promise that resolves when the requisite things have been linked. - -#### <a name="getPaths"></a> `> binLinks.getPaths({path, pkg, global, top })` - -Returns an array of all the paths of links and shims that _might_ be -created (assuming that they exist!) for the package at the specified path. - -Does not touch the filesystem. - -#### <a name="checkBins"></a> `> binLinks.checkBins({path, pkg, global, top, force })` - -Checks if there are any conflicting bins which will prevent the linking of -bins for the given package. 
Returns a Promise that resolves with no value -if the way is clear, and rejects if there's something in the way. - -Always returns successfully if `global` or `top` are false, or if `force` -is true, or if the `pkg` object does not contain any bins to link. - -Note that changes to the file system _may_ still cause the `binLinks` -method to fail even if this method succeeds. Does not check for -conflicting `man` links. - -Reads from the filesystem but does not make any changes. - -##### Example - -```javascript -binLinks({path, pkg, force, global, top}).then(() => console.log('bins linked!')) -``` diff --git a/node_modules/brace-expansion/README.md b/node_modules/brace-expansion/README.md deleted file mode 100644 index 6b4e0e1640915..0000000000000 --- a/node_modules/brace-expansion/README.md +++ /dev/null @@ -1,129 +0,0 @@ -# brace-expansion - -[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), -as known from sh/bash, in JavaScript. - -[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion) -[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion) -[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/) - -[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion) - -## Example - -```js -var expand = require('brace-expansion'); - -expand('file-{a,b,c}.jpg') -// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] - -expand('-v{,,}') -// => ['-v', '-v', '-v'] - -expand('file{0..2}.jpg') -// => ['file0.jpg', 'file1.jpg', 'file2.jpg'] - -expand('file-{a..c}.jpg') -// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] - -expand('file{2..0}.jpg') -// => ['file2.jpg', 'file1.jpg', 'file0.jpg'] - -expand('file{0..4..2}.jpg') -// => ['file0.jpg', 'file2.jpg', 'file4.jpg'] - -expand('file-{a..e..2}.jpg') -// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg'] - -expand('file{00..10..5}.jpg') -// => ['file00.jpg', 'file05.jpg', 'file10.jpg'] - -expand('{{A..C},{a..c}}') -// => ['A', 'B', 'C', 'a', 'b', 'c'] - -expand('ppp{,config,oe{,conf}}') -// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf'] -``` - -## API - -```js -var expand = require('brace-expansion'); -``` - -### var expanded = expand(str) - -Return an array of all possible and valid expansions of `str`. If none are -found, `[str]` is returned. - -Valid expansions are: - -```js -/^(.*,)+(.+)?$/ -// {a,b,...} -``` - -A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`. - -```js -/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ -// {x..y[..incr]} -``` - -A numeric sequence from `x` to `y` inclusive, with optional increment. -If `x` or `y` start with a leading `0`, all the numbers will be padded -to have equal length. Negative numbers and backwards iteration work too. - -```js -/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ -// {x..y[..incr]} -``` - -An alphabetic sequence from `x` to `y` inclusive, with optional increment. -`x` and `y` must be exactly one character, and if given, `incr` must be a -number. - -For compatibility reasons, the string `${` is not eligible for brace expansion. - -## Installation - -With [npm](https://npmjs.org) do: - -```bash -npm install brace-expansion -``` - -## Contributors - -- [Julian Gruber](https://github.com/juliangruber) -- [Isaac Z. 
Schlueter](https://github.com/isaacs) - -## Sponsors - -This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)! - -Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)! - -## License - -(MIT) - -Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/builtins/.travis.yml b/node_modules/builtins/.travis.yml deleted file mode 100644 index cc4dba29d959a..0000000000000 --- a/node_modules/builtins/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -language: node_js -node_js: - - "0.8" - - "0.10" diff --git a/node_modules/byte-size/LICENSE b/node_modules/byte-size/LICENSE index d9e1e9d6918a4..5699dfbe51830 100644 --- a/node_modules/byte-size/LICENSE +++ b/node_modules/byte-size/LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2014-20 Lloyd Brookes <75pound@gmail.com> +Copyright (c) 2014-21 Lloyd Brookes <75pound@gmail.com> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/node_modules/byte-size/README.hbs b/node_modules/byte-size/README.hbs deleted file mode 100644 index 5b677f2a98870..0000000000000 --- a/node_modules/byte-size/README.hbs +++ /dev/null @@ -1,164 +0,0 @@ -[![view on npm](https://badgen.net/npm/v/byte-size)](https://www.npmjs.org/package/byte-size) -[![npm module downloads](https://badgen.net/npm/dt/byte-size)](https://www.npmjs.org/package/byte-size) -[![Gihub repo dependents](https://badgen.net/github/dependents-repo/75lb/byte-size)](https://github.com/75lb/byte-size/network/dependents?dependent_type=REPOSITORY) -[![Gihub package dependents](https://badgen.net/github/dependents-pkg/75lb/byte-size)](https://github.com/75lb/byte-size/network/dependents?dependent_type=PACKAGE) -[![Build Status](https://travis-ci.org/75lb/byte-size.svg?branch=master)](https://travis-ci.org/75lb/byte-size) -[![Coverage Status](https://coveralls.io/repos/github/75lb/byte-size/badge.svg)](https://coveralls.io/github/75lb/byte-size) -[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg)](https://github.com/feross/standard) - -***Upgraders, please check the [release notes](https://github.com/75lb/byte-size/releases).*** - -# 
byte-size - -An isomorphic, load-anywhere function to convert a bytes value (e.g. `3456`) to a human-readable string (`'3.5 kB'`). Choose between [metric or IEC units](https://en.wikipedia.org/wiki/Gigabyte) (summarised below) or specify your own custom units. - -Value | Metric | Metric (octet) | ------ | ------------- | -------------- | -1000 | kB kilobyte | ko kilooctet | -1000^2 | MB megabyte | Mo megaoctet | -1000^3 | GB gigabyte | Go gigaoctet | -1000^4 | TB terabyte | To teraoctet | -1000^5 | PB petabyte | Po petaoctet | -1000^6 | EB exabyte | Eo exaoctet | -1000^7 | ZB zettabyte | Zo zettaoctet | -1000^8 | YB yottabyte | Yo yottaoctet | - -Value | IEC | IEC (octet) | ------- | ------------ | ------------- | -1024 | KiB kibibyte | Kio kibioctet | -1024^2 | MiB mebibyte | Mio mebioctet | -1024^3 | GiB gibibyte | Gio gibioctet | -1024^4 | TiB tebibyte | Tio tebioctet | -1024^5 | PiB pebibyte | Pio pebioctet | -1024^6 | EiB exbibyte | Eio exbioctet | -1024^7 | ZiB zebibyte | Zio zebioctet | -1024^8 | YiB yobibyte | Yio yobioctet | - -## Synopsis - -By default, `byteSize` converts the input number to a human readable string with metric units and a precision of 1. - -```js -> const byteSize = require('byte-size') - -> byteSize(1580) -{ value: '1.6', unit: 'kB', long: 'kilobytes' } -``` - -The object returned by `byteSize` defines a `toString` method therefore can be used directly in string context. - -```js -> `Filesize: ${byteSize(12400)}` -'Filesize: 12.4 kB' -``` - -Override the default `toString` behaviour by setting [`options.toStringFn`](#bytesizebytes-options--object-). - -```js -> function toStringFn () { - return `**${this.value}${this.unit}**` -} - -> `Filesize: ${byteSize(12400, { toStringFn })}` -'Filesize: **12.4kB**' -``` - -Beside the default of `metric`, there are three other built-in units available: `metric_octet`, `iec` and `iec_octet`. - -```js -> byteSize(1580, { units: 'iec' }) -{ value: '1.5', unit: 'KiB', long: 'kibibytes' } - -> byteSize(1580, { units: 'iec_octet' }) -{ value: '1.5', unit: 'Kio', long: 'kibioctets' } - -> byteSize(1580, { units: 'metric_octet' }) -{ value: '1.6', unit: 'ko', long: 'kilooctets' } -``` - -You can adjust the `precision`. - -```js -> byteSize(1580, { units: 'iec', precision: 3 }) -{ value: '1.543', unit: 'KiB', long: 'kibibytes' } - -> byteSize(1580, { units: 'iec', precision: 0 }) -{ value: '2', unit: 'KiB', long: 'kibibytes' } -``` - -Define custom units by passing an object containing one or more additional conversion tables to `options.customUnits`. In `options.units`, specify the name of a property from the `customUnits` object. - -```js -> const customUnits = { - simple: [ - { from: 0 , to: 1e3 , unit: '' }, - { from: 1e3 , to: 1e6 , unit: 'K', long: 'thousand' }, - { from: 1e6 , to: 1e9 , unit: 'Mn', long: 'million' }, - { from: 1e9 , to: 1e12, unit: 'Bn', long: 'billion' } - ] -} - -> const { value, unit } = byteSize(10000, { customUnits, units: 'simple' }) - -> `${value}${unit}` -'10.0K' -``` - -Override the built-in defaults for the duration of the process by passing an options object to `byteSize.defaultOptions()`. This results in cleaner code in cases where `byteSize` is used often with the same options. 
- -```js -> byteSize.defaultOptions({ - units: 'simple', - precision: 2, - customUnits: { - simple: [ - { from: 0, to: 1e3, unit: '' }, - { from: 1e3, to: 1e6, unit: 'k' }, - { from: 1e6, to: 1e9, unit: 'm' }, - { from: 1e9, to: 1e12, unit: 'bn' }, - ] - }, - toStringFn: function () { - return this.value + this.unit - } -}) - -> [2400, 16400, 3991200].map(byteSize).join(', ') -'2.40k, 16.40k, 3.99m' -``` - -{{>main}} - -## Load anywhere - -This library is compatible with Node.js, the Web and any style of module loader. It can be loaded anywhere, natively without transpilation. - -Node.js: - -```js -const byteSize = require('byte-size') -``` - -Within Node.js with ECMAScript Module support enabled: - -```js -import byteSize from 'byte-size' -``` - -Within a modern browser ECMAScript Module: - -```js -import byteSize from './node_modules/byte-size/index.mjs' -``` - -Old browser (adds `window.byteSize`): - -```html -<script nomodule src="./node_modules/byte-size/dist/index.js"></script> -``` - -* * * - -© 2014-20 Lloyd Brookes \<75pound@gmail.com\>. - -Isomorphic test suite by [test-runner](https://github.com/test-runner-js/test-runner) and [web-runner](https://github.com/test-runner-js/web-runner). Documented by [jsdoc-to-markdown](https://github.com/jsdoc2md/jsdoc-to-markdown). diff --git a/node_modules/byte-size/README.md b/node_modules/byte-size/README.md deleted file mode 100644 index 71f47b1e49b3c..0000000000000 --- a/node_modules/byte-size/README.md +++ /dev/null @@ -1,198 +0,0 @@ -[![view on npm](https://badgen.net/npm/v/byte-size)](https://www.npmjs.org/package/byte-size) -[![npm module downloads](https://badgen.net/npm/dt/byte-size)](https://www.npmjs.org/package/byte-size) -[![Gihub repo dependents](https://badgen.net/github/dependents-repo/75lb/byte-size)](https://github.com/75lb/byte-size/network/dependents?dependent_type=REPOSITORY) -[![Gihub package dependents](https://badgen.net/github/dependents-pkg/75lb/byte-size)](https://github.com/75lb/byte-size/network/dependents?dependent_type=PACKAGE) -[![Build Status](https://travis-ci.org/75lb/byte-size.svg?branch=master)](https://travis-ci.org/75lb/byte-size) -[![Coverage Status](https://coveralls.io/repos/github/75lb/byte-size/badge.svg)](https://coveralls.io/github/75lb/byte-size) -[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg)](https://github.com/feross/standard) - -***Upgraders, please check the [release notes](https://github.com/75lb/byte-size/releases).*** - -# byte-size - -An isomorphic, load-anywhere function to convert a bytes value (e.g. `3456`) to a human-readable string (`'3.5 kB'`). Choose between [metric or IEC units](https://en.wikipedia.org/wiki/Gigabyte) (summarised below) or specify your own custom units. 
- -Value | Metric | Metric (octet) | ------ | ------------- | -------------- | -1000 | kB kilobyte | ko kilooctet | -1000^2 | MB megabyte | Mo megaoctet | -1000^3 | GB gigabyte | Go gigaoctet | -1000^4 | TB terabyte | To teraoctet | -1000^5 | PB petabyte | Po petaoctet | -1000^6 | EB exabyte | Eo exaoctet | -1000^7 | ZB zettabyte | Zo zettaoctet | -1000^8 | YB yottabyte | Yo yottaoctet | - -Value | IEC | IEC (octet) | ------- | ------------ | ------------- | -1024 | KiB kibibyte | Kio kibioctet | -1024^2 | MiB mebibyte | Mio mebioctet | -1024^3 | GiB gibibyte | Gio gibioctet | -1024^4 | TiB tebibyte | Tio tebioctet | -1024^5 | PiB pebibyte | Pio pebioctet | -1024^6 | EiB exbibyte | Eio exbioctet | -1024^7 | ZiB zebibyte | Zio zebioctet | -1024^8 | YiB yobibyte | Yio yobioctet | - -## Synopsis - -By default, `byteSize` converts the input number to a human readable string with metric units and a precision of 1. - -```js -> const byteSize = require('byte-size') - -> byteSize(1580) -{ value: '1.6', unit: 'kB', long: 'kilobytes' } -``` - -The object returned by `byteSize` defines a `toString` method therefore can be used directly in string context. - -```js -> `Filesize: ${byteSize(12400)}` -'Filesize: 12.4 kB' -``` - -Override the default `toString` behaviour by setting [`options.toStringFn`](#bytesizebytes-options--object-). - -```js -> function toStringFn () { - return `**${this.value}${this.unit}**` -} - -> `Filesize: ${byteSize(12400, { toStringFn })}` -'Filesize: **12.4kB**' -``` - -Beside the default of `metric`, there are three other built-in units available: `metric_octet`, `iec` and `iec_octet`. - -```js -> byteSize(1580, { units: 'iec' }) -{ value: '1.5', unit: 'KiB', long: 'kibibytes' } - -> byteSize(1580, { units: 'iec_octet' }) -{ value: '1.5', unit: 'Kio', long: 'kibioctets' } - -> byteSize(1580, { units: 'metric_octet' }) -{ value: '1.6', unit: 'ko', long: 'kilooctets' } -``` - -You can adjust the `precision`. - -```js -> byteSize(1580, { units: 'iec', precision: 3 }) -{ value: '1.543', unit: 'KiB', long: 'kibibytes' } - -> byteSize(1580, { units: 'iec', precision: 0 }) -{ value: '2', unit: 'KiB', long: 'kibibytes' } -``` - -Define custom units by passing an object containing one or more additional conversion tables to `options.customUnits`. In `options.units`, specify the name of a property from the `customUnits` object. - -```js -> const customUnits = { - simple: [ - { from: 0 , to: 1e3 , unit: '' }, - { from: 1e3 , to: 1e6 , unit: 'K', long: 'thousand' }, - { from: 1e6 , to: 1e9 , unit: 'Mn', long: 'million' }, - { from: 1e9 , to: 1e12, unit: 'Bn', long: 'billion' } - ] -} - -> const { value, unit } = byteSize(10000, { customUnits, units: 'simple' }) - -> `${value}${unit}` -'10.0K' -``` - -Override the built-in defaults for the duration of the process by passing an options object to `byteSize.defaultOptions()`. This results in cleaner code in cases where `byteSize` is used often with the same options. 
- -```js -> byteSize.defaultOptions({ - units: 'simple', - precision: 2, - customUnits: { - simple: [ - { from: 0, to: 1e3, unit: '' }, - { from: 1e3, to: 1e6, unit: 'k' }, - { from: 1e6, to: 1e9, unit: 'm' }, - { from: 1e9, to: 1e12, unit: 'bn' }, - ] - }, - toStringFn: function () { - return this.value + this.unit - } -}) - -> [2400, 16400, 3991200].map(byteSize).join(', ') -'2.40k, 16.40k, 3.99m' -``` - -<a name="module_byte-size"></a> - -## byte-size - -* [byte-size](#module_byte-size) - * [byteSize(bytes, [options])](#exp_module_byte-size--byteSize) ⇒ <code>object</code> ⏏ - * [.defaultOptions(options)](#module_byte-size--byteSize.defaultOptions) - -<a name="exp_module_byte-size--byteSize"></a> - -### byteSize(bytes, [options]) ⇒ <code>object</code> ⏏ -Returns an object with the spec `{ value: string, unit: string, long: string }`. The returned object defines a `toString` method meaning it can be used in any string context. - -**Kind**: Exported function - -| Param | Type | Description | -| --- | --- | --- | -| bytes | <code>number</code> | The bytes value to convert. | -| [options] | <code>object</code> | Optional config. | -| [options.precision] | <code>number</code> | Number of decimal places. Defaults to `1`. | -| [options.units] | <code>string</code> | Specify `'metric'`, `'iec'`, `'metric_octet'`, `'iec_octet'` or the name of a property from the custom units table in `options.customUnits`. Defaults to `metric`. | -| [options.customUnits] | <code>object</code> | An object containing one or more custom unit lookup tables. | -| [options.toStringFn] | <code>function</code> | A `toString` function to override the default. | - -<a name="module_byte-size--byteSize.defaultOptions"></a> - -#### byteSize.defaultOptions(options) -Set the default `byteSize` options for the duration of the process. - -**Kind**: static method of [<code>byteSize</code>](#exp_module_byte-size--byteSize) - -| Param | Type | Description | -| --- | --- | --- | -| options | <code>object</code> | A `byteSize` options object. | - - -## Load anywhere - -This library is compatible with Node.js, the Web and any style of module loader. It can be loaded anywhere, natively without transpilation. - -Node.js: - -```js -const byteSize = require('byte-size') -``` - -Within Node.js with ECMAScript Module support enabled: - -```js -import byteSize from 'byte-size' -``` - -Within a modern browser ECMAScript Module: - -```js -import byteSize from './node_modules/byte-size/index.mjs' -``` - -Old browser (adds `window.byteSize`): - -```html -<script nomodule src="./node_modules/byte-size/dist/index.js"></script> -``` - -* * * - -© 2014-20 Lloyd Brookes \<75pound@gmail.com\>. - -Isomorphic test suite by [test-runner](https://github.com/test-runner-js/test-runner) and [web-runner](https://github.com/test-runner-js/web-runner). Documented by [jsdoc-to-markdown](https://github.com/jsdoc2md/jsdoc-to-markdown). diff --git a/node_modules/byte-size/dist/index.js b/node_modules/byte-size/dist/index.js index 78129d8b02d21..dd1debda59abd 100644 --- a/node_modules/byte-size/dist/index.js +++ b/node_modules/byte-size/dist/index.js @@ -1,7 +1,7 @@ (function (global, factory) { typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : typeof define === 'function' && define.amd ? define(factory) : - (global = global || self, global.byteSize = factory()); + (global = typeof globalThis !== 'undefined' ? 
globalThis : global || self, global.byteSize = factory()); }(this, (function () { 'use strict'; /** diff --git a/node_modules/byte-size/package.json b/node_modules/byte-size/package.json index e69b7e5f53ae2..b5f454592da10 100644 --- a/node_modules/byte-size/package.json +++ b/node_modules/byte-size/package.json @@ -8,7 +8,7 @@ "url": "http://repejota.com" } ], - "version": "7.0.0", + "version": "7.0.1", "main": "dist/index.js", "license": "MIT", "engines": { @@ -39,12 +39,12 @@ "dist": "rollup -f umd -n byteSize -o dist/index.js index.mjs" }, "devDependencies": { - "@test-runner/web": "^0.3.4", + "@test-runner/web": "^0.3.5", "coveralls": "^3.1.0", "esm-runner": "^0.3.4", "isomorphic-assert": "^0.1.1", - "jsdoc-to-markdown": "^5.0.3", - "rollup": "^2.10.9", + "jsdoc-to-markdown": "^7.0.0", + "rollup": "^2.40.0", "test-object-model": "^0.6.1" }, "standard": { diff --git a/node_modules/cacache/CHANGELOG.md b/node_modules/cacache/CHANGELOG.md deleted file mode 100644 index 14eee0b381c07..0000000000000 --- a/node_modules/cacache/CHANGELOG.md +++ /dev/null @@ -1,794 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -### [15.0.5](https://github.com/npm/cacache/compare/v15.0.4...v15.0.5) (2020-07-11) - -### [15.0.4](https://github.com/npm/cacache/compare/v15.0.3...v15.0.4) (2020-06-03) - - -### Bug Fixes - -* replace move-file dep with @npmcli/move-file ([bf88af0](https://github.com/npm/cacache/commit/bf88af04e50cca9b54041151139ffc1fd415e2dc)), closes [#37](https://github.com/npm/cacache/issues/37) - -### [15.0.3](https://github.com/npm/cacache/compare/v15.0.2...v15.0.3) (2020-04-28) - - -### Bug Fixes - -* actually remove move-concurrently dep ([29e6eec](https://github.com/npm/cacache/commit/29e6eec9fee73444ee09daf1c1be06ddd5fe57f6)) - -### [15.0.2](https://github.com/npm/cacache/compare/v15.0.1...v15.0.2) (2020-04-28) - - -### Bug Fixes - -* tacks should be a dev dependency ([93ec158](https://github.com/npm/cacache/commit/93ec15852f0fdf1753ea7f75b4b8926daf8a7565)) - -## [15.0.1](https://github.com/npm/cacache/compare/v15.0.0...v15.0.1) (2020-04-27) - -* **deps:** Use move-file instead of move-file-concurrently. ([92b125](https://github.com/npm/cacache/commit/92b1251a11b9848878b6c0d101b18bd8845acaa6)) - -## [15.0.0](https://github.com/npm/cacache/compare/v14.0.0...v15.0.0) (2020-02-18) - - -### ⚠ BREAKING CHANGES - -* drop figgy-pudding and use canonical option names. - -### Features - -* remove figgy-pudding ([57d11bc](https://github.com/npm/cacache/commit/57d11bce34f979247d1057d258acc204c4944491)) - -## [14.0.0](https://github.com/npm/cacache/compare/v13.0.1...v14.0.0) (2020-01-28) - - -### ⚠ BREAKING CHANGES - -* **deps:** bumps engines to >= 10 - -* **deps:** tar v6 and mkdirp v1 ([5a66e7a](https://github.com/npm/cacache/commit/5a66e7a)) - -### [13.0.1](https://github.com/npm/cacache/compare/v13.0.0...v13.0.1) (2019-09-30) - - -### Bug Fixes - -* **fix-owner:** chownr.sync quits on non-root uid ([08801be](https://github.com/npm/cacache/commit/08801be)) - -## [13.0.0](https://github.com/npm/cacache/compare/v12.0.3...v13.0.0) (2019-09-25) - - -### ⚠ BREAKING CHANGES - -* This subtly changes the streaming interface of -everything in cacache that streams, which is, well, everything in -cacache. Most users will probably not notice, but any code that -depended on stream behavior always being deferred until next tick will -need to adjust. 
- -The mississippi methods 'to', 'from', 'through', and so on, have been -replaced with their Minipass counterparts, and streaming interaction -with the file system is done via fs-minipass. - -The following modules are of interest here: - -- [minipass](http://npm.im/minipass) The core stream library. - -- [fs-minipass](http://npm.im/fs-minipass) Note that the 'WriteStream' - class from fs-minipass is _not_ a Minipass stream, but rather a plain - old EventEmitter that duck types as a Writable. - -- [minipass-collect](http://npm.im/minipass-collect) Gather up all the - data from a stream. Cacache only uses Collect.PassThrough, which is a - basic Minipass passthrough stream which emits a 'collect' event with - the completed data just before the 'end' event. - -- [minipass-pipeline](http://npm.im/minipass-pipeline) Connect one or - more streams into a pipe chain. Errors anywhere in the pipeline are - proxied down the chain and then up to the Pipeline object itself. - Writes go into the head, reads go to the tail. Used in place of - pump() and pumpify(). - -- [minipass-flush](http://npm.im/minipass-flush) A Minipass passthrough - stream that defers its 'end' event until after a flush() method has - completed (either calling the supplied callback, or returning a - promise.) Use in place of flush-write-stream (aka mississippi.to). - -Streams from through2, concat-stream, and the behavior provided by -end-of-stream are all implemented in Minipass itself. - -Features of interest to cacache, which make Minipass a particularly good -fit: - -- All of the 'endish' events are normalized, so we can just listen on - 'end' and know that finish, prefinish, and close will be handled as - well. -- Minipass doesn't waste time [containing - zalgo](https://blog.izs.me/2013/08/designing-apis-for-asynchrony). -- Minipass has built-in support for promises that indicate the end or - error: stream.promise(), stream.collect(), and stream.concat(). -- With reliable and consistent timing guarantees, much less - error-checking logic is required. We can be more confident that an - error is being thrown or emitted in the correct place, rather than in - a callback which is deferred, resulting in a hung promise or - uncaughtException. - -The biggest downside of Minipass is that it lacks some of the internal -characteristics of node-core streams, which many community modules use -to identify streams. They have no _writableState or _readableState -objects, or _read or _write methods. As a result, the is-stream module -(at least, at the time of this commit) doesn't recognize Minipass -streams as readable or writable streams. - -All in all, the changes required of downstream users should be minimal, -but are unlikely to be zero. Hence the semver major change. - -### Features - -* replace all streams with Minipass streams ([f4c0962](https://github.com/npm/cacache/commit/f4c0962)) -* **deps:** Add minipass and minipass-pipeline ([a6545a9](https://github.com/npm/cacache/commit/a6545a9)) -* **promise:** converted .resolve to native promise, converted .map and .reduce to native ([220c56d](https://github.com/npm/cacache/commit/220c56d)) -* **promise:** individually promisifing functions as needed ([74b939e](https://github.com/npm/cacache/commit/74b939e)) -* **promise:** moved .reject from bluebird to native promise ([1d56da1](https://github.com/npm/cacache/commit/1d56da1)) -* **promise:** removed .fromNode, removed .join ([9c457a0](https://github.com/npm/cacache/commit/9c457a0)) -* **promise:** removed .map, replaced with p-map. 
removed .try ([cc3ee05](https://github.com/npm/cacache/commit/cc3ee05)) -* **promise:** removed .tap ([0260f12](https://github.com/npm/cacache/commit/0260f12)) -* **promise:** removed .using/.disposer ([5d832f3](https://github.com/npm/cacache/commit/5d832f3)) -* **promise:** removed bluebird ([c21298c](https://github.com/npm/cacache/commit/c21298c)) -* **promise:** removed bluebird specific .catch calls ([28aeeac](https://github.com/npm/cacache/commit/28aeeac)) -* **promise:** replaced .reduce and .mapSeries ([478f5cb](https://github.com/npm/cacache/commit/478f5cb)) - -### [12.0.3](https://github.com/npm/cacache/compare/v12.0.2...v12.0.3) (2019-08-19) - - -### Bug Fixes - -* do not chown if not running as root ([2d80af9](https://github.com/npm/cacache/commit/2d80af9)) - - - -### [12.0.2](https://github.com/npm/cacache/compare/v12.0.1...v12.0.2) (2019-07-19) - - - -### [12.0.1](https://github.com/npm/cacache/compare/v12.0.0...v12.0.1) (2019-07-19) - -* **deps** Abstracted out `lib/util/infer-owner.js` to - [@npmcli/infer-owner](https://www.npmjs.com/package/@npmcli/infer-owner) - so that it could be more easily used in other parts of the npm CLI. - - -## [12.0.0](https://github.com/npm/cacache/compare/v11.3.3...v12.0.0) (2019-07-15) - - -### Features - -* infer uid/gid instead of accepting as options ([ac84d14](https://github.com/npm/cacache/commit/ac84d14)) -* **i18n:** add another error message ([676cb32](https://github.com/npm/cacache/commit/676cb32)) - - -### BREAKING CHANGES - -* the uid gid options are no longer respected or -necessary. As of this change, cacache will always match the cache -contents to the ownership of the cache directory (or its parent -directory), regardless of what the caller passes in. - -Reasoning: - -The number one reason to use a uid or gid option was to keep root-owned -files from causing problems in the cache. In npm's case, this meant -that CLI's ./lib/command.js had to work out the appropriate uid and gid, -then pass it to the libnpmcommand module, which had to in turn pass the -uid and gid to npm-registry-fetch, which then passed it to -make-fetch-happen, which passed it to cacache. (For package fetching, -pacote would be in that mix as well.) - -Added to that, `cacache.rm()` will actually _write_ a file into the -cache index, but has no way to accept an option so that its call to -entry-index.js will write the index with the appropriate uid/gid. -Little ownership bugs were all over the place, and tricky to trace -through. (Why should make-fetch-happen even care about accepting or -passing uids and gids? It's an http library.) - -This change allows us to keep the cache from having mixed ownership in -any situation. - -Of course, this _does_ mean that if you have a root-owned but -user-writable folder (for example, `/tmp`), then the cache will try to -chown everything to root. - -The solution is for the user to create a folder, make it user-owned, and -use that, rather than relying on cacache to create the root cache folder. - -If we decide to restore the uid/gid opts, and use ownership inference -only when uid/gid are unset, then take care to also make rm take an -option object, and pass it through to entry-index.js. 
- - - -### [11.3.3](https://github.com/npm/cacache/compare/v11.3.2...v11.3.3) (2019-06-17) - - -### Bug Fixes - -* **audit:** npm audit fix ([200a6d5](https://github.com/npm/cacache/commit/200a6d5)) -* **config:** Add ssri config 'error' option ([#146](https://github.com/npm/cacache/issues/146)) ([47de8f5](https://github.com/npm/cacache/commit/47de8f5)) -* **deps:** npm audit fix ([481a7dc](https://github.com/npm/cacache/commit/481a7dc)) -* **standard:** standard --fix ([7799149](https://github.com/npm/cacache/commit/7799149)) -* **write:** avoid another cb never called situation ([5156561](https://github.com/npm/cacache/commit/5156561)) - - - -<a name="11.3.2"></a> -## [11.3.2](https://github.com/npm/cacache/compare/v11.3.1...v11.3.2) (2018-12-21) - - -### Bug Fixes - -* **get:** make sure to handle errors in the .then ([b10bcd0](https://github.com/npm/cacache/commit/b10bcd0)) - - - -<a name="11.3.1"></a> -## [11.3.1](https://github.com/npm/cacache/compare/v11.3.0...v11.3.1) (2018-11-05) - - -### Bug Fixes - -* **get:** export hasContent.sync properly ([d76c920](https://github.com/npm/cacache/commit/d76c920)) - - - -<a name="11.3.0"></a> -# [11.3.0](https://github.com/npm/cacache/compare/v11.2.0...v11.3.0) (2018-11-05) - - -### Features - -* **get:** add sync API for reading ([db1e094](https://github.com/npm/cacache/commit/db1e094)) - - - -<a name="11.2.0"></a> -# [11.2.0](https://github.com/npm/cacache/compare/v11.1.0...v11.2.0) (2018-08-08) - - -### Features - -* **read:** add sync support to other internal read.js fns ([fe638b6](https://github.com/npm/cacache/commit/fe638b6)) - - - -<a name="11.1.0"></a> -# [11.1.0](https://github.com/npm/cacache/compare/v11.0.3...v11.1.0) (2018-08-01) - - -### Features - -* **read:** add sync support for low-level content read ([b43af83](https://github.com/npm/cacache/commit/b43af83)) - - - -<a name="11.0.3"></a> -## [11.0.3](https://github.com/npm/cacache/compare/v11.0.2...v11.0.3) (2018-08-01) - - -### Bug Fixes - -* **config:** add ssri config options ([#136](https://github.com/npm/cacache/issues/136)) ([10d5d9a](https://github.com/npm/cacache/commit/10d5d9a)) -* **perf:** refactor content.read to avoid lstats ([c5ac10e](https://github.com/npm/cacache/commit/c5ac10e)) -* **test:** oops when removing safe-buffer ([1950490](https://github.com/npm/cacache/commit/1950490)) - - - -<a name="11.0.2"></a> -## [11.0.2](https://github.com/npm/cacache/compare/v11.0.1...v11.0.2) (2018-05-07) - - -### Bug Fixes - -* **verify:** size param no longer lost in a verify ([#131](https://github.com/npm/cacache/issues/131)) ([c614a19](https://github.com/npm/cacache/commit/c614a19)), closes [#130](https://github.com/npm/cacache/issues/130) - - - -<a name="11.0.1"></a> -## [11.0.1](https://github.com/npm/cacache/compare/v11.0.0...v11.0.1) (2018-04-10) - - - -<a name="11.0.0"></a> -# [11.0.0](https://github.com/npm/cacache/compare/v10.0.4...v11.0.0) (2018-04-09) - - -### Features - -* **opts:** use figgy-pudding for opts ([#128](https://github.com/npm/cacache/issues/128)) ([33d4eed](https://github.com/npm/cacache/commit/33d4eed)) - - -### meta - -* drop support for node@4 ([529f347](https://github.com/npm/cacache/commit/529f347)) - - -### BREAKING CHANGES - -* node@4 is no longer supported - - - -<a name="10.0.4"></a> -## [10.0.4](https://github.com/npm/cacache/compare/v10.0.3...v10.0.4) (2018-02-16) - - - -<a name="10.0.3"></a> -## [10.0.3](https://github.com/npm/cacache/compare/v10.0.2...v10.0.3) (2018-02-16) - - -### Bug Fixes - -* **content:** rethrow aggregate errors 
as ENOENT ([fa918f5](https://github.com/npm/cacache/commit/fa918f5)) - - - -<a name="10.0.2"></a> -## [10.0.2](https://github.com/npm/cacache/compare/v10.0.1...v10.0.2) (2018-01-07) - - -### Bug Fixes - -* **ls:** deleted entries could cause a premature stream EOF ([347dc36](https://github.com/npm/cacache/commit/347dc36)) - - - -<a name="10.0.1"></a> -## [10.0.1](https://github.com/npm/cacache/compare/v10.0.0...v10.0.1) (2017-11-15) - - -### Bug Fixes - -* **move-file:** actually use the fallback to `move-concurrently` (#110) ([073fbe1](https://github.com/npm/cacache/commit/073fbe1)) - - - -<a name="10.0.0"></a> -# [10.0.0](https://github.com/npm/cacache/compare/v9.3.0...v10.0.0) (2017-10-23) - - -### Features - -* **license:** relicense to ISC (#111) ([fdbb4e5](https://github.com/npm/cacache/commit/fdbb4e5)) - - -### Performance Improvements - -* more copyFile benchmarks ([63787bb](https://github.com/npm/cacache/commit/63787bb)) - - -### BREAKING CHANGES - -* **license:** the license has been changed from CC0-1.0 to ISC. - - - -<a name="9.3.0"></a> -# [9.3.0](https://github.com/npm/cacache/compare/v9.2.9...v9.3.0) (2017-10-07) - - -### Features - -* **copy:** added cacache.get.copy api for fast copies (#107) ([067b5f6](https://github.com/npm/cacache/commit/067b5f6)) - - - -<a name="9.2.9"></a> -## [9.2.9](https://github.com/npm/cacache/compare/v9.2.8...v9.2.9) (2017-06-17) - - - -<a name="9.2.8"></a> -## [9.2.8](https://github.com/npm/cacache/compare/v9.2.7...v9.2.8) (2017-06-05) - - -### Bug Fixes - -* **ssri:** bump ssri for bugfix ([c3232ea](https://github.com/npm/cacache/commit/c3232ea)) - - - -<a name="9.2.7"></a> -## [9.2.7](https://github.com/npm/cacache/compare/v9.2.6...v9.2.7) (2017-06-05) - - -### Bug Fixes - -* **content:** make verified content completely read-only (#96) ([4131196](https://github.com/npm/cacache/commit/4131196)) - - - -<a name="9.2.6"></a> -## [9.2.6](https://github.com/npm/cacache/compare/v9.2.5...v9.2.6) (2017-05-31) - - -### Bug Fixes - -* **node:** update ssri to prevent old node 4 crash ([5209ffe](https://github.com/npm/cacache/commit/5209ffe)) - - - -<a name="9.2.5"></a> -## [9.2.5](https://github.com/npm/cacache/compare/v9.2.4...v9.2.5) (2017-05-25) - - -### Bug Fixes - -* **deps:** fix lockfile issues and bump ssri ([84e1d7e](https://github.com/npm/cacache/commit/84e1d7e)) - - - -<a name="9.2.4"></a> -## [9.2.4](https://github.com/npm/cacache/compare/v9.2.3...v9.2.4) (2017-05-24) - - -### Bug Fixes - -* **deps:** bumping deps ([bbccb12](https://github.com/npm/cacache/commit/bbccb12)) - - - -<a name="9.2.3"></a> -## [9.2.3](https://github.com/npm/cacache/compare/v9.2.2...v9.2.3) (2017-05-24) - - -### Bug Fixes - -* **rm:** stop crashing if content is missing on rm ([ac90bc0](https://github.com/npm/cacache/commit/ac90bc0)) - - - -<a name="9.2.2"></a> -## [9.2.2](https://github.com/npm/cacache/compare/v9.2.1...v9.2.2) (2017-05-14) - - -### Bug Fixes - -* **i18n:** lets pretend this didn't happen ([519b4ee](https://github.com/npm/cacache/commit/519b4ee)) - - - -<a name="9.2.1"></a> -## [9.2.1](https://github.com/npm/cacache/compare/v9.2.0...v9.2.1) (2017-05-14) - - -### Bug Fixes - -* **docs:** fixing translation messup ([bb9e4f9](https://github.com/npm/cacache/commit/bb9e4f9)) - - - -<a name="9.2.0"></a> -# [9.2.0](https://github.com/npm/cacache/compare/v9.1.0...v9.2.0) (2017-05-14) - - -### Features - -* **i18n:** add Spanish translation for API ([531f9a4](https://github.com/npm/cacache/commit/531f9a4)) - - - -<a name="9.1.0"></a> -# 
[9.1.0](https://github.com/npm/cacache/compare/v9.0.0...v9.1.0) (2017-05-14) - - -### Features - -* **i18n:** Add Spanish translation and i18n setup (#91) ([323b90c](https://github.com/npm/cacache/commit/323b90c)) - - - -<a name="9.0.0"></a> -# [9.0.0](https://github.com/npm/cacache/compare/v8.0.0...v9.0.0) (2017-04-28) - - -### Bug Fixes - -* **memoization:** actually use the LRU ([0e55dc9](https://github.com/npm/cacache/commit/0e55dc9)) - - -### Features - -* **memoization:** memoizers can be injected through opts.memoize (#90) ([e5614c7](https://github.com/npm/cacache/commit/e5614c7)) - - -### BREAKING CHANGES - -* **memoization:** If you were passing an object to opts.memoize, it will now be used as an injected memoization object. If you were only passing booleans and other non-objects through that option, no changes are needed. - - - -<a name="8.0.0"></a> -# [8.0.0](https://github.com/npm/cacache/compare/v7.1.0...v8.0.0) (2017-04-22) - - -### Features - -* **read:** change hasContent to return {sri, size} (#88) ([bad6c49](https://github.com/npm/cacache/commit/bad6c49)), closes [#87](https://github.com/npm/cacache/issues/87) - - -### BREAKING CHANGES - -* **read:** hasContent now returns an object with `{sri, size}` instead of `sri`. Use `result.sri` anywhere that needed the old return value. - - - -<a name="7.1.0"></a> -# [7.1.0](https://github.com/npm/cacache/compare/v7.0.5...v7.1.0) (2017-04-20) - - -### Features - -* **size:** handle content size info (#49) ([91230af](https://github.com/npm/cacache/commit/91230af)) - - - -<a name="7.0.5"></a> -## [7.0.5](https://github.com/npm/cacache/compare/v7.0.4...v7.0.5) (2017-04-18) - - -### Bug Fixes - -* **integrity:** new ssri with fixed integrity stream ([6d13e8e](https://github.com/npm/cacache/commit/6d13e8e)) -* **write:** wrap stuff in promises to improve errors ([3624fc5](https://github.com/npm/cacache/commit/3624fc5)) - - - -<a name="7.0.4"></a> -## [7.0.4](https://github.com/npm/cacache/compare/v7.0.3...v7.0.4) (2017-04-15) - - -### Bug Fixes - -* **fix-owner:** throw away ENOENTs on chownr ([d49bbcd](https://github.com/npm/cacache/commit/d49bbcd)) - - - -<a name="7.0.3"></a> -## [7.0.3](https://github.com/npm/cacache/compare/v7.0.2...v7.0.3) (2017-04-05) - - -### Bug Fixes - -* **read:** fixing error message for integrity verification failures ([9d4f0a5](https://github.com/npm/cacache/commit/9d4f0a5)) - - - -<a name="7.0.2"></a> -## [7.0.2](https://github.com/npm/cacache/compare/v7.0.1...v7.0.2) (2017-04-03) - - -### Bug Fixes - -* **integrity:** use EINTEGRITY error code and update ssri ([8dc2e62](https://github.com/npm/cacache/commit/8dc2e62)) - - - -<a name="7.0.1"></a> -## [7.0.1](https://github.com/npm/cacache/compare/v7.0.0...v7.0.1) (2017-04-03) - - -### Bug Fixes - -* **docs:** fix header name conflict in readme ([afcd456](https://github.com/npm/cacache/commit/afcd456)) - - - -<a name="7.0.0"></a> -# [7.0.0](https://github.com/npm/cacache/compare/v6.3.0...v7.0.0) (2017-04-03) - - -### Bug Fixes - -* **test:** fix content.write tests when running in docker ([d2e9b6a](https://github.com/npm/cacache/commit/d2e9b6a)) - - -### Features - -* **integrity:** subresource integrity support (#78) ([b1e731f](https://github.com/npm/cacache/commit/b1e731f)) - - -### BREAKING CHANGES - -* **integrity:** The entire API has been overhauled to use SRI hashes instead of digest/hashAlgorithm pairs. SRI hashes follow the Subresource Integrity standard and support strings and objects compatible with [`ssri`](https://npm.im/ssri). 
- -* This change bumps the index version, which will invalidate all previous index entries. Content entries will remain intact, and existing caches will automatically reuse any content from before this breaking change. - -* `cacache.get.info()`, `cacache.ls()`, and `cacache.ls.stream()` will now return objects that looks like this: - -``` -{ - key: String, - integrity: '<algorithm>-<base64hash>', - path: ContentPath, - time: Date<ms>, - metadata: Any -} -``` - -* `opts.digest` and `opts.hashAlgorithm` are obsolete for any API calls that used them. - -* Anywhere `opts.digest` was accepted, `opts.integrity` is now an option. Any valid SRI hash is accepted here -- multiple hash entries will be resolved according to the standard: first, the "strongest" hash algorithm will be picked, and then each of the entries for that algorithm will be matched against the content. Content will be validated if *any* of the entries match (so, a single integrity string can be used for multiple "versions" of the same document/data). - -* `put.byDigest()`, `put.stream.byDigest`, `get.byDigest()` and `get.stream.byDigest()` now expect an SRI instead of a `digest` + `opts.hashAlgorithm` pairing. - -* `get.hasContent()` now expects an integrity hash instead of a digest. If content exists, it will return the specific single integrity hash that was found in the cache. - -* `verify()` has learned to handle integrity-based caches, and forgotten how to handle old-style cache indices due to the format change. - -* `cacache.rm.content()` now expects an integrity hash instead of a hex digest. - - - -<a name="6.3.0"></a> -# [6.3.0](https://github.com/npm/cacache/compare/v6.2.0...v6.3.0) (2017-04-01) - - -### Bug Fixes - -* **fixOwner:** ignore EEXIST race condition from mkdirp ([4670e9b](https://github.com/npm/cacache/commit/4670e9b)) -* **index:** ignore index removal races when inserting ([b9d2fa2](https://github.com/npm/cacache/commit/b9d2fa2)) -* **memo:** use lru-cache for better mem management (#75) ([d8ac5aa](https://github.com/npm/cacache/commit/d8ac5aa)) - - -### Features - -* **dependencies:** Switch to move-concurrently (#77) ([dc6482d](https://github.com/npm/cacache/commit/dc6482d)) - - - -<a name="6.2.0"></a> -# [6.2.0](https://github.com/npm/cacache/compare/v6.1.2...v6.2.0) (2017-03-15) - - -### Bug Fixes - -* **index:** additional bucket entry verification with checksum (#72) ([f8e0f25](https://github.com/npm/cacache/commit/f8e0f25)) -* **verify:** return fixOwner.chownr promise ([6818521](https://github.com/npm/cacache/commit/6818521)) - - -### Features - -* **tmp:** safe tmp dir creation/management util (#73) ([c42da71](https://github.com/npm/cacache/commit/c42da71)) - - - -<a name="6.1.2"></a> -## [6.1.2](https://github.com/npm/cacache/compare/v6.1.1...v6.1.2) (2017-03-13) - - -### Bug Fixes - -* **index:** set default hashAlgorithm ([d6eb2f0](https://github.com/npm/cacache/commit/d6eb2f0)) - - - -<a name="6.1.1"></a> -## [6.1.1](https://github.com/npm/cacache/compare/v6.1.0...v6.1.1) (2017-03-13) - - -### Bug Fixes - -* **coverage:** bumping coverage for verify (#71) ([0b7faf6](https://github.com/npm/cacache/commit/0b7faf6)) -* **deps:** glob should have been a regular dep :< ([0640bc4](https://github.com/npm/cacache/commit/0640bc4)) - - - -<a name="6.1.0"></a> -# [6.1.0](https://github.com/npm/cacache/compare/v6.0.2...v6.1.0) (2017-03-12) - - -### Bug Fixes - -* **coverage:** more coverage for content reads (#70) ([ef4f70a](https://github.com/npm/cacache/commit/ef4f70a)) -* **tests:** use safe-buffer 
because omfg (#69) ([6ab8132](https://github.com/npm/cacache/commit/6ab8132)) - - -### Features - -* **rm:** limited rm.all and fixed bugs (#66) ([d5d25ba](https://github.com/npm/cacache/commit/d5d25ba)), closes [#66](https://github.com/npm/cacache/issues/66) -* **verify:** tested, working cache verifier/gc (#68) ([45ad77a](https://github.com/npm/cacache/commit/45ad77a)) - - - -<a name="6.0.2"></a> -## [6.0.2](https://github.com/npm/cacache/compare/v6.0.1...v6.0.2) (2017-03-11) - - -### Bug Fixes - -* **index:** segment cache items with another subbucket (#64) ([c3644e5](https://github.com/npm/cacache/commit/c3644e5)) - - - -<a name="6.0.1"></a> -## [6.0.1](https://github.com/npm/cacache/compare/v6.0.0...v6.0.1) (2017-03-05) - - -### Bug Fixes - -* **docs:** Missed spots in README ([8ffb7fa](https://github.com/npm/cacache/commit/8ffb7fa)) - - - -<a name="6.0.0"></a> -# [6.0.0](https://github.com/npm/cacache/compare/v5.0.3...v6.0.0) (2017-03-05) - - -### Bug Fixes - -* **api:** keep memo cache mostly-internal ([2f72d0a](https://github.com/npm/cacache/commit/2f72d0a)) -* **content:** use the rest of the string, not the whole string ([fa8f3c3](https://github.com/npm/cacache/commit/fa8f3c3)) -* **deps:** removed `format-number@2.0.2` ([1187791](https://github.com/npm/cacache/commit/1187791)) -* **deps:** removed inflight@1.0.6 ([0d1819c](https://github.com/npm/cacache/commit/0d1819c)) -* **deps:** rimraf@2.6.1 ([9efab6b](https://github.com/npm/cacache/commit/9efab6b)) -* **deps:** standard@9.0.0 ([4202cba](https://github.com/npm/cacache/commit/4202cba)) -* **deps:** tap@10.3.0 ([aa03088](https://github.com/npm/cacache/commit/aa03088)) -* **deps:** weallcontribute@1.0.8 ([ad4f4dc](https://github.com/npm/cacache/commit/ad4f4dc)) -* **docs:** add security note to hashKey ([03f81ba](https://github.com/npm/cacache/commit/03f81ba)) -* **hashes:** change default hashAlgorithm to sha512 ([ea00ba6](https://github.com/npm/cacache/commit/ea00ba6)) -* **hashes:** missed a spot for hashAlgorithm defaults ([45997d8](https://github.com/npm/cacache/commit/45997d8)) -* **index:** add length header before JSON for verification ([fb8cb4d](https://github.com/npm/cacache/commit/fb8cb4d)) -* **index:** change index filenames to sha1s of keys ([bbc5fca](https://github.com/npm/cacache/commit/bbc5fca)) -* **index:** who cares about race conditions anyway ([b1d3888](https://github.com/npm/cacache/commit/b1d3888)) -* **perf:** bulk-read get+read for massive speed ([d26cdf9](https://github.com/npm/cacache/commit/d26cdf9)) -* **perf:** use bulk file reads for index reads ([79a8891](https://github.com/npm/cacache/commit/79a8891)) -* **put-stream:** remove tmp file on stream insert error ([65f6632](https://github.com/npm/cacache/commit/65f6632)) -* **put-stream:** robustified and predictibilized ([daf9e08](https://github.com/npm/cacache/commit/daf9e08)) -* **put-stream:** use new promise API for moves ([1d36013](https://github.com/npm/cacache/commit/1d36013)) -* **readme:** updated to reflect new default hashAlgo ([c60a2fa](https://github.com/npm/cacache/commit/c60a2fa)) -* **verify:** tiny typo fix ([db22d05](https://github.com/npm/cacache/commit/db22d05)) - - -### Features - -* **api:** converted external api ([7bf032f](https://github.com/npm/cacache/commit/7bf032f)) -* **cacache:** exported clearMemoized() utility ([8d2c5b6](https://github.com/npm/cacache/commit/8d2c5b6)) -* **cache:** add versioning to content and index ([31bc549](https://github.com/npm/cacache/commit/31bc549)) -* **content:** collate content files into 
subdirs ([c094d9f](https://github.com/npm/cacache/commit/c094d9f)) -* **deps:** `@npmcorp/move@1.0.0` ([bdd00bf](https://github.com/npm/cacache/commit/bdd00bf)) -* **deps:** `bluebird@3.4.7` ([3a17aff](https://github.com/npm/cacache/commit/3a17aff)) -* **deps:** `promise-inflight@1.0.1` ([a004fe6](https://github.com/npm/cacache/commit/a004fe6)) -* **get:** added memoization support for get ([c77d794](https://github.com/npm/cacache/commit/c77d794)) -* **get:** export hasContent ([2956ec3](https://github.com/npm/cacache/commit/2956ec3)) -* **index:** add hashAlgorithm and format insert ret val ([b639746](https://github.com/npm/cacache/commit/b639746)) -* **index:** collate index files into subdirs ([e8402a5](https://github.com/npm/cacache/commit/e8402a5)) -* **index:** promisify entry index ([cda3335](https://github.com/npm/cacache/commit/cda3335)) -* **memo:** added memoization lib ([da07b92](https://github.com/npm/cacache/commit/da07b92)) -* **memo:** export memoization api ([954b1b3](https://github.com/npm/cacache/commit/954b1b3)) -* **move-file:** add move fallback for weird errors ([5cf4616](https://github.com/npm/cacache/commit/5cf4616)) -* **perf:** bulk content write api ([51b536e](https://github.com/npm/cacache/commit/51b536e)) -* **put:** added memoization support to put ([b613a70](https://github.com/npm/cacache/commit/b613a70)) -* **read:** switched to promises ([a869362](https://github.com/npm/cacache/commit/a869362)) -* **rm:** added memoization support to rm ([4205cf0](https://github.com/npm/cacache/commit/4205cf0)) -* **rm:** switched to promises ([a000d24](https://github.com/npm/cacache/commit/a000d24)) -* **util:** promise-inflight ownership fix requests ([9517cd7](https://github.com/npm/cacache/commit/9517cd7)) -* **util:** use promises for api ([ae204bb](https://github.com/npm/cacache/commit/ae204bb)) -* **verify:** converted to Promises ([f0b3974](https://github.com/npm/cacache/commit/f0b3974)) - - -### BREAKING CHANGES - -* cache: index/content directories are now versioned. Previous caches are no longer compatible and cannot be migrated. -* util: fix-owner now uses Promises instead of callbacks -* index: Previously-generated index entries are no longer compatible and the index must be regenerated. -* index: The index format has changed and previous caches are no longer compatible. Existing caches will need to be regenerated. -* hashes: Default hashAlgorithm changed from sha1 to sha512. If you -rely on the prior setting, pass `opts.hashAlgorithm` in explicitly. -* content: Previously-generated content directories are no longer compatible -and must be regenerated. 
-* verify: API is now promise-based -* read: Switches to a Promise-based API and removes callback stuff -* rm: Switches to a Promise-based API and removes callback stuff -* index: this changes the API to work off promises instead of callbacks -* api: this means we are going all in on promises now diff --git a/node_modules/cacache/README.md b/node_modules/cacache/README.md deleted file mode 100644 index 3f70f49a40b6c..0000000000000 --- a/node_modules/cacache/README.md +++ /dev/null @@ -1,669 +0,0 @@ -# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/npm/cacache.svg)](https://travis-ci.org/npm/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/cacache?svg=true)](https://ci.appveyor.com/project/npm/cacache) [![Coverage Status](https://coveralls.io/repos/github/npm/cacache/badge.svg?branch=latest)](https://coveralls.io/github/npm/cacache?branch=latest) - -[`cacache`](https://github.com/npm/cacache) is a Node.js library for managing -local key and content address caches. It's really fast, really good at -concurrency, and it will never give you corrupted data, even if cache files -get corrupted or manipulated. - -On systems that support user and group settings on files, cacache will -match the `uid` and `gid` values to the folder where the cache lives, even -when running as `root`. - -It was written to be used as [npm](https://npm.im)'s local cache, but can -just as easily be used on its own. - -## Install - -`$ npm install --save cacache` - -## Table of Contents - -* [Example](#example) -* [Features](#features) -* [Contributing](#contributing) -* [API](#api) - * [Using localized APIs](#localized-api) - * Reading - * [`ls`](#ls) - * [`ls.stream`](#ls-stream) - * [`get`](#get-data) - * [`get.stream`](#get-stream) - * [`get.info`](#get-info) - * [`get.hasContent`](#get-hasContent) - * Writing - * [`put`](#put-data) - * [`put.stream`](#put-stream) - * [`rm.all`](#rm-all) - * [`rm.entry`](#rm-entry) - * [`rm.content`](#rm-content) - * Utilities - * [`clearMemoized`](#clear-memoized) - * [`tmp.mkdir`](#tmp-mkdir) - * [`tmp.withTmp`](#with-tmp) - * Integrity - * [Subresource Integrity](#integrity) - * [`verify`](#verify) - * [`verify.lastRun`](#verify-last-run) - -### Example - -```javascript -const cacache = require('cacache') -const fs = require('fs') - -const tarball = '/path/to/mytar.tgz' -const cachePath = '/tmp/my-toy-cache' -const key = 'my-unique-key-1234' - -// Cache it! Use `cachePath` as the root of the content cache -cacache.put(cachePath, key, '10293801983029384').then(integrity => { - console.log(`Saved content to ${cachePath}.`) -}) - -const destination = '/tmp/mytar.tgz' - -// Copy the contents out of the cache and into their destination! -// But this time, use stream instead! -cacache.get.stream( - cachePath, key -).pipe( - fs.createWriteStream(destination) -).on('finish', () => { - console.log('done extracting!') -}) - -// The same thing, but skip the key index. 
-cacache.get.byDigest(cachePath, integrityHash).then(data => { - fs.writeFile(destination, data, err => { - console.log('tarball data fetched based on its sha512sum and written out!') - }) -}) -``` - -### Features - -* Extraction by key or by content address (shasum, etc) -* [Subresource Integrity](#integrity) web standard support -* Multi-hash support - safely host sha1, sha512, etc, in a single cache -* Automatic content deduplication -* Fault tolerance (immune to corruption, partial writes, process races, etc) -* Consistency guarantees on read and write (full data verification) -* Lockless, high-concurrency cache access -* Streaming support -* Promise support -* Fast -- sub-millisecond reads and writes including verification -* Arbitrary metadata storage -* Garbage collection and additional offline verification -* Thorough test coverage -* There's probably a bloom filter in there somewhere. Those are cool, right? 🤔 - -### Contributing - -The cacache team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear. - -All participants and maintainers in this project are expected to follow [Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other. - -Please refer to the [Changelog](CHANGELOG.md) for project history details, too. - -Happy hacking! - -### API - -#### <a name="ls"></a> `> cacache.ls(cache) -> Promise<Object>` - -Lists info for all entries currently in the cache as a single large object. Each -entry in the object will be keyed by the unique index key, with corresponding -[`get.info`](#get-info) objects as the values. - -##### Example - -```javascript -cacache.ls(cachePath).then(console.log) -// Output -{ - 'my-thing': { - key: 'my-thing', - integrity: 'sha512-BaSe64/EnCoDED+HAsh==' - path: '.testcache/content/deadbeef', // joined with `cachePath` - time: 12345698490, - size: 4023948, - metadata: { - name: 'blah', - version: '1.2.3', - description: 'this was once a package but now it is my-thing' - } - }, - 'other-thing': { - key: 'other-thing', - integrity: 'sha1-ANothER+hasH=', - path: '.testcache/content/bada55', - time: 11992309289, - size: 111112 - } -} -``` - -#### <a name="ls-stream"></a> `> cacache.ls.stream(cache) -> Readable` - -Lists info for all entries currently in the cache as a single large object. - -This works just like [`ls`](#ls), except [`get.info`](#get-info) entries are -returned as `'data'` events on the returned stream. - -##### Example - -```javascript -cacache.ls.stream(cachePath).on('data', console.log) -// Output -{ - key: 'my-thing', - integrity: 'sha512-BaSe64HaSh', - path: '.testcache/content/deadbeef', // joined with `cachePath` - time: 12345698490, - size: 13423, - metadata: { - name: 'blah', - version: '1.2.3', - description: 'this was once a package but now it is my-thing' - } -} - -{ - key: 'other-thing', - integrity: 'whirlpool-WoWSoMuchSupport', - path: '.testcache/content/bada55', - time: 11992309289, - size: 498023984029 -} - -{ - ... -} -``` - -#### <a name="get-data"></a> `> cacache.get(cache, key, [opts]) -> Promise({data, metadata, integrity})` - -Returns an object with the cached data, digest, and metadata identified by -`key`. 
The `data` property of this object will be a `Buffer` instance that -presumably holds some data that means something to you. I'm sure you know what -to do with it! cacache just won't care. - -`integrity` is a [Subresource -Integrity](#integrity) -string. That is, a string that can be used to verify `data`, which looks like -`<hash-algorithm>-<base64-integrity-hash>`. - -If there is no content identified by `key`, or if the locally-stored data does -not pass the validity checksum, the promise will be rejected. - -A sub-function, `get.byDigest` may be used for identical behavior, except lookup -will happen by integrity hash, bypassing the index entirely. This version of the -function *only* returns `data` itself, without any wrapper. - -See: [options](#get-options) - -##### Note - -This function loads the entire cache entry into memory before returning it. If -you're dealing with Very Large data, consider using [`get.stream`](#get-stream) -instead. - -##### Example - -```javascript -// Look up by key -cache.get(cachePath, 'my-thing').then(console.log) -// Output: -{ - metadata: { - thingName: 'my' - }, - integrity: 'sha512-BaSe64HaSh', - data: Buffer#<deadbeef>, - size: 9320 -} - -// Look up by digest -cache.get.byDigest(cachePath, 'sha512-BaSe64HaSh').then(console.log) -// Output: -Buffer#<deadbeef> -``` - -#### <a name="get-stream"></a> `> cacache.get.stream(cache, key, [opts]) -> Readable` - -Returns a [Readable Stream](https://nodejs.org/api/stream.html#stream_readable_streams) of the cached data identified by `key`. - -If there is no content identified by `key`, or if the locally-stored data does -not pass the validity checksum, an error will be emitted. - -`metadata` and `integrity` events will be emitted before the stream closes, if -you need to collect that extra data about the cached entry. - -A sub-function, `get.stream.byDigest` may be used for identical behavior, -except lookup will happen by integrity hash, bypassing the index entirely. This -version does not emit the `metadata` and `integrity` events at all. - -See: [options](#get-options) - -##### Example - -```javascript -// Look up by key -cache.get.stream( - cachePath, 'my-thing' -).on('metadata', metadata => { - console.log('metadata:', metadata) -}).on('integrity', integrity => { - console.log('integrity:', integrity) -}).pipe( - fs.createWriteStream('./x.tgz') -) -// Outputs: -metadata: { ... } -integrity: 'sha512-SoMeDIGest+64==' - -// Look up by digest -cache.get.stream.byDigest( - cachePath, 'sha512-SoMeDIGest+64==' -).pipe( - fs.createWriteStream('./x.tgz') -) -``` - -#### <a name="get-info"></a> `> cacache.get.info(cache, key) -> Promise` - -Looks up `key` in the cache index, returning information about the entry if -one exists. - -##### Fields - -* `key` - Key the entry was looked up under. Matches the `key` argument. -* `integrity` - [Subresource Integrity hash](#integrity) for the content this entry refers to. -* `path` - Filesystem path where content is stored, joined with `cache` argument. -* `time` - Timestamp the entry was first added on. -* `metadata` - User-assigned metadata associated with the entry/content. 
- -##### Example - -```javascript -cacache.get.info(cachePath, 'my-thing').then(console.log) - -// Output -{ - key: 'my-thing', - integrity: 'sha256-MUSTVERIFY+ALL/THINGS==' - path: '.testcache/content/deadbeef', - time: 12345698490, - size: 849234, - metadata: { - name: 'blah', - version: '1.2.3', - description: 'this was once a package but now it is my-thing' - } -} -``` - -#### <a name="get-hasContent"></a> `> cacache.get.hasContent(cache, integrity) -> Promise` - -Looks up a [Subresource Integrity hash](#integrity) in the cache. If content -exists for this `integrity`, it will return an object, with the specific single integrity hash -that was found in `sri` key, and the size of the found content as `size`. If no content exists for this integrity, it will return `false`. - -##### Example - -```javascript -cacache.get.hasContent(cachePath, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log) - -// Output -{ - sri: { - source: 'sha256-MUSTVERIFY+ALL/THINGS==', - algorithm: 'sha256', - digest: 'MUSTVERIFY+ALL/THINGS==', - options: [] - }, - size: 9001 -} - -cacache.get.hasContent(cachePath, 'sha521-NOT+IN/CACHE==').then(console.log) - -// Output -false -``` - -##### <a name="get-options"></a> Options - -##### `opts.integrity` -If present, the pre-calculated digest for the inserted content. If this option -is provided and does not match the post-insertion digest, insertion will fail -with an `EINTEGRITY` error. - -##### `opts.memoize` - -Default: null - -If explicitly truthy, cacache will read from memory and memoize data on bulk read. If `false`, cacache will read from disk data. Reader functions by default read from in-memory cache. - -##### `opts.size` -If provided, the data stream will be verified to check that enough data was -passed through. If there's more or less data than expected, insertion will fail -with an `EBADSIZE` error. - - -#### <a name="put-data"></a> `> cacache.put(cache, key, data, [opts]) -> Promise` - -Inserts data passed to it into the cache. The returned Promise resolves with a -digest (generated according to [`opts.algorithms`](#optsalgorithms)) after the -cache entry has been successfully written. - -See: [options](#put-options) - -##### Example - -```javascript -fetch( - 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz' -).then(data => { - return cacache.put(cachePath, 'registry.npmjs.org|cacache@1.0.0', data) -}).then(integrity => { - console.log('integrity hash is', integrity) -}) -``` - -#### <a name="put-stream"></a> `> cacache.put.stream(cache, key, [opts]) -> Writable` - -Returns a [Writable -Stream](https://nodejs.org/api/stream.html#stream_writable_streams) that inserts -data written to it into the cache. Emits an `integrity` event with the digest of -written contents when it succeeds. - -See: [options](#put-options) - -##### Example - -```javascript -request.get( - 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz' -).pipe( - cacache.put.stream( - cachePath, 'registry.npmjs.org|cacache@1.0.0' - ).on('integrity', d => console.log(`integrity digest is ${d}`)) -) -``` - -##### <a name="put-options"></a> Options - -##### `opts.metadata` - -Arbitrary metadata to be attached to the inserted key. - -##### `opts.size` - -If provided, the data stream will be verified to check that enough data was -passed through. If there's more or less data than expected, insertion will fail -with an `EBADSIZE` error. - -##### `opts.integrity` - -If present, the pre-calculated digest for the inserted content. 
If this option -is provided and does not match the post-insertion digest, insertion will fail -with an `EINTEGRITY` error. - -`algorithms` has no effect if this option is present. - -##### `opts.algorithms` - -Default: ['sha512'] - -Hashing algorithms to use when calculating the [subresource integrity -digest](#integrity) -for inserted data. Can use any algorithm listed in `crypto.getHashes()` or -`'omakase'`/`'お任せします'` to pick a random hash algorithm on each insertion. You -may also use any anagram of `'modnar'` to use this feature. - -Currently only supports one algorithm at a time (i.e., an array length of -exactly `1`). Has no effect if `opts.integrity` is present. - -##### `opts.memoize` - -Default: null - -If provided, cacache will memoize the given cache insertion in memory, bypassing -any filesystem checks for that key or digest in future cache fetches. Nothing -will be written to the in-memory cache unless this option is explicitly truthy. - -If `opts.memoize` is an object or a `Map`-like (that is, an object with `get` -and `set` methods), it will be written to instead of the global memoization -cache. - -Reading from disk data can be forced by explicitly passing `memoize: false` to -the reader functions, but their default will be to read from memory. - -##### `opts.tmpPrefix` -Default: null - -Prefix to append on the temporary directory name inside the cache's tmp dir. - -#### <a name="rm-all"></a> `> cacache.rm.all(cache) -> Promise` - -Clears the entire cache. Mainly by blowing away the cache directory itself. - -##### Example - -```javascript -cacache.rm.all(cachePath).then(() => { - console.log('THE APOCALYPSE IS UPON US 😱') -}) -``` - -#### <a name="rm-entry"></a> `> cacache.rm.entry(cache, key) -> Promise` - -Alias: `cacache.rm` - -Removes the index entry for `key`. Content will still be accessible if -requested directly by content address ([`get.stream.byDigest`](#get-stream)). - -To remove the content itself (which might still be used by other entries), use -[`rm.content`](#rm-content). Or, to safely vacuum any unused content, use -[`verify`](#verify). - -##### Example - -```javascript -cacache.rm.entry(cachePath, 'my-thing').then(() => { - console.log('I did not like it anyway') -}) -``` - -#### <a name="rm-content"></a> `> cacache.rm.content(cache, integrity) -> Promise` - -Removes the content identified by `integrity`. Any index entries referring to it -will not be usable again until the content is re-added to the cache with an -identical digest. - -##### Example - -```javascript -cacache.rm.content(cachePath, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => { - console.log('data for my-thing is gone!') -}) -``` - -#### <a name="clear-memoized"></a> `> cacache.clearMemoized()` - -Completely resets the in-memory entry cache. - -#### <a name="tmp-mkdir"></a> `> tmp.mkdir(cache, opts) -> Promise<Path>` - -Returns a unique temporary directory inside the cache's `tmp` dir. This -directory will use the same safe user assignment that all the other stuff use. - -Once the directory is made, it's the user's responsibility that all files -within are given the appropriate `gid`/`uid` ownership settings to match -the rest of the cache. If not, you can ask cacache to do it for you by -calling [`tmp.fix()`](#tmp-fix), which will fix all tmp directory -permissions. 
- -If you want automatic cleanup of this directory, use -[`tmp.withTmp()`](#with-tpm) - -See: [options](#tmp-options) - -##### Example - -```javascript -cacache.tmp.mkdir(cache).then(dir => { - fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...) -}) -``` - -#### <a name="tmp-fix"></a> `> tmp.fix(cache) -> Promise` - -Sets the `uid` and `gid` properties on all files and folders within the tmp -folder to match the rest of the cache. - -Use this after manually writing files into [`tmp.mkdir`](#tmp-mkdir) or -[`tmp.withTmp`](#with-tmp). - -##### Example - -```javascript -cacache.tmp.mkdir(cache).then(dir => { - writeFile(path.join(dir, 'file'), someData).then(() => { - // make sure we didn't just put a root-owned file in the cache - cacache.tmp.fix().then(() => { - // all uids and gids match now - }) - }) -}) -``` - -#### <a name="with-tmp"></a> `> tmp.withTmp(cache, opts, cb) -> Promise` - -Creates a temporary directory with [`tmp.mkdir()`](#tmp-mkdir) and calls `cb` -with it. The created temporary directory will be removed when the return value -of `cb()` resolves, the tmp directory will be automatically deleted once that -promise completes. - -The same caveats apply when it comes to managing permissions for the tmp dir's -contents. - -See: [options](#tmp-options) - -##### Example - -```javascript -cacache.tmp.withTmp(cache, dir => { - return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...) -}).then(() => { - // `dir` no longer exists -}) -``` - -##### <a name="tmp-options"></a> Options - -##### `opts.tmpPrefix` -Default: null - -Prefix to append on the temporary directory name inside the cache's tmp dir. - -#### <a name="integrity"></a> Subresource Integrity Digests - -For content verification and addressing, cacache uses strings following the -[Subresource -Integrity spec](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity). -That is, any time cacache expects an `integrity` argument or option, it -should be in the format `<hashAlgorithm>-<base64-hash>`. - -One deviation from the current spec is that cacache will support any hash -algorithms supported by the underlying Node.js process. You can use -`crypto.getHashes()` to see which ones you can use. - -##### Generating Digests Yourself - -If you have an existing content shasum, they are generally formatted as a -hexadecimal string (that is, a sha1 would look like: -`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`). In order to be compatible with -cacache, you'll need to convert this to an equivalent subresource integrity -string. For this example, the corresponding hash would be: -`sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`. - -If you want to generate an integrity string yourself for existing data, you can -use something like this: - -```javascript -const crypto = require('crypto') -const hashAlgorithm = 'sha512' -const data = 'foobarbaz' - -const integrity = ( - hashAlgorithm + - '-' + - crypto.createHash(hashAlgorithm).update(data).digest('base64') -) -``` - -You can also use [`ssri`](https://npm.im/ssri) to have a richer set of functionality -around SRI strings, including generation, parsing, and translating from existing -hex-formatted strings. - -#### <a name="verify"></a> `> cacache.verify(cache, opts) -> Promise` - -Checks out and fixes up your cache: - -* Cleans up corrupted or invalid index entries. -* Custom entry filtering options. -* Garbage collects any content entries not referenced by the index. -* Checks integrity for all content entries and removes invalid content. 
-* Fixes cache ownership. -* Removes the `tmp` directory in the cache and all its contents. - -When it's done, it'll return an object with various stats about the verification -process, including amount of storage reclaimed, number of valid entries, number -of entries removed, etc. - -##### <a name="verify-options"></a> Options - -##### `opts.concurrency` - -Default: 20 - -Number of concurrently read files in the filesystem while doing clean up. - -##### `opts.filter` -Receives a formatted entry. Return false to remove it. -Note: might be called more than once on the same entry. - -##### `opts.log` -Custom logger function: -``` - log: { silly () {} } - log.silly('verify', 'verifying cache at', cache) -``` - -##### Example - -```sh -echo somegarbage >> $CACHEPATH/content/deadbeef -``` - -```javascript -cacache.verify(cachePath).then(stats => { - // deadbeef collected, because of invalid checksum. - console.log('cache is much nicer now! stats:', stats) -}) -``` - -#### <a name="verify-last-run"></a> `> cacache.verify.lastRun(cache) -> Promise` - -Returns a `Date` representing the last time `cacache.verify` was run on `cache`. - -##### Example - -```javascript -cacache.verify(cachePath).then(() => { - cacache.verify.lastRun(cachePath).then(lastTime => { - console.log('cacache.verify was last called on' + lastTime) - }) -}) -``` diff --git a/node_modules/cacache/get.js b/node_modules/cacache/get.js index b6bae1e504eba..fe710bbd68def 100644 --- a/node_modules/cacache/get.js +++ b/node_modules/cacache/get.js @@ -32,18 +32,18 @@ function getData (byDigest, cache, key, opts = {}) { metadata: memoized.entry.metadata, data: memoized.data, integrity: memoized.entry.integrity, - size: memoized.entry.size + size: memoized.entry.size, } ) } return (byDigest ? Promise.resolve(null) : index.find(cache, key, opts)).then( (entry) => { - if (!entry && !byDigest) { + if (!entry && !byDigest) throw new index.NotFoundError(cache, key) - } + return read(cache, byDigest ? key : entry.integrity, { integrity, - size + size, }) .then((data) => byDigest @@ -52,15 +52,15 @@ function getData (byDigest, cache, key, opts = {}) { data, metadata: entry.metadata, size: entry.size, - integrity: entry.integrity + integrity: entry.integrity, } ) .then((res) => { - if (memoize && byDigest) { + if (memoize && byDigest) memo.put.byDigest(cache, key, res, opts) - } else if (memoize) { + else if (memoize) memo.put(cache, entry, res.data, opts) - } + return res }) } @@ -86,16 +86,16 @@ function getDataSync (byDigest, cache, key, opts = {}) { metadata: memoized.entry.metadata, data: memoized.data, integrity: memoized.entry.integrity, - size: memoized.entry.size + size: memoized.entry.size, } } const entry = !byDigest && index.find.sync(cache, key, opts) - if (!entry && !byDigest) { + if (!entry && !byDigest) throw new index.NotFoundError(cache, key) - } + const data = read.sync(cache, byDigest ? key : entry.integrity, { integrity: integrity, - size: size + size: size, }) const res = byDigest ? 
data @@ -103,13 +103,13 @@ function getDataSync (byDigest, cache, key, opts = {}) { metadata: entry.metadata, data: data, size: entry.size, - integrity: entry.integrity + integrity: entry.integrity, } - if (memoize && byDigest) { + if (memoize && byDigest) memo.put.byDigest(cache, key, res, opts) - } else if (memoize) { + else if (memoize) memo.put(cache, entry, res.data, opts) - } + return res } @@ -129,17 +129,16 @@ const getMemoizedStream = (memoized) => { function getStream (cache, key, opts = {}) { const { memoize, size } = opts const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { + if (memoized && memoize !== false) return getMemoizedStream(memoized) - } const stream = new Pipeline() index .find(cache, key) .then((entry) => { - if (!entry) { + if (!entry) throw new index.NotFoundError(cache, key) - } + stream.emit('metadata', entry.metadata) stream.emit('integrity', entry.integrity) stream.emit('size', entry.size) @@ -178,9 +177,9 @@ function getStreamDigest (cache, integrity, opts = {}) { return stream } else { const stream = read.readStream(cache, integrity, opts) - if (!memoize) { + if (!memoize) return stream - } + const memoStream = new Collect.PassThrough() memoStream.on('collect', data => memo.put.byDigest( cache, @@ -197,11 +196,10 @@ module.exports.info = info function info (cache, key, opts = {}) { const { memoize } = opts const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { + if (memoized && memoize !== false) return Promise.resolve(memoized.entry) - } else { + else return index.find(cache, key) - } } module.exports.hasContent = read.hasContent @@ -224,9 +222,9 @@ function copy (byDigest, cache, key, dest, opts = {}) { ? Promise.resolve(null) : index.find(cache, key, opts) ).then((entry) => { - if (!entry && !byDigest) { + if (!entry && !byDigest) throw new index.NotFoundError(cache, key) - } + return read .copy(cache, byDigest ? 
key : entry.integrity, dest, opts) .then(() => { @@ -235,7 +233,7 @@ function copy (byDigest, cache, key, dest, opts = {}) { : { metadata: entry.metadata, size: entry.size, - integrity: entry.integrity + integrity: entry.integrity, } }) }) @@ -248,7 +246,7 @@ function copy (byDigest, cache, key, dest, opts = {}) { : { metadata: res.metadata, size: res.size, - integrity: res.integrity + integrity: res.integrity, } }) }) diff --git a/node_modules/cacache/index.js b/node_modules/cacache/index.js index 08ba14835ab5a..c8c52b0417dea 100644 --- a/node_modules/cacache/index.js +++ b/node_modules/cacache/index.js @@ -7,6 +7,11 @@ const rm = require('./rm.js') const verify = require('./verify.js') const { clearMemoized } = require('./lib/memoization.js') const tmp = require('./lib/util/tmp.js') +const index = require('./lib/entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert module.exports.ls = ls module.exports.ls.stream = ls.stream diff --git a/node_modules/cacache/lib/content/read.js b/node_modules/cacache/lib/content/read.js index 7cc16482d44c8..034e8eee05b10 100644 --- a/node_modules/cacache/lib/content/read.js +++ b/node_modules/cacache/lib/content/read.js @@ -20,17 +20,16 @@ function read (cache, integrity, opts = {}) { // get size return lstat(cpath).then(stat => ({ stat, cpath, sri })) }).then(({ stat, cpath, sri }) => { - if (typeof size === 'number' && stat.size !== size) { + if (typeof size === 'number' && stat.size !== size) throw sizeError(size, stat.size) - } - if (stat.size > MAX_SINGLE_READ_SIZE) { + + if (stat.size > MAX_SINGLE_READ_SIZE) return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } return readFile(cpath, null).then((data) => { - if (!ssri.checkData(data, sri)) { + if (!ssri.checkData(data, sri)) throw integrityError(sri, cpath) - } + return data }) }) @@ -40,11 +39,11 @@ const readPipeline = (cpath, size, sri, stream) => { stream.push( new fsm.ReadStream(cpath, { size, - readSize: MAX_SINGLE_READ_SIZE + readSize: MAX_SINGLE_READ_SIZE, }), ssri.integrityStream({ integrity: sri, - size + size, }) ) return stream @@ -56,13 +55,11 @@ function readSync (cache, integrity, opts = {}) { const { size } = opts return withContentSriSync(cache, integrity, (cpath, sri) => { const data = fs.readFileSync(cpath) - if (typeof size === 'number' && size !== data.length) { + if (typeof size === 'number' && size !== data.length) throw sizeError(size, data.length) - } - if (ssri.checkData(data, sri)) { + if (ssri.checkData(data, sri)) return data - } throw integrityError(sri, cpath) }) @@ -78,9 +75,9 @@ function readStream (cache, integrity, opts = {}) { // just lstat to ensure it exists return lstat(cpath).then((stat) => ({ stat, cpath, sri })) }).then(({ stat, cpath, sri }) => { - if (typeof size === 'number' && size !== stat.size) { + if (typeof size === 'number' && size !== stat.size) return stream.emit('error', sizeError(size, stat.size)) - } + readPipeline(cpath, stat.size, sri, stream) }, er => stream.emit('error', er)) @@ -109,22 +106,21 @@ function copySync (cache, integrity, dest) { module.exports.hasContent = hasContent function hasContent (cache, integrity) { - if (!integrity) { + if (!integrity) return Promise.resolve(false) - } + return withContentSri(cache, integrity, (cpath, sri) => { return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat })) }).catch((err) => { - if (err.code === 'ENOENT') { + if (err.code === 'ENOENT') return false - } + if (err.code === 'EPERM') { 
/* istanbul ignore else */ - if (process.platform !== 'win32') { + if (process.platform !== 'win32') throw err - } else { + else return false - } } }) } @@ -132,24 +128,23 @@ function hasContent (cache, integrity) { module.exports.hasContent.sync = hasContentSync function hasContentSync (cache, integrity) { - if (!integrity) { + if (!integrity) return false - } + return withContentSriSync(cache, integrity, (cpath, sri) => { try { const stat = fs.lstatSync(cpath) return { size: stat.size, sri, stat } } catch (err) { - if (err.code === 'ENOENT') { + if (err.code === 'ENOENT') return false - } + if (err.code === 'EPERM') { /* istanbul ignore else */ - if (process.platform !== 'win32') { + if (process.platform !== 'win32') throw err - } else { + else return false - } } } }) @@ -167,7 +162,8 @@ function withContentSri (cache, integrity, fn) { const cpath = contentPath(cache, digests[0]) return fn(cpath, digests[0]) } else { - // Can't use race here because a generic error can happen before a ENOENT error, and can happen before a valid result + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result return Promise .all(digests.map((meta) => { return withContentSri(cache, meta, fn) @@ -184,15 +180,13 @@ function withContentSri (cache, integrity, fn) { .then((results) => { // Return the first non error if it is found const result = results.find((r) => !(r instanceof Error)) - if (result) { + if (result) return result - } // Throw the No matching content found error const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { + if (enoentError) throw enoentError - } // Throw generic error throw results.find((r) => r instanceof Error) diff --git a/node_modules/cacache/lib/content/rm.js b/node_modules/cacache/lib/content/rm.js index 50612364e9b48..6a3d1a3d02340 100644 --- a/node_modules/cacache/lib/content/rm.js +++ b/node_modules/cacache/lib/content/rm.js @@ -11,10 +11,9 @@ module.exports = rm function rm (cache, integrity) { return hasContent(cache, integrity).then((content) => { // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { + if (content && content.sri) return rimraf(contentPath(cache, content.sri)).then(() => true) - } else { + else return false - } }) } diff --git a/node_modules/cacache/lib/content/write.js b/node_modules/cacache/lib/content/write.js index e8f3e3534940c..dde1bd1dd5dae 100644 --- a/node_modules/cacache/lib/content/write.js +++ b/node_modules/cacache/lib/content/write.js @@ -22,16 +22,15 @@ module.exports = write function write (cache, data, opts = {}) { const { algorithms, size, integrity } = opts - if (algorithms && algorithms.length > 1) { + if (algorithms && algorithms.length > 1) throw new Error('opts.algorithms only supports a single algorithm for now') - } - if (typeof size === 'number' && data.length !== size) { + + if (typeof size === 'number' && data.length !== size) return Promise.reject(sizeError(size, data.length)) - } + const sri = ssri.fromData(data, algorithms ? 
{ algorithms } : {}) - if (integrity && !ssri.checkData(data, integrity, opts)) { + if (integrity && !ssri.checkData(data, integrity, opts)) return Promise.reject(checksumError(integrity, sri)) - } return disposer(makeTmp(cache, opts), makeTmpDisposer, (tmp) => { @@ -112,13 +111,17 @@ function pipeToTmp (inputStream, cache, tmpTarget, opts) { const hashStream = ssri.integrityStream({ integrity: opts.integrity, algorithms: opts.algorithms, - size: opts.size + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s }) - hashStream.on('integrity', i => { integrity = i }) - hashStream.on('size', s => { size = s }) const outStream = new fsm.WriteStream(tmpTarget, { - flags: 'wx' + flags: 'wx', }) // NB: this can throw if the hashStream has a problem with @@ -132,21 +135,23 @@ function pipeToTmp (inputStream, cache, tmpTarget, opts) { return pipeline.promise() .then(() => ({ integrity, size })) - .catch(er => rimraf(tmpTarget).then(() => { throw er })) + .catch(er => rimraf(tmpTarget).then(() => { + throw er + })) } function makeTmp (cache, opts) { const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) return fixOwner.mkdirfix(cache, path.dirname(tmpTarget)).then(() => ({ target: tmpTarget, - moved: false + moved: false, })) } function makeTmpDisposer (tmp) { - if (tmp.moved) { + if (tmp.moved) return Promise.resolve() - } + return rimraf(tmp.target) } diff --git a/node_modules/cacache/lib/entry-index.js b/node_modules/cacache/lib/entry-index.js index 58b205bfe7f5b..71aac5ed75b14 100644 --- a/node_modules/cacache/lib/entry-index.js +++ b/node_modules/cacache/lib/entry-index.js @@ -1,20 +1,27 @@ 'use strict' const util = require('util') - const crypto = require('crypto') const fs = require('fs') const Minipass = require('minipass') const path = require('path') const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const { disposer } = require('./util/disposer') const contentPath = require('./content/path') const fixOwner = require('./util/fix-owner') const hashToSegments = require('./util/hash-to-segments') const indexV = require('../package.json')['cache-version'].index +const moveFile = require('@npmcli/move-file') +const _rimraf = require('rimraf') +const rimraf = util.promisify(_rimraf) +rimraf.sync = _rimraf.sync const appendFile = util.promisify(fs.appendFile) const readFile = util.promisify(fs.readFile) const readdir = util.promisify(fs.readdir) +const writeFile = util.promisify(fs.writeFile) module.exports.NotFoundError = class NotFoundError extends Error { constructor (cache, key) { @@ -25,6 +32,82 @@ module.exports.NotFoundError = class NotFoundError extends Error { } } +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. 
+ // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. + if (entry.integrity === null && !opts.validateEntry) + break + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) + newEntries.unshift(entry) + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fixOwner.mkdirfix(cache, path.dirname(target)) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) + return rimraf(tmp.target) + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await fixOwner.mkdirfix(cache, path.dirname(bucket)) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + try { + await fixOwner.chownr(cache, bucket) + } catch (err) { + if (err.code !== 'ENOENT') + throw err + } + } + + // write the file atomically + await disposer(setup(), teardown, write) + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + module.exports.insert = insert function insert (cache, key, integrity, opts = {}) { @@ -35,7 +118,7 @@ function insert (cache, key, integrity, opts = {}) { integrity: integrity && ssri.stringify(integrity), time: Date.now(), size, - metadata + metadata, } return fixOwner .mkdirfix(cache, path.dirname(bucket)) @@ -47,14 +130,15 @@ function insert (cache, key, integrity, opts = {}) { // another while still preserving the string length of the JSON in // question. So, we just slap the length in there and verify it on read. // - // Thanks to @isaacs for the whiteboarding session that ended up with this. + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) }) .then(() => fixOwner.chownr(cache, bucket)) .catch((err) => { - if (err.code === 'ENOENT') { + if (err.code === 'ENOENT') return undefined - } + throw err // There's a class of race conditions that happen when things get deleted // during fixOwner, or between the two mkdirfix/chownr calls. 
@@ -77,7 +161,7 @@ function insertSync (cache, key, integrity, opts = {}) { integrity: integrity && ssri.stringify(integrity), time: Date.now(), size, - metadata + metadata, } fixOwner.mkdirfix.sync(cache, path.dirname(bucket)) const stringified = JSON.stringify(entry) @@ -85,9 +169,8 @@ function insertSync (cache, key, integrity, opts = {}) { try { fixOwner.chownr.sync(cache, bucket) } catch (err) { - if (err.code !== 'ENOENT') { + if (err.code !== 'ENOENT') throw err - } } return formatEntry(cache, entry) } @@ -99,19 +182,17 @@ function find (cache, key) { return bucketEntries(bucket) .then((entries) => { return entries.reduce((latest, next) => { - if (next && next.key === key) { + if (next && next.key === key) return formatEntry(cache, next) - } else { + else return latest - } }, null) }) .catch((err) => { - if (err.code === 'ENOENT') { + if (err.code === 'ENOENT') return null - } else { + else throw err - } }) } @@ -121,31 +202,37 @@ function findSync (cache, key) { const bucket = bucketPath(cache, key) try { return bucketEntriesSync(bucket).reduce((latest, next) => { - if (next && next.key === key) { + if (next && next.key === key) return formatEntry(cache, next) - } else { + else return latest - } }, null) } catch (err) { - if (err.code === 'ENOENT') { + if (err.code === 'ENOENT') return null - } else { + else throw err - } } } module.exports.delete = del -function del (cache, key, opts) { - return insert(cache, key, null, opts) +function del (cache, key, opts = {}) { + if (!opts.removeFully) + return insert(cache, key, null, opts) + + const bucket = bucketPath(cache, key) + return rimraf(bucket) } module.exports.delete.sync = delSync -function delSync (cache, key, opts) { - return insertSync(cache, key, null, opts) +function delSync (cache, key, opts = {}) { + if (!opts.removeFully) + return insertSync(cache, key, null, opts) + + const bucket = bucketPath(cache, key) + return rimraf.sync(bucket) } module.exports.lsStream = lsStream @@ -176,12 +263,12 @@ function lsStream (cache) { // reduced is a map of key => entry for (const entry of reduced.values()) { const formatted = formatEntry(cache, entry) - if (formatted) { + if (formatted) stream.write(formatted) - } } }).catch(err => { - if (err.code === 'ENOENT') { return undefined } + if (err.code === 'ENOENT') + return undefined throw err }) }) @@ -209,10 +296,14 @@ function ls (cache) { ) } +module.exports.bucketEntries = bucketEntries + function bucketEntries (bucket, filter) { return readFile(bucket, 'utf8').then((data) => _bucketEntries(data, filter)) } +module.exports.bucketEntries.sync = bucketEntriesSync + function bucketEntriesSync (bucket, filter) { const data = fs.readFileSync(bucket, 'utf8') return _bucketEntries(data, filter) @@ -221,9 +312,9 @@ function bucketEntriesSync (bucket, filter) { function _bucketEntries (data, filter) { const entries = [] data.split('\n').forEach((entry) => { - if (!entry) { + if (!entry) return - } + const pieces = entry.split('\t') if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { // Hash is no good! Corruption or malice? Doesn't matter! @@ -237,9 +328,8 @@ function _bucketEntries (data, filter) { // Entry is corrupted! return } - if (obj) { + if (obj) entries.push(obj) - } }) return entries } @@ -279,26 +369,25 @@ function hash (str, digest) { .digest('hex') } -function formatEntry (cache, entry) { +function formatEntry (cache, entry, keepAll) { // Treat null digests as deletions. They'll shadow any previous entries. 
- if (!entry.integrity) { + if (!entry.integrity && !keepAll) return null - } + return { key: entry.key, integrity: entry.integrity, - path: contentPath(cache, entry.integrity), + path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, size: entry.size, time: entry.time, - metadata: entry.metadata + metadata: entry.metadata, } } function readdirOrEmpty (dir) { return readdir(dir).catch((err) => { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') return [] - } throw err }) diff --git a/node_modules/cacache/lib/memoization.js b/node_modules/cacache/lib/memoization.js index 185141d8eadad..d5465f39fc581 100644 --- a/node_modules/cacache/lib/memoization.js +++ b/node_modules/cacache/lib/memoization.js @@ -8,7 +8,7 @@ const MAX_AGE = 3 * 60 * 1000 const MEMOIZED = new LRU({ max: MAX_SIZE, maxAge: MAX_AGE, - length: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length + length: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, }) module.exports.clearMemoized = clearMemoized @@ -62,13 +62,12 @@ class ObjProxy { } function pickMem (opts) { - if (!opts || !opts.memoize) { + if (!opts || !opts.memoize) return MEMOIZED - } else if (opts.memoize.get && opts.memoize.set) { + else if (opts.memoize.get && opts.memoize.set) return opts.memoize - } else if (typeof opts.memoize === 'object') { + else if (typeof opts.memoize === 'object') return new ObjProxy(opts.memoize) - } else { + else return MEMOIZED - } } diff --git a/node_modules/cacache/lib/util/disposer.js b/node_modules/cacache/lib/util/disposer.js index 8a24ad2f2a2a2..aa8aed54da551 100644 --- a/node_modules/cacache/lib/util/disposer.js +++ b/node_modules/cacache/lib/util/disposer.js @@ -8,9 +8,9 @@ function disposer (creatorFn, disposerFn, fn) { .then( // disposer resolved, do something with original fn's promise () => { - if (shouldThrow) { + if (shouldThrow) throw result - } + return result }, // Disposer fn failed, crash process diff --git a/node_modules/cacache/lib/util/fix-owner.js b/node_modules/cacache/lib/util/fix-owner.js index 9afa638a8c839..90ffece524f54 100644 --- a/node_modules/cacache/lib/util/fix-owner.js +++ b/node_modules/cacache/lib/util/fix-owner.js @@ -49,9 +49,8 @@ function fixOwner (cache, filepath) { const { uid, gid } = owner // No need to override if it's already what we used. - if (self.uid === uid && self.gid === gid) { + if (self.uid === uid && self.gid === gid) return - } return inflight('fixOwner: fixing ownership on ' + filepath, () => chownr( @@ -59,9 +58,9 @@ function fixOwner (cache, filepath) { typeof uid === 'number' ? uid : self.uid, typeof gid === 'number' ? gid : self.gid ).catch((err) => { - if (err.code === 'ENOENT') { + if (err.code === 'ENOENT') return null - } + throw err }) ) @@ -94,9 +93,9 @@ function fixOwnerSync (cache, filepath) { ) } catch (err) { // only catch ENOENT, any other error is a problem. 
- if (err.code === 'ENOENT') { + if (err.code === 'ENOENT') return null - } + throw err } } @@ -111,14 +110,13 @@ function mkdirfix (cache, p, cb) { return Promise.resolve(inferOwner(cache)).then(() => { return mkdirp(p) .then((made) => { - if (made) { + if (made) return fixOwner(cache, made).then(() => made) - } }) .catch((err) => { - if (err.code === 'EEXIST') { + if (err.code === 'EEXIST') return fixOwner(cache, p).then(() => null) - } + throw err }) }) @@ -138,8 +136,7 @@ function mkdirfixSync (cache, p) { if (err.code === 'EEXIST') { fixOwnerSync(cache, p) return null - } else { + } else throw err - } } } diff --git a/node_modules/cacache/lib/util/move-file.js b/node_modules/cacache/lib/util/move-file.js index 84130b2e9ffb8..c3f9e35eb99c7 100644 --- a/node_modules/cacache/lib/util/move-file.js +++ b/node_modules/cacache/lib/util/move-file.js @@ -38,19 +38,17 @@ function moveFile (src, dest) { } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { // file already exists, so whatever return resolve() - } else { + } else return reject(err) - } - } else { + } else return resolve() - } }) }) .then(() => { // content should never change for any reason, so make it read-only return Promise.all([ unlink(src), - !isWindows && chmod(dest, '0444') + !isWindows && chmod(dest, '0444'), ]) }) .catch(() => { diff --git a/node_modules/cacache/lib/verify.js b/node_modules/cacache/lib/verify.js index 5a011a3f1d2cb..e9d679eceaf51 100644 --- a/node_modules/cacache/lib/verify.js +++ b/node_modules/cacache/lib/verify.js @@ -24,7 +24,7 @@ const readFile = util.promisify(fs.readFile) const verifyOpts = (opts) => ({ concurrency: 20, log: { silly () {} }, - ...opts + ...opts, }) module.exports = verify @@ -40,7 +40,7 @@ function verify (cache, opts) { rebuildIndex, cleanTmp, writeVerifile, - markEndTime + markEndTime, ] return steps @@ -54,9 +54,9 @@ function verify (cache, opts) { stats[k] = s[k] }) const end = new Date() - if (!stats.runTime) { + if (!stats.runTime) stats.runTime = {} - } + stats.runTime[label] = end - start return Promise.resolve(stats) }) @@ -108,9 +108,9 @@ function garbageCollect (cache, opts) { const indexStream = index.lsStream(cache) const liveContent = new Set() indexStream.on('data', (entry) => { - if (opts.filter && !opts.filter(entry)) { + if (opts.filter && !opts.filter(entry)) return - } + liveContent.add(entry.integrity.toString()) }) return new Promise((resolve, reject) => { @@ -120,14 +120,14 @@ function garbageCollect (cache, opts) { return glob(path.join(contentDir, '**'), { follow: false, nodir: true, - nosort: true + nosort: true, }).then((files) => { return Promise.resolve({ verifiedContent: 0, reclaimedCount: 0, reclaimedSize: 0, badContentCount: 0, - keptSize: 0 + keptSize: 0, }).then((stats) => pMap( files, @@ -171,14 +171,14 @@ function verifyContent (filepath, sri) { .then((s) => { const contentInfo = { size: s.size, - valid: true + valid: true, } return ssri .checkStream(new fsm.ReadStream(filepath), sri) .catch((err) => { - if (err.code !== 'EINTEGRITY') { + if (err.code !== 'EINTEGRITY') throw err - } + return rimraf(filepath).then(() => { contentInfo.valid = false }) @@ -186,9 +186,9 @@ function verifyContent (filepath, sri) { .then(() => contentInfo) }) .catch((err) => { - if (err.code === 'ENOENT') { + if (err.code === 'ENOENT') return { size: 0, valid: false } - } + throw err }) } @@ -199,7 +199,7 @@ function rebuildIndex (cache, opts) { const stats = { missingContent: 0, rejectedEntries: 0, - totalEntries: 0 + totalEntries: 0, } const buckets = {} for 
(const k in entries) { @@ -209,9 +209,9 @@ function rebuildIndex (cache, opts) { const entry = entries[k] const excluded = opts.filter && !opts.filter(entry) excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) { + if (buckets[hashed] && !excluded) buckets[hashed].push(entry) - } else if (buckets[hashed] && excluded) { + else if (buckets[hashed] && excluded) { // skip } else if (excluded) { buckets[hashed] = [] @@ -244,7 +244,7 @@ function rebuildBucket (cache, bucket, stats, opts) { return index .insert(cache, entry.key, entry.integrity, { metadata: entry.metadata, - size: entry.size + size: entry.size, }) .then(() => { stats.totalEntries++ diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json index 053c245b52b72..3c2e65c0404a0 100644 --- a/node_modules/cacache/package.json +++ b/node_modules/cacache/package.json @@ -1,6 +1,6 @@ { "name": "cacache", - "version": "15.0.5", + "version": "15.2.0", "cache-version": { "content": "2", "index": "5" @@ -13,15 +13,17 @@ ], "scripts": { "benchmarks": "node test/benchmarks", - "lint": "standard", - "postrelease": "npm publish", - "posttest": "npm run lint", - "prepublishOnly": "git push --follow-tags", - "prerelease": "npm t", - "release": "standard-version -s", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", "test": "tap", + "snap": "tap", "coverage": "tap", - "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test" + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "npm run npmclilint -- \"*.*js\" \"lib/**/*.*js\" \"test/**/*.*js\"", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --" }, "repository": "https://github.com/npm/cacache", "keywords": [ @@ -39,23 +41,6 @@ "disk cache", "disk storage" ], - "author": { - "name": "Kat Marchán", - "email": "kzm@sykosomatic.org", - "twitter": "maybekatz" - }, - "contributors": [ - { - "name": "Charlotte Spencer", - "email": "charlottelaspencer@gmail.com", - "twitter": "charlotteis" - }, - { - "name": "Rebecca Turner", - "email": "me@re-becca.org", - "twitter": "ReBeccaOrg" - } - ], "license": "ISC", "dependencies": { "@npmcli/move-file": "^1.0.1", @@ -72,18 +57,17 @@ "p-map": "^4.0.0", "promise-inflight": "^1.0.1", "rimraf": "^3.0.2", - "ssri": "^8.0.0", + "ssri": "^8.0.1", "tar": "^6.0.2", "unique-filename": "^1.1.1" }, "devDependencies": { + "@npmcli/lint": "^1.0.1", "benchmark": "^2.1.4", "chalk": "^4.0.0", "require-inject": "^1.4.4", - "standard": "^14.3.1", - "standard-version": "^7.1.0", "tacks": "^1.3.0", - "tap": "^14.10.6" + "tap": "^15.0.9" }, "tap": { "100": true, diff --git a/node_modules/cacache/put.js b/node_modules/cacache/put.js index eb21aa867173f..84e9562bc33ab 100644 --- a/node_modules/cacache/put.js +++ b/node_modules/cacache/put.js @@ -9,7 +9,7 @@ const Pipeline = require('minipass-pipeline') const putOpts = (opts) => ({ algorithms: ['sha512'], - ...opts + ...opts, }) module.exports = putData @@ -21,9 +21,9 @@ function putData (cache, key, data, opts = {}) { return index .insert(cache, key, res.integrity, { ...opts, size: res.size }) .then((entry) => { - if (memoize) { + if (memoize) memo.put(cache, entry, data, opts) - } + return res.integrity }) }) @@ -67,17 +67,16 @@ function putStream (cache, key, opts = {}) { return index .insert(cache, key, integrity, { ...opts, size }) .then((entry) => { - if (memoize && memoData) { 
+ if (memoize && memoData) memo.put(cache, entry, memoData, opts) - } - if (integrity) { + + if (integrity) pipeline.emit('integrity', integrity) - } - if (size) { + + if (size) pipeline.emit('size', size) - } }) - } + }, })) return pipeline diff --git a/node_modules/cacache/rm.js b/node_modules/cacache/rm.js index 7dd4e8c8b07f1..f2ef6b190f457 100644 --- a/node_modules/cacache/rm.js +++ b/node_modules/cacache/rm.js @@ -11,9 +11,9 @@ const rmContent = require('./lib/content/rm') module.exports = entry module.exports.entry = entry -function entry (cache, key) { +function entry (cache, key, opts) { memo.clearMemoized() - return index.delete(cache, key) + return index.delete(cache, key, opts) } module.exports.content = content diff --git a/node_modules/caseless/README.md b/node_modules/caseless/README.md deleted file mode 100644 index e5077a21659b2..0000000000000 --- a/node_modules/caseless/README.md +++ /dev/null @@ -1,45 +0,0 @@ -## Caseless -- wrap an object to set and get property with caseless semantics but also preserve caseing. - -This library is incredibly useful when working with HTTP headers. It allows you to get/set/check for headers in a caseless manner while also preserving the caseing of headers the first time they are set. - -## Usage - -```javascript -var headers = {} - , c = caseless(headers) - ; -c.set('a-Header', 'asdf') -c.get('a-header') === 'asdf' -``` - -## has(key) - -Has takes a name and if it finds a matching header will return that header name with the preserved caseing it was set with. - -```javascript -c.has('a-header') === 'a-Header' -``` - -## set(key, value[, clobber=true]) - -Set is fairly straight forward except that if the header exists and clobber is disabled it will add `','+value` to the existing header. - -```javascript -c.set('a-Header', 'fdas') -c.set('a-HEADER', 'more', false) -c.get('a-header') === 'fdsa,more' -``` - -## swap(key) - -Swaps the casing of a header with the new one that is passed in. 
- -```javascript -var headers = {} - , c = caseless(headers) - ; -c.set('a-Header', 'fdas') -c.swap('a-HEADER') -c.has('a-header') === 'a-HEADER' -headers === {'a-HEADER': 'fdas'} -``` diff --git a/node_modules/chalk/package.json b/node_modules/chalk/package.json index 0d99f0f28621f..c2d63f67e5e95 100644 --- a/node_modules/chalk/package.json +++ b/node_modules/chalk/package.json @@ -1,6 +1,6 @@ { "name": "chalk", - "version": "4.1.0", + "version": "4.1.1", "description": "Terminal string styling done right", "license": "MIT", "repository": "chalk/chalk", diff --git a/node_modules/chalk/readme.md b/node_modules/chalk/readme.md index 338f42cb8b525..851259216bc19 100644 --- a/node_modules/chalk/readme.md +++ b/node_modules/chalk/readme.md @@ -13,6 +13,48 @@ <img src="https://cdn.jsdelivr.net/gh/chalk/ansi-styles@8261697c95bf34b6c7767e2cbe9941a851d59385/screenshot.svg" width="900"> +<br> + +--- + +<div align="center"> + <p> + <p> + <sup> + Sindre Sorhus' open source work is supported by the community on <a href="https://github.com/sponsors/sindresorhus">GitHub Sponsors</a> and <a href="https://stakes.social/0x44d871aebF0126Bf646753E2C976Aa7e68A66c15">Dev</a> + </sup> + </p> + <sup>Special thanks to:</sup> + <br> + <br> + <a href="https://standardresume.co/tech"> + <img src="https://sindresorhus.com/assets/thanks/standard-resume-logo.svg" width="160"/> + </a> + <br> + <br> + <a href="https://retool.com/?utm_campaign=sindresorhus"> + <img src="https://sindresorhus.com/assets/thanks/retool-logo.svg" width="210"/> + </a> + <br> + <br> + <a href="https://doppler.com/?utm_campaign=github_repo&utm_medium=referral&utm_content=chalk&utm_source=github"> + <div> + <img src="https://dashboard.doppler.com/imgs/logo-long.svg" width="240" alt="Doppler"> + </div> + <b>All your environment variables, in one place</b> + <div> + <span>Stop struggling with scattered API keys, hacking together home-brewed tools,</span> + <br> + <span>and avoiding access controls. Keep your team and servers in sync with Doppler.</span> + </div> + </a> + </p> +</div> + +--- + +<br> + ## Highlights - Expressive API diff --git a/node_modules/chownr/README.md b/node_modules/chownr/README.md deleted file mode 100644 index 70e9a54a32b8e..0000000000000 --- a/node_modules/chownr/README.md +++ /dev/null @@ -1,3 +0,0 @@ -Like `chown -R`. - -Takes the same arguments as `fs.chown()` diff --git a/node_modules/cidr-regex/README.md b/node_modules/cidr-regex/README.md deleted file mode 100644 index b2d110242b25d..0000000000000 --- a/node_modules/cidr-regex/README.md +++ /dev/null @@ -1,61 +0,0 @@ -# cidr-regex -[![](https://img.shields.io/npm/v/cidr-regex.svg?style=flat)](https://www.npmjs.org/package/cidr-regex) [![](https://img.shields.io/npm/dm/cidr-regex.svg)](https://www.npmjs.org/package/cidr-regex) - -> Regular expression for matching IP addresses in CIDR notation - -## Usage - -```sh -$ npm i cidr-regex -``` - -```js -const cidrRegex = require("cidr-regex"); - -// Contains a CIDR IP address? -cidrRegex().test("foo 192.168.0.1/24"); -//=> true - -// Is a CIDR IP address? -cidrRegex({exact: true}).test("foo 192.168.0.1/24"); -//=> false - -cidrRegex.v6({exact: true}).test("1:2:3:4:5:6:7:8/64"); -//=> true - -// Extract CIDRs from string -"foo 192.168.0.1/24 bar 1:2:3:4:5:6:7:8/64 baz".match(cidrRegex()); -//=> ["192.168.0.1/24", "1:2:3:4:5:6:7:8/64"] -``` - -## API -### cidrRegex([options]) - -Returns a regex for matching both IPv4 and IPv6 CIDR IP addresses. 
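Together with the `v4`/`v6` variants below and the `exact` option, the builders can be combined to classify an address string. A minimal sketch, assuming the CommonJS usage shown in the Usage section above; the `classifyCidr` helper name and the sample inputs are illustrative, not part of the package:

```js
const cidrRegex = require("cidr-regex");

// Hypothetical helper: classify a string as an exact IPv4 CIDR, an exact
// IPv6 CIDR, or neither, using only the documented builders and `exact` option.
function classifyCidr(str) {
  if (cidrRegex.v4({exact: true}).test(str)) return "ipv4";
  if (cidrRegex.v6({exact: true}).test(str)) return "ipv6";
  return "not a CIDR";
}

classifyCidr("192.168.0.1/24");      //=> "ipv4"
classifyCidr("1:2:3:4:5:6:7:8/64");  //=> "ipv6"
classifyCidr("foo 192.168.0.1/24");  //=> "not a CIDR" (exact matching rejects surrounding text)
```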
- -### cidrRegex.v4([options]) - -Returns a regex for matching IPv4 CIDR IP addresses. - -### cidrRegex.v6([options]) - -Returns a regex for matching IPv6 CIDR IP addresses. - -#### options.exact - -Type: `boolean`<br> -Default: `false` *(Matches any CIDR IP address in a string)* - -Only match an exact string. Useful with `RegExp#test()` to check if a string is a CIDR IP address. - -## Related - -- [is-cidr](https://github.com/silverwind/is-cidr) - Check if a string is an IP address in CIDR notation -- [is-ip](https://github.com/sindresorhus/is-ip) - Check if a string is an IP address -- [ip-regex](https://github.com/sindresorhus/ip-regex) - Regular expression for matching IP addresses - -## License - -© [silverwind](https://github.com/silverwind), distributed under BSD licence - -Based on previous work by [Felipe Apostol](https://github.com/flipjs) diff --git a/node_modules/cli-columns/README.md b/node_modules/cli-columns/README.md deleted file mode 100644 index abcabefbd69f9..0000000000000 --- a/node_modules/cli-columns/README.md +++ /dev/null @@ -1,69 +0,0 @@ -# `cli-columns` - -[![NPM version][npm-img]][npm-url] [![Downloads][downloads-img]][npm-url] [![Build Status][travis-img]][travis-url] [![Coverage Status][coveralls-img]][coveralls-url] [![Chat][gitter-img]][gitter-url] [![Tip][amazon-img]][amazon-url] - -Columnated lists for the CLI. Unicode and ANSI safe. - -## Install - - $ npm install --save cli-columns - -## Usage - -```js -const chalk = require('chalk'); -const columns = require('.'); - -const values = [ - 'blue' + chalk.bgBlue('berry'), - '笔菠萝' + chalk.yellow('苹果笔'), - chalk.red('apple'), 'pomegranate', - 'durian', chalk.green('star fruit'), - 'パイナップル', 'apricot', 'banana', - 'pineapple', chalk.bgRed.yellow('orange') -]; - -console.log(columns(values)); -``` - -<img alt="screenshot" src="https://user-images.githubusercontent.com/155164/28672800-bd415c86-72ae-11e7-855c-6f6aa108921b.png"> - -## API - -### columns(values [, options]): String - -- `values` `{Array<String>}` Array of strings to display. -- `options` `{Object}` - - `character` `{String}` (default: `' '`) Padding character. - - `newline` `{String}` (default: `'\n'`) Newline character. - - `padding` `{Number}` (default: `2`) Space between columns. - - `sort` `{Boolean}` (default: `true`) Whether to sort results. - - `width` `{Number}` (default: `process.stdout.columns`) Max width of list. - -Sorts and formats a list of values into columns suitable to display in a given width. - -## Contribute - -Standards for this project, including tests, code coverage, and semantics are enforced with a build tool. Pull requests must include passing tests with 100% code coverage and no linting errors. 
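For reference, a short sketch of the `columns(values[, options])` call documented in the API section above, spelling out a few of the listed options; the sample strings and the `width` value are arbitrary illustrations, not defaults taken from the package:

```js
const columns = require('cli-columns');

// Arbitrary sample values; any array of strings works.
const values = ['alpha', 'bravo', 'charlie', 'delta', 'echo', 'foxtrot', 'golf'];

console.log(columns(values, {
  padding: 2,  // space between columns (the documented default)
  sort: true,  // sort results before laying them out (the documented default)
  width: 60    // max width of the list; defaults to process.stdout.columns
}));
```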
- -### Test - - $ npm test - ----- - -© Shannon Moeller <me@shannonmoeller.com> (shannonmoeller.com) - -Licensed under [MIT](http://shannonmoeller.com/mit.txt) - -[amazon-img]: https://img.shields.io/badge/amazon-tip_jar-yellow.svg?style=flat-square -[amazon-url]: https://www.amazon.com/gp/registry/wishlist/1VQM9ID04YPC5?sort=universal-price -[coveralls-img]: http://img.shields.io/coveralls/shannonmoeller/cli-columns/master.svg?style=flat-square -[coveralls-url]: https://coveralls.io/r/shannonmoeller/cli-columns -[downloads-img]: http://img.shields.io/npm/dm/cli-columns.svg?style=flat-square -[gitter-img]: http://img.shields.io/badge/gitter-join_chat-1dce73.svg?style=flat-square -[gitter-url]: https://gitter.im/shannonmoeller/shannonmoeller -[npm-img]: http://img.shields.io/npm/v/cli-columns.svg?style=flat-square -[npm-url]: https://npmjs.org/package/cli-columns -[travis-img]: http://img.shields.io/travis/shannonmoeller/cli-columns.svg?style=flat-square -[travis-url]: https://travis-ci.org/shannonmoeller/cli-columns diff --git a/node_modules/cli-table3/CHANGELOG.md b/node_modules/cli-table3/CHANGELOG.md deleted file mode 100644 index 1ad2e7581458d..0000000000000 --- a/node_modules/cli-table3/CHANGELOG.md +++ /dev/null @@ -1,81 +0,0 @@ -# Changelog - -## v0.6.0 (2020-03-30) - -#### :boom: Breaking Change -* [#156](https://github.com/cli-table/cli-table3/pull/156) Drop support for Node 6 and 8 ([@Turbo87](https://github.com/Turbo87)) - -#### :bug: Bug Fix -* [#92](https://github.com/cli-table/cli-table3/pull/92) Emoji Length Calculation Fix ([@acupoftee](https://github.com/acupoftee)) -* [#53](https://github.com/cli-table/cli-table3/pull/53) "Table" union type definition fix ([@macieklad](https://github.com/macieklad)) - -#### :memo: Documentation -* [#135](https://github.com/cli-table/cli-table3/pull/135) docs: use https ([@DanielRuf](https://github.com/DanielRuf)) - -#### :house: Internal -* [#132](https://github.com/cli-table/cli-table3/pull/132) Update lockfile ([@DanielRuf](https://github.com/DanielRuf)) -* [#134](https://github.com/cli-table/cli-table3/pull/134) Fix ESLint errors ([@DanielRuf](https://github.com/DanielRuf)) -* [#103](https://github.com/cli-table/cli-table3/pull/103) Fix Jest configuration ([@boneskull](https://github.com/boneskull)) - -#### Committers: 5 -- Christopher Hiller ([@boneskull](https://github.com/boneskull)) -- Daniel Ruf ([@DanielRuf](https://github.com/DanielRuf)) -- Maciej Ładoś ([@macieklad](https://github.com/macieklad)) -- Tee ([@acupoftee](https://github.com/acupoftee)) -- Tobias Bieniek ([@Turbo87](https://github.com/Turbo87)) - - -## v0.5.1 (2018-07-19) - -#### :rocket: Enhancement -* [#21](https://github.com/cli-table/cli-table3/pull/21) Import type definition from `@types/cli-table2` ([@Turbo87](https://github.com/Turbo87)) - -#### Committers: 1 -- Tobias Bieniek ([Turbo87](https://github.com/Turbo87)) - - -## v0.5.0 (2018-06-11) - -#### :boom: Breaking Change -* [#2](https://github.com/cli-table/cli-table3/pull/2) Update Node version requirements. ([@Turbo87](https://github.com/Turbo87)) - -#### :memo: Documentation -* [#11](https://github.com/cli-table/cli-table3/pull/11) Update Documentation. ([@Turbo87](https://github.com/Turbo87)) - -#### :house: Internal -* [#16](https://github.com/cli-table/cli-table3/pull/16) Replace `kind-of` dependency with `typeof` and `Array.isArray()`. ([@Turbo87](https://github.com/Turbo87)) -* [#15](https://github.com/cli-table/cli-table3/pull/15) Remove Gulp. 
([@Turbo87](https://github.com/Turbo87)) -* [#13](https://github.com/cli-table/cli-table3/pull/13) Use ES6 class syntax and `let/const`. ([@Turbo87](https://github.com/Turbo87)) -* [#12](https://github.com/cli-table/cli-table3/pull/12) Add ESLint and Prettier. ([@Turbo87](https://github.com/Turbo87)) -* [#10](https://github.com/cli-table/cli-table3/pull/10) chore: use yarn cache. ([@DanielRuf](https://github.com/DanielRuf)) -* [#9](https://github.com/cli-table/cli-table3/pull/9) Use Jest for testing. ([@Turbo87](https://github.com/Turbo87)) -* [#3](https://github.com/cli-table/cli-table3/pull/3) Add `yarn.lock` file. ([@Turbo87](https://github.com/Turbo87)) -* [#1](https://github.com/cli-table/cli-table3/pull/1) Skip broken test. ([@Turbo87](https://github.com/Turbo87)) - -#### Committers: 2 -- Daniel Ruf ([DanielRuf](https://github.com/DanielRuf)) -- Tobias Bieniek ([Turbo87](https://github.com/Turbo87)) - - -## v0.4.0 (2018-06-10) - -First official release as `cli-table3`. Changes compares to `cli-table2` v0.2.0: - -#### :rocket: Enhancement -* [#27](https://github.com/jamestalmage/cli-table2/pull/27) Remove "lodash" dependency. ([@Turbo87](https://github.com/Turbo87)) - -#### :bug: Bug Fix -* [#29](https://github.com/jamestalmage/cli-table2/pull/29) Fix wordWrap with colSpan. ([@mmurphy](https://github.com/mmurphy)) -* [#24](https://github.com/jamestalmage/cli-table2/pull/24) Fixing the runtime error when content is truncated. ([@sthadeshwar](https://github.com/sthadeshwar)) - -#### :memo: Documentation -* [#41](https://github.com/jamestalmage/cli-table2/pull/41) Create LICENSE. ([@GantMan](https://github.com/GantMan)) - -#### :house: Internal -* [#26](https://github.com/jamestalmage/cli-table2/pull/26) package.json: Whitelist JS files ([@Turbo87](https://github.com/Turbo87)) - -#### Committers: 4 -- Gant Laborde ([GantMan](https://github.com/GantMan)) -- Martin Murphy ([mmurphy](https://github.com/mmurphy)) -- Satyajit Thadeshwar ([sthadeshwar](https://github.com/sthadeshwar)) -- Tobias Bieniek ([Turbo87](https://github.com/Turbo87)) diff --git a/node_modules/cli-table3/README.md b/node_modules/cli-table3/README.md deleted file mode 100644 index 03f805437cc4e..0000000000000 --- a/node_modules/cli-table3/README.md +++ /dev/null @@ -1,218 +0,0 @@ -cli-table3 -=============================================================================== - -[![npm version](https://img.shields.io/npm/v/cli-table3.svg)](https://www.npmjs.com/package/cli-table3) -[![Build Status](https://travis-ci.com/cli-table/cli-table3.svg?branch=master)](https://travis-ci.com/cli-table/cli-table3) - -This utility allows you to render unicode-aided tables on the command line from -your node.js scripts. - -`cli-table3` is based on (and api compatible with) the original [cli-table](https://github.com/Automattic/cli-table), -and [cli-table2](https://github.com/jamestalmage/cli-table2), which are both -unmaintained. `cli-table3` includes all the additional features from -`cli-table2`. - -![Screenshot](https://i.imgur.com/sYq4T.png) - -## Features not in the original cli-table - -- Ability to make cells span columns and/or rows. -- Ability to set custom styles per cell (border characters/colors, padding, etc). -- Vertical alignment (top, bottom, center). -- Automatic word wrapping. -- More robust truncation of cell text that contains ansi color characters. -- Better handling of text color that spans multiple lines. -- API compatible with the original cli-table. 
-- Exhaustive test suite including the entire original cli-table test suite. -- Lots of examples auto-generated from the tests ([basic](https://github.com/cli-table/cli-table3/blob/master/basic-usage.md), [advanced](https://github.com/cli-table/cli-table3/blob/master/advanced-usage.md)). - -## Features - -- Customizable characters that constitute the table. -- Color/background styling in the header through - [colors.js](https://github.com/marak/colors.js) -- Column width customization -- Text truncation based on predefined widths -- Text alignment (left, right, center) -- Padding (left, right) -- Easy-to-use API - -## Installation - -```bash -npm install cli-table3 -``` - -## How to use - -A portion of the unit test suite is used to generate examples: -- [basic-usage](https://github.com/cli-table/cli-table3/blob/master/basic-usage.md) - covers basic uses. -- [advanced](https://github.com/cli-table/cli-table3/blob/master/advanced-usage.md) - covers using the new column and row span features. - -This package is api compatible with the original [cli-table](https://github.com/Automattic/cli-table). -So all the original documentation still applies (copied below). - -### Horizontal Tables -```javascript -var Table = require('cli-table3'); - -// instantiate -var table = new Table({ - head: ['TH 1 label', 'TH 2 label'] - , colWidths: [100, 200] -}); - -// table is an Array, so you can `push`, `unshift`, `splice` and friends -table.push( - ['First value', 'Second value'] - , ['First value', 'Second value'] -); - -console.log(table.toString()); -``` - -### Vertical Tables -```javascript -var Table = require('cli-table3'); -var table = new Table(); - -table.push( - { 'Some key': 'Some value' } - , { 'Another key': 'Another value' } -); - -console.log(table.toString()); -``` -### Cross Tables -Cross tables are very similar to vertical tables, with two key differences: - -1. They require a `head` setting when instantiated that has an empty string as the first header -2. 
The individual rows take the general form of { "Header": ["Row", "Values"] } - -```javascript -var Table = require('cli-table3'); -var table = new Table({ head: ["", "Top Header 1", "Top Header 2"] }); - -table.push( - { 'Left Header 1': ['Value Row 1 Col 1', 'Value Row 1 Col 2'] } - , { 'Left Header 2': ['Value Row 2 Col 1', 'Value Row 2 Col 2'] } -); - -console.log(table.toString()); -``` - -### Custom styles -The ```chars``` property controls how the table is drawn: -```javascript -var table = new Table({ - chars: { 'top': '═' , 'top-mid': '╤' , 'top-left': '╔' , 'top-right': '╗' - , 'bottom': '═' , 'bottom-mid': '╧' , 'bottom-left': '╚' , 'bottom-right': '╝' - , 'left': '║' , 'left-mid': '╟' , 'mid': '─' , 'mid-mid': '┼' - , 'right': '║' , 'right-mid': '╢' , 'middle': '│' } -}); - -table.push( - ['foo', 'bar', 'baz'] - , ['frob', 'bar', 'quuz'] -); - -console.log(table.toString()); -// Outputs: -// -//╔══════╤═════╤══════╗ -//║ foo │ bar │ baz ║ -//╟──────┼─────┼──────╢ -//║ frob │ bar │ quuz ║ -//╚══════╧═════╧══════╝ -``` - -Empty decoration lines will be skipped, to avoid vertical separator rows just -set the 'mid', 'left-mid', 'mid-mid', 'right-mid' to the empty string: -```javascript -var table = new Table({ chars: {'mid': '', 'left-mid': '', 'mid-mid': '', 'right-mid': ''} }); -table.push( - ['foo', 'bar', 'baz'] - , ['frobnicate', 'bar', 'quuz'] -); - -console.log(table.toString()); -// Outputs: (note the lack of the horizontal line between rows) -//┌────────────┬─────┬──────┐ -//│ foo │ bar │ baz │ -//│ frobnicate │ bar │ quuz │ -//└────────────┴─────┴──────┘ -``` - -By setting all chars to empty with the exception of 'middle' being set to a -single space and by setting padding to zero, it's possible to get the most -compact layout with no decorations: -```javascript -var table = new Table({ - chars: { 'top': '' , 'top-mid': '' , 'top-left': '' , 'top-right': '' - , 'bottom': '' , 'bottom-mid': '' , 'bottom-left': '' , 'bottom-right': '' - , 'left': '' , 'left-mid': '' , 'mid': '' , 'mid-mid': '' - , 'right': '' , 'right-mid': '' , 'middle': ' ' }, - style: { 'padding-left': 0, 'padding-right': 0 } -}); - -table.push( - ['foo', 'bar', 'baz'] - , ['frobnicate', 'bar', 'quuz'] -); - -console.log(table.toString()); -// Outputs: -//foo bar baz -//frobnicate bar quuz -``` - -## Build Targets - -Clone the repository and run `yarn install` to install all its submodules, then run one of the following commands: - -###### Run the tests with coverage reports. -```bash -$ yarn test:coverage -``` - -###### Run the tests every time a file changes. -```bash -$ yarn test:watch -``` - -###### Update the documentation. 
-```bash -$ yarn docs -``` - -## Credits - -- James Talmage - author <james.talmage@jrtechnical.com> ([jamestalmage](https://github.com/jamestalmage)) -- Guillermo Rauch - author of the original cli-table <guillermo@learnboost.com> ([Guille](https://github.com/guille)) - -## License - -(The MIT License) - -Copyright (c) 2014 James Talmage <james.talmage@jrtechnical.com> - -Original cli-table code/documentation: Copyright (c) 2010 LearnBoost <dev@learnboost.com> - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/cli-table3/node_modules/string-width/index.js b/node_modules/cli-table3/node_modules/string-width/index.js index a348067f032df..f4d261a96a099 100644 --- a/node_modules/cli-table3/node_modules/string-width/index.js +++ b/node_modules/cli-table3/node_modules/string-width/index.js @@ -4,14 +4,18 @@ const isFullwidthCodePoint = require('is-fullwidth-code-point'); const emojiRegex = require('emoji-regex'); const stringWidth = string => { - string = string.replace(emojiRegex(), ' '); - if (typeof string !== 'string' || string.length === 0) { return 0; } string = stripAnsi(string); + if (string.length === 0) { + return 0; + } + + string = string.replace(emojiRegex(), ' '); + let width = 0; for (let i = 0; i < string.length; i++) { diff --git a/node_modules/cli-table3/node_modules/string-width/package.json b/node_modules/cli-table3/node_modules/string-width/package.json index 5751de5a6464e..b9b20caaf6f1c 100644 --- a/node_modules/cli-table3/node_modules/string-width/package.json +++ b/node_modules/cli-table3/node_modules/string-width/package.json @@ -1,6 +1,6 @@ { "name": "string-width", - "version": "4.2.0", + "version": "4.2.2", "description": "Get the visual width of a string - the number of columns required to display it", "license": "MIT", "repository": "sindresorhus/string-width", diff --git a/node_modules/cli-table3/node_modules/string-width/readme.md b/node_modules/cli-table3/node_modules/string-width/readme.md index 705f206001b77..bdd314129ca74 100644 --- a/node_modules/cli-table3/node_modules/string-width/readme.md +++ b/node_modules/cli-table3/node_modules/string-width/readme.md @@ -1,4 +1,4 @@ -# string-width [![Build Status](https://travis-ci.org/sindresorhus/string-width.svg?branch=master)](https://travis-ci.org/sindresorhus/string-width) +# string-width > Get the visual width of a string - the number of columns required to display it diff --git a/node_modules/clone/.npmignore b/node_modules/clone/.npmignore deleted file mode 100644 index c797cbf396337..0000000000000 --- 
a/node_modules/clone/.npmignore +++ /dev/null @@ -1,4 +0,0 @@ -/node_modules/ -/test.js -/*.html -/.travis.yml diff --git a/node_modules/clone/README.md b/node_modules/clone/README.md deleted file mode 100644 index 0b6cecae29b52..0000000000000 --- a/node_modules/clone/README.md +++ /dev/null @@ -1,126 +0,0 @@ -# clone - -[![build status](https://secure.travis-ci.org/pvorb/node-clone.png)](http://travis-ci.org/pvorb/node-clone) - -[![info badge](https://nodei.co/npm/clone.png?downloads=true&downloadRank=true&stars=true)](http://npm-stat.com/charts.html?package=clone) - -offers foolproof _deep cloning_ of objects, arrays, numbers, strings etc. in JavaScript. - - -## Installation - - npm install clone - -(It also works with browserify, ender or standalone.) - - -## Example - -~~~ javascript -var clone = require('clone'); - -var a, b; - -a = { foo: { bar: 'baz' } }; // initial value of a - -b = clone(a); // clone a -> b -a.foo.bar = 'foo'; // change a - -console.log(a); // show a -console.log(b); // show b -~~~ - -This will print: - -~~~ javascript -{ foo: { bar: 'foo' } } -{ foo: { bar: 'baz' } } -~~~ - -**clone** masters cloning simple objects (even with custom prototype), arrays, -Date objects, and RegExp objects. Everything is cloned recursively, so that you -can clone dates in arrays in objects, for example. - - -## API - -`clone(val, circular, depth)` - - * `val` -- the value that you want to clone, any type allowed - * `circular` -- boolean - - Call `clone` with `circular` set to `false` if you are certain that `obj` - contains no circular references. This will give better performance if needed. - There is no error if `undefined` or `null` is passed as `obj`. - * `depth` -- depth to which the object is to be cloned (optional, - defaults to infinity) - -`clone.clonePrototype(obj)` - - * `obj` -- the object that you want to clone - -Does a prototype clone as -[described by Oran Looney](http://oranlooney.com/functional-javascript/). - - -## Circular References - -~~~ javascript -var a, b; - -a = { hello: 'world' }; - -a.myself = a; -b = clone(a); - -console.log(b); -~~~ - -This will print: - -~~~ javascript -{ hello: "world", myself: [Circular] } -~~~ - -So, `b.myself` points to `b`, not `a`. Neat! - - -## Test - - npm test - - -## Caveat - -Some special objects like a socket or `process.stdout`/`stderr` are known to not -be cloneable. If you find other objects that cannot be cloned, please [open an -issue](https://github.com/pvorb/node-clone/issues/new). - - -## Bugs and Issues - -If you encounter any bugs or issues, feel free to [open an issue at -github](https://github.com/pvorb/node-clone/issues) or send me an email to -<paul@vorba.ch>. I also always like to hear from you, if you’re using my code. - -## License - -Copyright © 2011-2015 [Paul Vorbach](http://paul.vorba.ch/) and -[contributors](https://github.com/pvorb/node-clone/graphs/contributors). - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the “Software”), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/cmd-shim/README.md b/node_modules/cmd-shim/README.md deleted file mode 100644 index 60e6625f37545..0000000000000 --- a/node_modules/cmd-shim/README.md +++ /dev/null @@ -1,34 +0,0 @@ -# cmd-shim - -The cmd-shim used in npm to create executable scripts on Windows, -since symlinks are not suitable for this purpose there. - -On Unix systems, you should use a symbolic link instead. - -[![Build Status](https://img.shields.io/travis/npm/cmd-shim/master.svg)](https://travis-ci.org/npm/cmd-shim) -[![Dependency Status](https://img.shields.io/david/npm/cmd-shim.svg)](https://david-dm.org/npm/cmd-shim) -[![npm version](https://img.shields.io/npm/v/cmd-shim.svg)](https://www.npmjs.com/package/cmd-shim) - -## Installation - -``` -npm install cmd-shim -``` - -## API - -### cmdShim(from, to) -> Promise - -Create a cmd shim at `to` for the command line program at `from`. -e.g. - -```javascript -var cmdShim = require('cmd-shim'); -cmdShim(__dirname + '/cli.js', '/usr/bin/command-name').then(() => { - // shims are created! -}) -``` - -### cmdShim.ifExists(from, to) -> Promise - -The same as above, but will just continue if the file does not exist. diff --git a/node_modules/color-convert/CHANGELOG.md b/node_modules/color-convert/CHANGELOG.md deleted file mode 100644 index 0a7bce4fd570a..0000000000000 --- a/node_modules/color-convert/CHANGELOG.md +++ /dev/null @@ -1,54 +0,0 @@ -# 1.0.0 - 2016-01-07 - -- Removed: unused speed test -- Added: Automatic routing between previously unsupported conversions -([#27](https://github.com/Qix-/color-convert/pull/27)) -- Removed: `xxx2xxx()` and `xxx2xxxRaw()` functions -([#27](https://github.com/Qix-/color-convert/pull/27)) -- Removed: `convert()` class -([#27](https://github.com/Qix-/color-convert/pull/27)) -- Changed: all functions to lookup dictionary -([#27](https://github.com/Qix-/color-convert/pull/27)) -- Changed: `ansi` to `ansi256` -([#27](https://github.com/Qix-/color-convert/pull/27)) -- Fixed: argument grouping for functions requiring only one argument -([#27](https://github.com/Qix-/color-convert/pull/27)) - -# 0.6.0 - 2015-07-23 - -- Added: methods to handle -[ANSI](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors) 16/256 colors: - - rgb2ansi16 - - rgb2ansi - - hsl2ansi16 - - hsl2ansi - - hsv2ansi16 - - hsv2ansi - - hwb2ansi16 - - hwb2ansi - - cmyk2ansi16 - - cmyk2ansi - - keyword2ansi16 - - keyword2ansi - - ansi162rgb - - ansi162hsl - - ansi162hsv - - ansi162hwb - - ansi162cmyk - - ansi162keyword - - ansi2rgb - - ansi2hsl - - ansi2hsv - - ansi2hwb - - ansi2cmyk - - ansi2keyword -([#18](https://github.com/harthur/color-convert/pull/18)) - -# 0.5.3 - 2015-06-02 - -- Fixed: hsl2hsv does not return `NaN` anymore when using `[0,0,0]` -([#15](https://github.com/harthur/color-convert/issues/15)) - ---- - -Check out commit logs for older releases diff --git a/node_modules/color-convert/README.md b/node_modules/color-convert/README.md deleted file mode 100644 index d4b08fc369948..0000000000000 --- a/node_modules/color-convert/README.md +++ /dev/null @@ -1,68 +0,0 @@ -# color-convert - 
-[![Build Status](https://travis-ci.org/Qix-/color-convert.svg?branch=master)](https://travis-ci.org/Qix-/color-convert) - -Color-convert is a color conversion library for JavaScript and node. -It converts all ways between `rgb`, `hsl`, `hsv`, `hwb`, `cmyk`, `ansi`, `ansi16`, `hex` strings, and CSS `keyword`s (will round to closest): - -```js -var convert = require('color-convert'); - -convert.rgb.hsl(140, 200, 100); // [96, 48, 59] -convert.keyword.rgb('blue'); // [0, 0, 255] - -var rgbChannels = convert.rgb.channels; // 3 -var cmykChannels = convert.cmyk.channels; // 4 -var ansiChannels = convert.ansi16.channels; // 1 -``` - -# Install - -```console -$ npm install color-convert -``` - -# API - -Simply get the property of the _from_ and _to_ conversion that you're looking for. - -All functions have a rounded and unrounded variant. By default, return values are rounded. To get the unrounded (raw) results, simply tack on `.raw` to the function. - -All 'from' functions have a hidden property called `.channels` that indicates the number of channels the function expects (not including alpha). - -```js -var convert = require('color-convert'); - -// Hex to LAB -convert.hex.lab('DEADBF'); // [ 76, 21, -2 ] -convert.hex.lab.raw('DEADBF'); // [ 75.56213190997677, 20.653827952644754, -2.290532499330533 ] - -// RGB to CMYK -convert.rgb.cmyk(167, 255, 4); // [ 35, 0, 98, 0 ] -convert.rgb.cmyk.raw(167, 255, 4); // [ 34.509803921568626, 0, 98.43137254901961, 0 ] -``` - -### Arrays -All functions that accept multiple arguments also support passing an array. - -Note that this does **not** apply to functions that convert from a color that only requires one value (e.g. `keyword`, `ansi256`, `hex`, etc.) - -```js -var convert = require('color-convert'); - -convert.rgb.hex(123, 45, 67); // '7B2D43' -convert.rgb.hex([123, 45, 67]); // '7B2D43' -``` - -## Routing - -Conversions that don't have an _explicitly_ defined conversion (in [conversions.js](conversions.js)), but can be converted by means of sub-conversions (e.g. XYZ -> **RGB** -> CMYK), are automatically routed together. This allows just about any color model supported by `color-convert` to be converted to any other model, so long as a sub-conversion path exists. This is also true for conversions requiring more than one step in between (e.g. LCH -> **LAB** -> **XYZ** -> **RGB** -> Hex). - -Keep in mind that extensive conversions _may_ result in a loss of precision, and exist only to be complete. For a list of "direct" (single-step) conversions, see [conversions.js](conversions.js). - -# Contribute - -If there is a new model you would like to support, or want to add a direct conversion between two existing models, please send us a pull request. - -# License -Copyright © 2011-2016, Heather Arthur and Josh Junon. Licensed under the [MIT License](LICENSE). diff --git a/node_modules/color-name/README.md b/node_modules/color-name/README.md deleted file mode 100644 index 932b979176f33..0000000000000 --- a/node_modules/color-name/README.md +++ /dev/null @@ -1,11 +0,0 @@ -A JSON with color names and its values. Based on http://dev.w3.org/csswg/css-color/#named-colors. 
- -[![NPM](https://nodei.co/npm/color-name.png?mini=true)](https://nodei.co/npm/color-name/) - - -```js -var colors = require('color-name'); -colors.red //[255,0,0] -``` - -<a href="LICENSE"><img src="https://upload.wikimedia.org/wikipedia/commons/0/0c/MIT_logo.svg" width="120"/></a> diff --git a/node_modules/color-support/LICENSE b/node_modules/color-support/LICENSE new file mode 100644 index 0000000000000..19129e315fe59 --- /dev/null +++ b/node_modules/color-support/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/color-support/bin.js b/node_modules/color-support/bin.js new file mode 100755 index 0000000000000..3c0a967218083 --- /dev/null +++ b/node_modules/color-support/bin.js @@ -0,0 +1,3 @@ +#!/usr/bin/env node +var colorSupport = require('./')({alwaysReturn: true }) +console.log(JSON.stringify(colorSupport, null, 2)) diff --git a/node_modules/color-support/browser.js b/node_modules/color-support/browser.js new file mode 100644 index 0000000000000..ab5c6631a35b8 --- /dev/null +++ b/node_modules/color-support/browser.js @@ -0,0 +1,14 @@ +module.exports = colorSupport({ alwaysReturn: true }, colorSupport) + +function colorSupport(options, obj) { + obj = obj || {} + options = options || {} + obj.level = 0 + obj.hasBasic = false + obj.has256 = false + obj.has16m = false + if (!options.alwaysReturn) { + return false + } + return obj +} diff --git a/node_modules/color-support/index.js b/node_modules/color-support/index.js new file mode 100644 index 0000000000000..6b6f3b2819424 --- /dev/null +++ b/node_modules/color-support/index.js @@ -0,0 +1,134 @@ +// call it on itself so we can test the export val for basic stuff +module.exports = colorSupport({ alwaysReturn: true }, colorSupport) + +function hasNone (obj, options) { + obj.level = 0 + obj.hasBasic = false + obj.has256 = false + obj.has16m = false + if (!options.alwaysReturn) { + return false + } + return obj +} + +function hasBasic (obj) { + obj.hasBasic = true + obj.has256 = false + obj.has16m = false + obj.level = 1 + return obj +} + +function has256 (obj) { + obj.hasBasic = true + obj.has256 = true + obj.has16m = false + obj.level = 2 + return obj +} + +function has16m (obj) { + obj.hasBasic = true + obj.has256 = true + obj.has16m = true + obj.level = 3 + return obj +} + +function colorSupport (options, obj) { + options = options || {} + + obj = obj || {} + + // if just requesting a specific level, then return that. 
+ if (typeof options.level === 'number') { + switch (options.level) { + case 0: + return hasNone(obj, options) + case 1: + return hasBasic(obj) + case 2: + return has256(obj) + case 3: + return has16m(obj) + } + } + + obj.level = 0 + obj.hasBasic = false + obj.has256 = false + obj.has16m = false + + if (typeof process === 'undefined' || + !process || + !process.stdout || + !process.env || + !process.platform) { + return hasNone(obj, options) + } + + var env = options.env || process.env + var stream = options.stream || process.stdout + var term = options.term || env.TERM || '' + var platform = options.platform || process.platform + + if (!options.ignoreTTY && !stream.isTTY) { + return hasNone(obj, options) + } + + if (!options.ignoreDumb && term === 'dumb' && !env.COLORTERM) { + return hasNone(obj, options) + } + + if (platform === 'win32') { + return hasBasic(obj) + } + + if (env.TMUX) { + return has256(obj) + } + + if (!options.ignoreCI && (env.CI || env.TEAMCITY_VERSION)) { + if (env.TRAVIS) { + return has256(obj) + } else { + return hasNone(obj, options) + } + } + + // TODO: add more term programs + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + var ver = env.TERM_PROGRAM_VERSION || '0.' + if (/^[0-2]\./.test(ver)) { + return has256(obj) + } else { + return has16m(obj) + } + + case 'HyperTerm': + case 'Hyper': + return has16m(obj) + + case 'MacTerm': + return has16m(obj) + + case 'Apple_Terminal': + return has256(obj) + } + + if (/^xterm-256/.test(term)) { + return has256(obj) + } + + if (/^screen|^xterm|^vt100|color|ansi|cygwin|linux/i.test(term)) { + return hasBasic(obj) + } + + if (env.COLORTERM) { + return hasBasic(obj) + } + + return hasNone(obj, options) +} diff --git a/node_modules/color-support/package.json b/node_modules/color-support/package.json new file mode 100644 index 0000000000000..f3e3b77145d6b --- /dev/null +++ b/node_modules/color-support/package.json @@ -0,0 +1,36 @@ +{ + "name": "color-support", + "version": "1.1.3", + "description": "A module which will endeavor to guess your terminal's level of color support.", + "main": "index.js", + "browser": "browser.js", + "bin": "bin.js", + "devDependencies": { + "tap": "^10.3.3" + }, + "scripts": { + "test": "tap test/*.js --100 -J", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/color-support.git" + }, + "keywords": [ + "terminal", + "color", + "support", + "xterm", + "truecolor", + "256" + ], + "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "browser.js", + "index.js", + "bin.js" + ] +} diff --git a/node_modules/colors/README.md b/node_modules/colors/README.md deleted file mode 100644 index fabe558902e9e..0000000000000 --- a/node_modules/colors/README.md +++ /dev/null @@ -1,221 +0,0 @@ -# colors.js -[![Build Status](https://travis-ci.org/Marak/colors.js.svg?branch=master)](https://travis-ci.org/Marak/colors.js) -[![version](https://img.shields.io/npm/v/colors.svg)](https://www.npmjs.org/package/colors) -[![dependencies](https://david-dm.org/Marak/colors.js.svg)](https://david-dm.org/Marak/colors.js) -[![devDependencies](https://david-dm.org/Marak/colors.js/dev-status.svg)](https://david-dm.org/Marak/colors.js#info=devDependencies) - -Please check out the [roadmap](ROADMAP.md) for upcoming features and releases. 
Please open Issues to provide feedback, and check the `develop` branch for the latest bleeding-edge updates. - -## get color and style in your node.js console - -![Demo](https://raw.githubusercontent.com/Marak/colors.js/master/screenshots/colors.png) - -## Installation - - npm install colors - -## colors and styles! - -### text colors - - - black - - red - - green - - yellow - - blue - - magenta - - cyan - - white - - gray - - grey - -### bright text colors - - - brightRed - - brightGreen - - brightYellow - - brightBlue - - brightMagenta - - brightCyan - - brightWhite - -### background colors - - - bgBlack - - bgRed - - bgGreen - - bgYellow - - bgBlue - - bgMagenta - - bgCyan - - bgWhite - - bgGray - - bgGrey - -### bright background colors - - - bgBrightRed - - bgBrightGreen - - bgBrightYellow - - bgBrightBlue - - bgBrightMagenta - - bgBrightCyan - - bgBrightWhite - -### styles - - - reset - - bold - - dim - - italic - - underline - - inverse - - hidden - - strikethrough - -### extras - - - rainbow - - zebra - - america - - trap - - random - - -## Usage - -By popular demand, `colors` now ships with two types of usages! - -The super nifty way - -```js -var colors = require('colors'); - -console.log('hello'.green); // outputs green text -console.log('i like cake and pies'.underline.red) // outputs red underlined text -console.log('inverse the color'.inverse); // inverses the color -console.log('OMG Rainbows!'.rainbow); // rainbow -console.log('Run the trap'.trap); // Drops the bass - -``` - -or a slightly less nifty way which doesn't extend `String.prototype` - -```js -var colors = require('colors/safe'); - -console.log(colors.green('hello')); // outputs green text -console.log(colors.red.underline('i like cake and pies')) // outputs red underlined text -console.log(colors.inverse('inverse the color')); // inverses the color -console.log(colors.rainbow('OMG Rainbows!')); // rainbow -console.log(colors.trap('Run the trap')); // Drops the bass - -``` - -I prefer the first way. Some people seem to be afraid of extending `String.prototype` and prefer the second way. - -If you are writing good code you will never have an issue with the first approach. If you really don't want to touch `String.prototype`, the second usage will not touch `String` native object. - -## Enabling/Disabling Colors - -The package will auto-detect whether your terminal can use colors and enable/disable accordingly. When colors are disabled, the color functions do nothing. 
You can override this with a command-line flag: - -```bash -node myapp.js --no-color -node myapp.js --color=false - -node myapp.js --color -node myapp.js --color=true -node myapp.js --color=always - -FORCE_COLOR=1 node myapp.js -``` - -Or in code: - -```javascript -var colors = require('colors'); -colors.enable(); -colors.disable(); -``` - -## Console.log [string substitution](http://nodejs.org/docs/latest/api/console.html#console_console_log_data) - -```js -var name = 'Marak'; -console.log(colors.green('Hello %s'), name); -// outputs -> 'Hello Marak' -``` - -## Custom themes - -### Using standard API - -```js - -var colors = require('colors'); - -colors.setTheme({ - silly: 'rainbow', - input: 'grey', - verbose: 'cyan', - prompt: 'grey', - info: 'green', - data: 'grey', - help: 'cyan', - warn: 'yellow', - debug: 'blue', - error: 'red' -}); - -// outputs red text -console.log("this is an error".error); - -// outputs yellow text -console.log("this is a warning".warn); -``` - -### Using string safe API - -```js -var colors = require('colors/safe'); - -// set single property -var error = colors.red; -error('this is red'); - -// set theme -colors.setTheme({ - silly: 'rainbow', - input: 'grey', - verbose: 'cyan', - prompt: 'grey', - info: 'green', - data: 'grey', - help: 'cyan', - warn: 'yellow', - debug: 'blue', - error: 'red' -}); - -// outputs red text -console.log(colors.error("this is an error")); - -// outputs yellow text -console.log(colors.warn("this is a warning")); - -``` - -### Combining Colors - -```javascript -var colors = require('colors'); - -colors.setTheme({ - custom: ['red', 'underline'] -}); - -console.log('test'.custom); -``` - -*Protip: There is a secret undocumented style in `colors`. If you find the style you can summon him.* diff --git a/node_modules/common-ancestor-path/README.md b/node_modules/common-ancestor-path/README.md deleted file mode 100644 index 2e876437359d6..0000000000000 --- a/node_modules/common-ancestor-path/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# common-ancestor-path - -Find the common ancestor of 2 or more paths on Windows or Unix - -## USAGE - -Give it two or more path strings, and it'll do the thing. - -```js -const ancestor = require('common-ancestor-path') - -// output /a/b -console.log(ancestor('/a/b/c/d', '/a/b/x/y/z', '/a/b/c/i/j/k')) - -// normalizes separators, but NOT cases, since it matters sometimes -console.log(ancestor('C:\\a\\b\\c', 'C:\\a\\b\\x')) - -// no common ancestor on different windows drive letters -// so, this returns null -console.log(ancestor('c:\\a\\b\\c', 'd:\\d\\e\\f')) -``` - -## API - -`commonAncestorPath(...paths)` - -Returns the nearest (deepest) common ancestor path, or `null` if on -different roots on Windows. diff --git a/node_modules/concat-map/.travis.yml b/node_modules/concat-map/.travis.yml deleted file mode 100644 index f1d0f13c8a54d..0000000000000 --- a/node_modules/concat-map/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -language: node_js -node_js: - - 0.4 - - 0.6 diff --git a/node_modules/concat-map/README.markdown b/node_modules/concat-map/README.markdown deleted file mode 100644 index 408f70a1be473..0000000000000 --- a/node_modules/concat-map/README.markdown +++ /dev/null @@ -1,62 +0,0 @@ -concat-map -========== - -Concatenative mapdashery. 
- -[![browser support](http://ci.testling.com/substack/node-concat-map.png)](http://ci.testling.com/substack/node-concat-map) - -[![build status](https://secure.travis-ci.org/substack/node-concat-map.png)](http://travis-ci.org/substack/node-concat-map) - -example -======= - -``` js -var concatMap = require('concat-map'); -var xs = [ 1, 2, 3, 4, 5, 6 ]; -var ys = concatMap(xs, function (x) { - return x % 2 ? [ x - 0.1, x, x + 0.1 ] : []; -}); -console.dir(ys); -``` - -*** - -``` -[ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ] -``` - -methods -======= - -``` js -var concatMap = require('concat-map') -``` - -concatMap(xs, fn) ------------------ - -Return an array of concatenated elements by calling `fn(x, i)` for each element -`x` and each index `i` in the array `xs`. - -When `fn(x, i)` returns an array, its result will be concatenated with the -result array. If `fn(x, i)` returns anything else, that value will be pushed -onto the end of the result array. - -install -======= - -With [npm](http://npmjs.org) do: - -``` -npm install concat-map -``` - -license -======= - -MIT - -notes -===== - -This module was written while sitting high above the ground in a tree. diff --git a/node_modules/console-control-strings/README.md b/node_modules/console-control-strings/README.md deleted file mode 100644 index f58cc8d892506..0000000000000 --- a/node_modules/console-control-strings/README.md +++ /dev/null @@ -1,145 +0,0 @@ -# Console Control Strings - -A library of cross-platform tested terminal/console command strings for -doing things like color and cursor positioning. This is a subset of both -ansi and vt100. All control codes included work on both Windows & Unix-like -OSes, except where noted. - -## Usage - -```js -var consoleControl = require('console-control-strings') - -console.log(consoleControl.color('blue','bgRed', 'bold') + 'hi there' + consoleControl.color('reset')) -process.stdout.write(consoleControl.goto(75, 10)) -``` - -## Why Another? - -There are tons of libraries similar to this one. I wanted one that was: - -1. Very clear about compatibility goals. -2. Could emit, for instance, a start color code without an end one. -3. Returned strings w/o writing to streams. -4. Was not weighed down with other unrelated baggage. - -## Functions - -### var code = consoleControl.up(_num = 1_) - -Returns the escape sequence to move _num_ lines up. - -### var code = consoleControl.down(_num = 1_) - -Returns the escape sequence to move _num_ lines down. - -### var code = consoleControl.forward(_num = 1_) - -Returns the escape sequence to move _num_ lines right. - -### var code = consoleControl.back(_num = 1_) - -Returns the escape sequence to move _num_ lines left. - -### var code = consoleControl.nextLine(_num = 1_) - -Returns the escape sequence to move _num_ lines down and to the beginning of -the line. - -### var code = consoleControl.previousLine(_num = 1_) - -Returns the escape sequence to move _num_ lines up and to the beginning of -the line. - -### var code = consoleControl.eraseData() - -Returns the escape sequence to erase everything from the current cursor -position to the bottom right of the screen. This is line based, so it -erases the remainder of the current line and all following lines. - -### var code = consoleControl.eraseLine() - -Returns the escape sequence to erase to the end of the current line. - -### var code = consoleControl.goto(_x_, _y_) - -Returns the escape sequence to move the cursor to the designated position. -Note that the origin is _1, 1_ not _0, 0_.
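As a rough composite illustration (a minimal sketch using only the calls documented in this README; how the result renders depends on your terminal):

```js
var consoleControl = require('console-control-strings')

// Move the cursor to column 1, row 5 (the origin is 1, 1), clear the rest of
// that line, then print a bold green message and reset the attributes.
process.stdout.write(consoleControl.goto(1, 5))
process.stdout.write(consoleControl.eraseLine())
process.stdout.write(
  consoleControl.color('green', 'bold') + 'ok' + consoleControl.color('reset') + '\n'
)
```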
- -### var code = consoleControl.gotoSOL() - -Returns the escape sequence to move the cursor to the beginning of the -current line. (That is, it returns a carriage return, `\r`.) - -### var code = consoleControl.beep() - -Returns the escape sequence to cause the terminal to beep. (That is, it -returns unicode character `\x0007`, a Control-G.) - -### var code = consoleControl.hideCursor() - -Returns the escape sequence to hide the cursor. - -### var code = consoleControl.showCursor() - -Returns the escape sequence to show the cursor. - -### var code = consoleControl.color(_colors = []_) - -### var code = consoleControl.color(_color1_, _color2_, _…_, _colorn_) - -Returns the escape sequence to set the current terminal display attributes -(mostly colors). Arguments can either be a list of attributes or an array -of attributes. The difference between passing in an array or list of colors -and calling `.color` separately for each one, is that in the former case a -single escape sequence will be produced where as in the latter each change -will have its own distinct escape sequence. Each attribute can be one of: - -* Reset: - * **reset** – Reset all attributes to the terminal default. -* Styles: - * **bold** – Display text as bold. In some terminals this means using a - bold font, in others this means changing the color. In some it means - both. - * **italic** – Display text as italic. This is not available in most Windows terminals. - * **underline** – Underline text. This is not available in most Windows Terminals. - * **inverse** – Invert the foreground and background colors. - * **stopBold** – Do not display text as bold. - * **stopItalic** – Do not display text as italic. - * **stopUnderline** – Do not underline text. - * **stopInverse** – Do not invert foreground and background. -* Colors: - * **white** - * **black** - * **blue** - * **cyan** - * **green** - * **magenta** - * **red** - * **yellow** - * **grey** / **brightBlack** - * **brightRed** - * **brightGreen** - * **brightYellow** - * **brightBlue** - * **brightMagenta** - * **brightCyan** - * **brightWhite** -* Background Colors: - * **bgWhite** - * **bgBlack** - * **bgBlue** - * **bgCyan** - * **bgGreen** - * **bgMagenta** - * **bgRed** - * **bgYellow** - * **bgGrey** / **bgBrightBlack** - * **bgBrightRed** - * **bgBrightGreen** - * **bgBrightYellow** - * **bgBrightBlue** - * **bgBrightMagenta** - * **bgBrightCyan** - * **bgBrightWhite** - diff --git a/node_modules/console-control-strings/README.md~ b/node_modules/console-control-strings/README.md~ deleted file mode 100644 index 6eb34e89d17de..0000000000000 --- a/node_modules/console-control-strings/README.md~ +++ /dev/null @@ -1,140 +0,0 @@ -# Console Control Strings - -A library of cross-platform tested terminal/console command strings for -doing things like color and cursor positioning. This is a subset of both -ansi and vt100. All control codes included work on both Windows & Unix-like -OSes, except where noted. - -## Usage - -```js -var consoleControl = require('console-control-strings') - -console.log(consoleControl.color('blue','bgRed', 'bold') + 'hi there' + consoleControl.color('reset')) -process.stdout.write(consoleControl.goto(75, 10)) -``` - -## Why Another? - -There are tons of libraries similar to this one. I wanted one that was: - -1. Very clear about compatibility goals. -2. Could emit, for instance, a start color code without an end one. -3. Returned strings w/o writing to streams. -4. Was not weighed down with other unrelated baggage.
- -## Functions - -### var code = consoleControl.up(_num = 1_) - -Returns the escape sequence to move _num_ lines up. - -### var code = consoleControl.down(_num = 1_) - -Returns the escape sequence to move _num_ lines down. - -### var code = consoleControl.forward(_num = 1_) - -Returns the escape sequence to move _num_ lines righ. - -### var code = consoleControl.back(_num = 1_) - -Returns the escape sequence to move _num_ lines left. - -### var code = consoleControl.nextLine(_num = 1_) - -Returns the escape sequence to move _num_ lines down and to the beginning of -the line. - -### var code = consoleControl.previousLine(_num = 1_) - -Returns the escape sequence to move _num_ lines up and to the beginning of -the line. - -### var code = consoleControl.eraseData() - -Returns the escape sequence to erase everything from the current cursor -position to the bottom right of the screen. This is line based, so it -erases the remainder of the current line and all following lines. - -### var code = consoleControl.eraseLine() - -Returns the escape sequence to erase to the end of the current line. - -### var code = consoleControl.goto(_x_, _y_) - -Returns the escape sequence to move the cursor to the designated position. -Note that the origin is _1, 1_ not _0, 0_. - -### var code = consoleControl.gotoSOL() - -Returns the escape sequence to move the cursor to the beginning of the -current line. (That is, it returns a carriage return, `\r`.) - -### var code = consoleControl.hideCursor() - -Returns the escape sequence to hide the cursor. - -### var code = consoleControl.showCursor() - -Returns the escape sequence to show the cursor. - -### var code = consoleControl.color(_colors = []_) - -### var code = consoleControl.color(_color1_, _color2_, _…_, _colorn_) - -Returns the escape sequence to set the current terminal display attributes -(mostly colors). Arguments can either be a list of attributes or an array -of attributes. The difference between passing in an array or list of colors -and calling `.color` separately for each one, is that in the former case a -single escape sequence will be produced where as in the latter each change -will have its own distinct escape sequence. Each attribute can be one of: - -* Reset: - * **reset** – Reset all attributes to the terminal default. -* Styles: - * **bold** – Display text as bold. In some terminals this means using a - bold font, in others this means changing the color. In some it means - both. - * **italic** – Display text as italic. This is not available in most Windows terminals. - * **underline** – Underline text. This is not available in most Windows Terminals. - * **inverse** – Invert the foreground and background colors. - * **stopBold** – Do not display text as bold. - * **stopItalic** – Do not display text as italic. - * **stopUnderline** – Do not underline text. - * **stopInverse** – Do not invert foreground and background. 
-* Colors: - * **white** - * **black** - * **blue** - * **cyan** - * **green** - * **magenta** - * **red** - * **yellow** - * **grey** / **brightBlack** - * **brightRed** - * **brightGreen** - * **brightYellow** - * **brightBlue** - * **brightMagenta** - * **brightCyan** - * **brightWhite** -* Background Colors: - * **bgWhite** - * **bgBlack** - * **bgBlue** - * **bgCyan** - * **bgGreen** - * **bgMagenta** - * **bgRed** - * **bgYellow** - * **bgGrey** / **bgBrightBlack** - * **bgBrightRed** - * **bgBrightGreen** - * **bgBrightYellow** - * **bgBrightBlue** - * **bgBrightMagenta** - * **bgBrightCyan** - * **bgBrightWhite** - diff --git a/node_modules/core-util-is/README.md b/node_modules/core-util-is/README.md deleted file mode 100644 index 5a76b4149c5eb..0000000000000 --- a/node_modules/core-util-is/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# core-util-is - -The `util.is*` functions introduced in Node v0.12. diff --git a/node_modules/dashdash/README.md b/node_modules/dashdash/README.md deleted file mode 100644 index e47b106e637d2..0000000000000 --- a/node_modules/dashdash/README.md +++ /dev/null @@ -1,574 +0,0 @@ -A light, featureful and explicit option parsing library for node.js. - -[Why another one? See below](#why). tl;dr: The others I've tried are one of -too loosey goosey (not explicit), too big/too many deps, or ill specified. -YMMV. - -Follow <a href="https://twitter.com/intent/user?screen_name=trentmick" target="_blank">@trentmick</a> -for updates to node-dashdash. - -# Install - - npm install dashdash - - -# Usage - -```javascript -var dashdash = require('dashdash'); - -// Specify the options. Minimally `name` (or `names`) and `type` -// must be given for each. -var options = [ - { - // `names` or a single `name`. First element is the `opts.KEY`. - names: ['help', 'h'], - // See "Option specs" below for types. - type: 'bool', - help: 'Print this help and exit.' - } -]; - -// Shortcut form. As called it infers `process.argv`. See below for -// the longer form to use methods like `.help()` on the Parser object. -var opts = dashdash.parse({options: options}); - -console.log("opts:", opts); -console.log("args:", opts._args); -``` - - -# Longer Example - -A more realistic [starter script "foo.js"](./examples/foo.js) is as follows. -This also shows using `parser.help()` for formatted option help. - -```javascript -var dashdash = require('./lib/dashdash'); - -var options = [ - { - name: 'version', - type: 'bool', - help: 'Print tool version and exit.' - }, - { - names: ['help', 'h'], - type: 'bool', - help: 'Print this help and exit.' - }, - { - names: ['verbose', 'v'], - type: 'arrayOfBool', - help: 'Verbose output. Use multiple times for more verbose.' - }, - { - names: ['file', 'f'], - type: 'string', - help: 'File to process', - helpArg: 'FILE' - } -]; - -var parser = dashdash.createParser({options: options}); -try { - var opts = parser.parse(process.argv); -} catch (e) { - console.error('foo: error: %s', e.message); - process.exit(1); -} - -console.log("# opts:", opts); -console.log("# args:", opts._args); - -// Use `parser.help()` for formatted options help. -if (opts.help) { - var help = parser.help({includeEnv: true}).trimRight(); - console.log('usage: node foo.js [OPTIONS]\n' - + 'options:\n' - + help); - process.exit(0); -} - -// ... 
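// (Illustrative addition, not part of the original foo.js example: anything
// left over after option parsing is collected in opts._args, as the sample
// output below shows.)
opts._args.forEach(function (arg) {
    console.log('positional arg: %s', arg);
});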
-``` - - -Some example output from this script (foo.js): - -``` -$ node foo.js -h -# opts: { help: true, - _order: [ { name: 'help', value: true, from: 'argv' } ], - _args: [] } -# args: [] -usage: node foo.js [OPTIONS] -options: - --version Print tool version and exit. - -h, --help Print this help and exit. - -v, --verbose Verbose output. Use multiple times for more verbose. - -f FILE, --file=FILE File to process - -$ node foo.js -v -# opts: { verbose: [ true ], - _order: [ { name: 'verbose', value: true, from: 'argv' } ], - _args: [] } -# args: [] - -$ node foo.js --version arg1 -# opts: { version: true, - _order: [ { name: 'version', value: true, from: 'argv' } ], - _args: [ 'arg1' ] } -# args: [ 'arg1' ] - -$ node foo.js -f bar.txt -# opts: { file: 'bar.txt', - _order: [ { name: 'file', value: 'bar.txt', from: 'argv' } ], - _args: [] } -# args: [] - -$ node foo.js -vvv --file=blah -# opts: { verbose: [ true, true, true ], - file: 'blah', - _order: - [ { name: 'verbose', value: true, from: 'argv' }, - { name: 'verbose', value: true, from: 'argv' }, - { name: 'verbose', value: true, from: 'argv' }, - { name: 'file', value: 'blah', from: 'argv' } ], - _args: [] } -# args: [] -``` - - -See the ["examples"](examples/) dir for a number of starter examples using -some of dashdash's features. - - -# Environment variable integration - -If you want to allow environment variables to specify options to your tool, -dashdash makes this easy. We can change the 'verbose' option in the example -above to include an 'env' field: - -```javascript - { - names: ['verbose', 'v'], - type: 'arrayOfBool', - env: 'FOO_VERBOSE', // <--- add this line - help: 'Verbose output. Use multiple times for more verbose.' - }, -``` - -then the **"FOO_VERBOSE" environment variable** can be used to set this -option: - -```shell -$ FOO_VERBOSE=1 node foo.js -# opts: { verbose: [ true ], - _order: [ { name: 'verbose', value: true, from: 'env' } ], - _args: [] } -# args: [] -``` - -Boolean options will interpret the empty string as unset, '0' as false -and anything else as true. - -```shell -$ FOO_VERBOSE= node examples/foo.js # not set -# opts: { _order: [], _args: [] } -# args: [] - -$ FOO_VERBOSE=0 node examples/foo.js # '0' is false -# opts: { verbose: [ false ], - _order: [ { key: 'verbose', value: false, from: 'env' } ], - _args: [] } -# args: [] - -$ FOO_VERBOSE=1 node examples/foo.js # true -# opts: { verbose: [ true ], - _order: [ { key: 'verbose', value: true, from: 'env' } ], - _args: [] } -# args: [] - -$ FOO_VERBOSE=boogabooga node examples/foo.js # true -# opts: { verbose: [ true ], - _order: [ { key: 'verbose', value: true, from: 'env' } ], - _args: [] } -# args: [] -``` - -Non-booleans can be used as well. Strings: - -```shell -$ FOO_FILE=data.txt node examples/foo.js -# opts: { file: 'data.txt', - _order: [ { key: 'file', value: 'data.txt', from: 'env' } ], - _args: [] } -# args: [] -``` - -Numbers: - -```shell -$ FOO_TIMEOUT=5000 node examples/foo.js -# opts: { timeout: 5000, - _order: [ { key: 'timeout', value: 5000, from: 'env' } ], - _args: [] } -# args: [] - -$ FOO_TIMEOUT=blarg node examples/foo.js -foo: error: arg for "FOO_TIMEOUT" is not a positive integer: "blarg" -``` - -With the `includeEnv: true` config to `parser.help()` the environment -variable can also be included in **help output**: - - usage: node foo.js [OPTIONS] - options: - --version Print tool version and exit. - -h, --help Print this help and exit. - -v, --verbose Verbose output. Use multiple times for more verbose. 
- Environment: FOO_VERBOSE=1 - -f FILE, --file=FILE File to process - - -# Bash completion - -Dashdash provides a simple way to create a Bash completion file that you -can place in your "bash_completion.d" directory -- sometimes that is -"/usr/local/etc/bash_completion.d/"). Features: - -- Support for short and long opts -- Support for knowing which options take arguments -- Support for subcommands (e.g. 'git log <TAB>' to show just options for the - log subcommand). See - [node-cmdln](https://github.com/trentm/node-cmdln#bash-completion) for - how to integrate that. -- Does the right thing with "--" to stop options. -- Custom optarg and arg types for custom completions. - -Dashdash will return bash completion file content given a parser instance: - - var parser = dashdash.createParser({options: options}); - console.log( parser.bashCompletion({name: 'mycli'}) ); - -or directly from a `options` array of options specs: - - var code = dashdash.bashCompletionFromOptions({ - name: 'mycli', - options: OPTIONS - }); - -Write that content to "/usr/local/etc/bash_completion.d/mycli" and you will -have Bash completions for `mycli`. Alternatively you can write it to -any file (e.g. "~/.bashrc") and source it. - -You could add a `--completion` hidden option to your tool that emits the -completion content and document for your users to call that to install -Bash completions. - -See [examples/ddcompletion.js](examples/ddcompletion.js) for a complete -example, including how one can define bash functions for completion of custom -option types. Also see [node-cmdln](https://github.com/trentm/node-cmdln) for -how it uses this for Bash completion for full multi-subcommand tools. - -- TODO: document specExtra -- TODO: document includeHidden -- TODO: document custom types, `function complete\_FOO` guide, completionType -- TODO: document argtypes - - -# Parser config - -Parser construction (i.e. `dashdash.createParser(CONFIG)`) takes the -following fields: - -- `options` (Array of option specs). Required. See the - [Option specs](#option-specs) section below. - -- `interspersed` (Boolean). Optional. Default is true. If true this allows - interspersed arguments and options. I.e.: - - node ./tool.js -v arg1 arg2 -h # '-h' is after interspersed args - - Set it to false to have '-h' **not** get parsed as an option in the above - example. - -- `allowUnknown` (Boolean). Optional. Default is false. If false, this causes - unknown arguments to throw an error. I.e.: - - node ./tool.js -v arg1 --afe8asefksjefhas - - Set it to true to treat the unknown option as a positional - argument. - - **Caveat**: When a shortopt group, such as `-xaz` contains a mix of - known and unknown options, the *entire* group is passed through - unmolested as a positional argument. - - Consider if you have a known short option `-a`, and parse the - following command line: - - node ./tool.js -xaz - - where `-x` and `-z` are unknown. There are multiple ways to - interpret this: - - 1. `-x` takes a value: `{x: 'az'}` - 2. `-x` and `-z` are both booleans: `{x:true,a:true,z:true}` - - Since dashdash does not know what `-x` and `-z` are, it can't know - if you'd prefer to receive `{a:true,_args:['-x','-z']}` or - `{x:'az'}`, or `{_args:['-xaz']}`. Leaving the positional arg unprocessed - is the easiest mistake for the user to recover from. - - -# Option specs - -Example using all fields (required fields are noted): - -```javascript -{ - names: ['file', 'f'], // Required (one of `names` or `name`). - type: 'string', // Required. 
- completionType: 'filename', - env: 'MYTOOL_FILE', - help: 'Config file to load before running "mytool"', - helpArg: 'PATH', - helpWrap: false, - default: path.resolve(process.env.HOME, '.mytoolrc') -} -``` - -Each option spec in the `options` array must/can have the following fields: - -- `name` (String) or `names` (Array). Required. These give the option name - and aliases. The first name (if more than one given) is the key for the - parsed `opts` object. - -- `type` (String). Required. One of: - - - bool - - string - - number - - integer - - positiveInteger - - date (epoch seconds, e.g. 1396031701, or ISO 8601 format - `YYYY-MM-DD[THH:MM:SS[.sss][Z]]`, e.g. "2014-03-28T18:35:01.489Z") - - arrayOfBool - - arrayOfString - - arrayOfNumber - - arrayOfInteger - - arrayOfPositiveInteger - - arrayOfDate - - FWIW, these names attempt to match with asserts on - [assert-plus](https://github.com/mcavage/node-assert-plus). - You can add your own custom option types with `dashdash.addOptionType`. - See below. - -- `completionType` (String). Optional. This is used for [Bash - completion](#bash-completion) for an option argument. If not specified, - then the value of `type` is used. Any string may be specified, but only the - following values have meaning: - - - `none`: Provide no completions. - - `file`: Bash's default completion (i.e. `complete -o default`), which - includes filenames. - - *Any string FOO for which a `function complete_FOO` Bash function is - defined.* This is for custom completions for a given tool. Typically - these custom functions are provided in the `specExtra` argument to - `dashdash.bashCompletionFromOptions()`. See - ["examples/ddcompletion.js"](examples/ddcompletion.js) for an example. - -- `env` (String or Array of String). Optional. An environment variable name - (or names) that can be used as a fallback for this option. For example, - given a "foo.js" like this: - - var options = [{names: ['dry-run', 'n'], env: 'FOO_DRY_RUN'}]; - var opts = dashdash.parse({options: options}); - - Both `node foo.js --dry-run` and `FOO_DRY_RUN=1 node foo.js` would result - in `opts.dry_run = true`. - - An environment variable is only used as a fallback, i.e. it is ignored if - the associated option is given in `argv`. - -- `help` (String). Optional. Used for `parser.help()` output. - -- `helpArg` (String). Optional. Used in help output as the placeholder for - the option argument, e.g. the "PATH" in: - - ... - -f PATH, --file=PATH File to process - ... - -- `helpWrap` (Boolean). Optional, default true. Set this to `false` to have - that option's `help` *not* be text wrapped in `<parser>.help()` output. - -- `default`. Optional. A default value used for this option, if the - option isn't specified in argv. - -- `hidden` (Boolean). Optional, default false. If true, help output will not - include this option. See also the `includeHidden` option to - `bashCompletionFromOptions()` for [Bash completion](#bash-completion). - - -# Option group headings - -You can add headings between option specs in the `options` array. To do so, -simply add an object with only a `group` property -- the string to print as -the heading for the subsequent options in the array. For example: - -```javascript -var options = [ - { - group: 'Armament Options' - }, - { - names: [ 'weapon', 'w' ], - type: 'string' - }, - { - group: 'General Options' - }, - { - names: [ 'help', 'h' ], - type: 'bool' - } -]; -... 
-``` - -Note: You can use an empty string, `{group: ''}`, to get a blank line in help -output between groups of options. - - -# Help config - -The `parser.help(...)` function is configurable as follows: - - Options: - Armament Options: - ^^ -w WEAPON, --weapon=WEAPON Weapon with which to crush. One of: | - / sword, spear, maul | - / General Options: | - / -h, --help Print this help and exit. | - / ^^^^ ^ | - \ `-- indent `-- helpCol maxCol ---' - `-- headingIndent - -- `indent` (Number or String). Default 4. Set to a number (for that many - spaces) or a string for the literal indent. -- `headingIndent` (Number or String). Default half length of `indent`. Set to - a number (for that many spaces) or a string for the literal indent. This - indent applies to group heading lines, between normal option lines. -- `nameSort` (String). Default is 'length'. By default the names are - sorted to put the short opts first (i.e. '-h, --help' preferred - to '--help, -h'). Set to 'none' to not do this sorting. -- `maxCol` (Number). Default 80. Note that reflow is just done on whitespace - so a long token in the option help can overflow maxCol. -- `helpCol` (Number). If not set a reasonable value will be determined - between `minHelpCol` and `maxHelpCol`. -- `minHelpCol` (Number). Default 20. -- `maxHelpCol` (Number). Default 40. -- `helpWrap` (Boolean). Default true. Set to `false` to have option `help` - strings *not* be textwrapped to the helpCol..maxCol range. -- `includeEnv` (Boolean). Default false. If the option has associated - environment variables (via the `env` option spec attribute), then - append mentioned of those envvars to the help string. -- `includeDefault` (Boolean). Default false. If the option has a default value - (via the `default` option spec attribute, or a default on the option's type), - then a "Default: VALUE" string will be appended to the help string. - - -# Custom option types - -Dashdash includes a good starter set of option types that it will parse for -you. However, you can add your own via: - - var dashdash = require('dashdash'); - dashdash.addOptionType({ - name: '...', - takesArg: true, - helpArg: '...', - parseArg: function (option, optstr, arg) { - ... - }, - array: false, // optional - arrayFlatten: false, // optional - default: ..., // optional - completionType: ... // optional - }); - -For example, a simple option type that accepts 'yes', 'y', 'no' or 'n' as -a boolean argument would look like: - - var dashdash = require('dashdash'); - - function parseYesNo(option, optstr, arg) { - var argLower = arg.toLowerCase() - if (~['yes', 'y'].indexOf(argLower)) { - return true; - } else if (~['no', 'n'].indexOf(argLower)) { - return false; - } else { - throw new Error(format( - 'arg for "%s" is not "yes" or "no": "%s"', - optstr, arg)); - } - } - - dashdash.addOptionType({ - name: 'yesno' - takesArg: true, - helpArg: '<yes|no>', - parseArg: parseYesNo - }); - - var options = { - {names: ['answer', 'a'], type: 'yesno'} - }; - var opts = dashdash.parse({options: options}); - -See "examples/custom-option-\*.js" for other examples. -See the `addOptionType` block comment in "lib/dashdash.js" for more details. -Please let me know [with an -issue](https://github.com/trentm/node-dashdash/issues/new) if you write a -generally useful one. - - - -# Why - -Why another node.js option parsing lib? - -- `nopt` really is just for "tools like npm". Implicit opts (e.g. '--no-foo' - works for every '--foo'). Can't disable abbreviated opts. Can't do multiple - usages of same opt, e.g. 
'-vvv' (I think). Can't do grouped short opts. - -- `optimist` has surprise interpretation of options (at least to me). - Implicit opts mean ambiguities and poor error handling for fat-fingering. - `process.exit` calls makes it hard to use as a libary. - -- `optparse` Incomplete docs. Is this an attempted clone of Python's `optparse`. - Not clear. Some divergence. `parser.on("name", ...)` API is weird. - -- `argparse` Dep on underscore. No thanks just for option processing. - `find lib | wc -l` -> `26`. Overkill. - Argparse is a bit different anyway. Not sure I want that. - -- `posix-getopt` No type validation. Though that isn't a killer. AFAIK can't - have a long opt without a short alias. I.e. no `getopt_long` semantics. - Also, no whizbang features like generated help output. - -- ["commander.js"](https://github.com/visionmedia/commander.js): I wrote - [a critique](http://trentm.com/2014/01/a-critique-of-commander-for-nodejs.html) - a while back. It seems fine, but last I checked had - [an outstanding bug](https://github.com/visionmedia/commander.js/pull/121) - that would prevent me from using it. - - -# License - -MIT. See LICENSE.txt. diff --git a/node_modules/debug/README.md b/node_modules/debug/README.md deleted file mode 100644 index 88dae35d9fc95..0000000000000 --- a/node_modules/debug/README.md +++ /dev/null @@ -1,455 +0,0 @@ -# debug -[![Build Status](https://travis-ci.org/visionmedia/debug.svg?branch=master)](https://travis-ci.org/visionmedia/debug) [![Coverage Status](https://coveralls.io/repos/github/visionmedia/debug/badge.svg?branch=master)](https://coveralls.io/github/visionmedia/debug?branch=master) [![Slack](https://visionmedia-community-slackin.now.sh/badge.svg)](https://visionmedia-community-slackin.now.sh/) [![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) -[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) - -<img width="647" src="https://user-images.githubusercontent.com/71256/29091486-fa38524c-7c37-11e7-895f-e7ec8e1039b6.png"> - -A tiny JavaScript debugging utility modelled after Node.js core's debugging -technique. Works in Node.js and web browsers. - -## Installation - -```bash -$ npm install debug -``` - -## Usage - -`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. - -Example [_app.js_](./examples/node/app.js): - -```js -var debug = require('debug')('http') - , http = require('http') - , name = 'My App'; - -// fake app - -debug('booting %o', name); - -http.createServer(function(req, res){ - debug(req.method + ' ' + req.url); - res.end('hello\n'); -}).listen(3000, function(){ - debug('listening'); -}); - -// fake worker of some kind - -require('./worker'); -``` - -Example [_worker.js_](./examples/node/worker.js): - -```js -var a = require('debug')('worker:a') - , b = require('debug')('worker:b'); - -function work() { - a('doing lots of uninteresting work'); - setTimeout(work, Math.random() * 1000); -} - -work(); - -function workb() { - b('doing some work'); - setTimeout(workb, Math.random() * 2000); -} - -workb(); -``` - -The `DEBUG` environment variable is then used to enable these based on space or -comma-delimited names. 
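For instance, a minimal invocation (an illustrative sketch, not part of the original README) that enables the namespaces defined in the app.js/worker.js examples above might look like:

```bash
# comma-delimited
$ DEBUG=http,worker:a,worker:b node app.js

# or space-delimited
$ DEBUG="http worker:a worker:b" node app.js
```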
- -Here are some examples: - -<img width="647" alt="screen shot 2017-08-08 at 12 53 04 pm" src="https://user-images.githubusercontent.com/71256/29091703-a6302cdc-7c38-11e7-8304-7c0b3bc600cd.png"> -<img width="647" alt="screen shot 2017-08-08 at 12 53 38 pm" src="https://user-images.githubusercontent.com/71256/29091700-a62a6888-7c38-11e7-800b-db911291ca2b.png"> -<img width="647" alt="screen shot 2017-08-08 at 12 53 25 pm" src="https://user-images.githubusercontent.com/71256/29091701-a62ea114-7c38-11e7-826a-2692bedca740.png"> - -#### Windows command prompt notes - -##### CMD - -On Windows the environment variable is set using the `set` command. - -```cmd -set DEBUG=*,-not_this -``` - -Example: - -```cmd -set DEBUG=* & node app.js -``` - -##### PowerShell (VS Code default) - -PowerShell uses different syntax to set environment variables. - -```cmd -$env:DEBUG = "*,-not_this" -``` - -Example: - -```cmd -$env:DEBUG='app';node app.js -``` - -Then, run the program to be debugged as usual. - -npm script example: -```js - "windowsDebug": "@powershell -Command $env:DEBUG='*';node app.js", -``` - -## Namespace Colors - -Every debug instance has a color generated for it based on its namespace name. -This helps when visually parsing the debug output to identify which debug instance -a debug line belongs to. - -#### Node.js - -In Node.js, colors are enabled when stderr is a TTY. You also _should_ install -the [`supports-color`](https://npmjs.org/supports-color) module alongside debug, -otherwise debug will only use a small handful of basic colors. - -<img width="521" src="https://user-images.githubusercontent.com/71256/29092181-47f6a9e6-7c3a-11e7-9a14-1928d8a711cd.png"> - -#### Web Browser - -Colors are also enabled on "Web Inspectors" that understand the `%c` formatting -option. These are WebKit web inspectors, Firefox ([since version -31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) -and the Firebug plugin for Firefox (any version). - -<img width="524" src="https://user-images.githubusercontent.com/71256/29092033-b65f9f2e-7c39-11e7-8e32-f6f0d8e865c1.png"> - - -## Millisecond diff - -When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls. - -<img width="647" src="https://user-images.githubusercontent.com/71256/29091486-fa38524c-7c37-11e7-895f-e7ec8e1039b6.png"> - -When stdout is not a TTY, `Date#toISOString()` is used, making it more useful for logging the debug information as shown below: - -<img width="647" src="https://user-images.githubusercontent.com/71256/29091956-6bd78372-7c39-11e7-8c55-c948396d6edd.png"> - - -## Conventions - -If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". If you append a "*" to the end of your name, it will always be enabled regardless of the setting of the DEBUG environment variable. You can then use it for normal output as well as debug output. - -## Wildcards - -The `*` character may be used as a wildcard. 
Suppose for example your library has -debuggers named "connect:bodyParser", "connect:compress", "connect:session", -instead of listing all three with -`DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do -`DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. - -You can also exclude specific debuggers by prefixing them with a "-" character. -For example, `DEBUG=*,-connect:*` would include all debuggers except those -starting with "connect:". - -## Environment Variables - -When running through Node.js, you can set a few environment variables that will -change the behavior of the debug logging: - -| Name | Purpose | -|-----------|-------------------------------------------------| -| `DEBUG` | Enables/disables specific debugging namespaces. | -| `DEBUG_HIDE_DATE` | Hide date from debug output (non-TTY). | -| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | -| `DEBUG_DEPTH` | Object inspection depth. | -| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | - - -__Note:__ The environment variables beginning with `DEBUG_` end up being -converted into an Options object that gets used with `%o`/`%O` formatters. -See the Node.js documentation for -[`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) -for the complete list. - -## Formatters - -Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. -Below are the officially supported formatters: - -| Formatter | Representation | -|-----------|----------------| -| `%O` | Pretty-print an Object on multiple lines. | -| `%o` | Pretty-print an Object all on a single line. | -| `%s` | String. | -| `%d` | Number (both integer and float). | -| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. | -| `%%` | Single percent sign ('%'). This does not consume an argument. | - - -### Custom formatters - -You can add custom formatters by extending the `debug.formatters` object. -For example, if you wanted to add support for rendering a Buffer as hex with -`%h`, you could do something like: - -```js -const createDebug = require('debug') -createDebug.formatters.h = (v) => { - return v.toString('hex') -} - -// …elsewhere -const debug = createDebug('foo') -debug('this is hex: %h', new Buffer('hello world')) -// foo this is hex: 68656c6c6f20776f726c6421 +0ms -``` - - -## Browser Support - -You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), -or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), -if you don't want to build it yourself. - -Debug's enable state is currently persisted by `localStorage`. -Consider the situation shown below where you have `worker:a` and `worker:b`, -and wish to debug both. You can enable this using `localStorage.debug`: - -```js -localStorage.debug = 'worker:*' -``` - -And then refresh the page. 
-
-```js
-a = debug('worker:a');
-b = debug('worker:b');
-
-setInterval(function(){
-  a('doing some work');
-}, 1000);
-
-setInterval(function(){
-  b('doing some work');
-}, 1200);
-```
-
-
-## Output streams
-
-  By default `debug` will log to stderr; however, this can be configured per-namespace by overriding the `log` method:
-
-Example [_stdout.js_](./examples/node/stdout.js):
-
-```js
-var debug = require('debug');
-var error = debug('app:error');
-
-// by default stderr is used
-error('goes to stderr!');
-
-var log = debug('app:log');
-// set this namespace to log via console.log
-log.log = console.log.bind(console); // don't forget to bind to console!
-log('goes to stdout');
-error('still goes to stderr!');
-
-// set all output to go via console.info
-// overrides all per-namespace log settings
-debug.log = console.info.bind(console);
-error('now goes to stdout via console.info');
-log('still goes to stdout, but via console.info now');
-```
-
-## Extend
-You can simply extend a debugger:
-```js
-const log = require('debug')('auth');
-
-// creates a new debug instance with an extended namespace
-const logSign = log.extend('sign');
-const logLogin = log.extend('login');
-
-log('hello'); // auth hello
-logSign('hello'); // auth:sign hello
-logLogin('hello'); // auth:login hello
-```
-
-## Set dynamically
-
-You can also enable debug dynamically by calling the `enable()` method:
-
-```js
-let debug = require('debug');
-
-console.log(1, debug.enabled('test'));
-
-debug.enable('test');
-console.log(2, debug.enabled('test'));
-
-debug.disable();
-console.log(3, debug.enabled('test'));
-
-```
-
-This prints:
-```
-1 false
-2 true
-3 false
-```
-
-Usage:
-`enable(namespaces)`
-`namespaces` can include modes separated by a colon and wildcards.
-
-Note that calling `enable()` completely overrides the previously set `DEBUG` variable:
-
-```
-$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))'
-=> false
-```
-
-`disable()`
-
-Will disable all namespaces. The function returns the namespaces currently
-enabled (and skipped). This can be useful if you want to disable debugging
-temporarily without knowing what was enabled to begin with.
-
-For example:
-
-```js
-let debug = require('debug');
-debug.enable('foo:*,-foo:bar');
-let namespaces = debug.disable();
-debug.enable(namespaces);
-```
-
-Note: There is no guarantee that the string will be identical to the initial
-enable string, but semantically they will be identical.
-
-## Checking whether a debug target is enabled
-
-After you've created a debug instance, you can determine whether or not it is
-enabled by checking the `enabled` property:
-
-```javascript
-const debug = require('debug')('http');
-
-if (debug.enabled) {
-  // do stuff...
-}
-```
-
-You can also manually toggle this property to force the debug instance to be
-enabled or disabled.
-
-
-## Authors
-
- - TJ Holowaychuk
- - Nathan Rajlich
- - Andrew Rhyne
-
-## Backers
-
-Support us with a monthly donation and help us continue our activities.
[[Become a backer](https://opencollective.com/debug#backer)] - -<a href="https://opencollective.com/debug/backer/0/website" target="_blank"><img src="https://opencollective.com/debug/backer/0/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/1/website" target="_blank"><img src="https://opencollective.com/debug/backer/1/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/2/website" target="_blank"><img src="https://opencollective.com/debug/backer/2/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/3/website" target="_blank"><img src="https://opencollective.com/debug/backer/3/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/4/website" target="_blank"><img src="https://opencollective.com/debug/backer/4/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/5/website" target="_blank"><img src="https://opencollective.com/debug/backer/5/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/6/website" target="_blank"><img src="https://opencollective.com/debug/backer/6/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/7/website" target="_blank"><img src="https://opencollective.com/debug/backer/7/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/8/website" target="_blank"><img src="https://opencollective.com/debug/backer/8/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/9/website" target="_blank"><img src="https://opencollective.com/debug/backer/9/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/10/website" target="_blank"><img src="https://opencollective.com/debug/backer/10/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/11/website" target="_blank"><img src="https://opencollective.com/debug/backer/11/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/12/website" target="_blank"><img src="https://opencollective.com/debug/backer/12/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/13/website" target="_blank"><img src="https://opencollective.com/debug/backer/13/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/14/website" target="_blank"><img src="https://opencollective.com/debug/backer/14/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/15/website" target="_blank"><img src="https://opencollective.com/debug/backer/15/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/16/website" target="_blank"><img src="https://opencollective.com/debug/backer/16/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/17/website" target="_blank"><img src="https://opencollective.com/debug/backer/17/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/18/website" target="_blank"><img src="https://opencollective.com/debug/backer/18/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/19/website" target="_blank"><img src="https://opencollective.com/debug/backer/19/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/20/website" target="_blank"><img src="https://opencollective.com/debug/backer/20/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/21/website" target="_blank"><img src="https://opencollective.com/debug/backer/21/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/22/website" target="_blank"><img src="https://opencollective.com/debug/backer/22/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/23/website" target="_blank"><img 
src="https://opencollective.com/debug/backer/23/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/24/website" target="_blank"><img src="https://opencollective.com/debug/backer/24/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/25/website" target="_blank"><img src="https://opencollective.com/debug/backer/25/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/26/website" target="_blank"><img src="https://opencollective.com/debug/backer/26/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/27/website" target="_blank"><img src="https://opencollective.com/debug/backer/27/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/28/website" target="_blank"><img src="https://opencollective.com/debug/backer/28/avatar.svg"></a> -<a href="https://opencollective.com/debug/backer/29/website" target="_blank"><img src="https://opencollective.com/debug/backer/29/avatar.svg"></a> - - -## Sponsors - -Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)] - -<a href="https://opencollective.com/debug/sponsor/0/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/0/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/1/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/1/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/2/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/2/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/3/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/3/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/4/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/4/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/5/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/5/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/6/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/6/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/7/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/7/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/8/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/8/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/9/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/9/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/10/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/10/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/11/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/11/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/12/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/12/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/13/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/13/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/14/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/14/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/15/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/15/avatar.svg"></a> -<a 
href="https://opencollective.com/debug/sponsor/16/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/16/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/17/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/17/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/18/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/18/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/19/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/19/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/20/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/20/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/21/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/21/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/22/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/22/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/23/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/23/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/24/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/24/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/25/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/25/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/26/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/26/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/27/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/27/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/28/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/28/avatar.svg"></a> -<a href="https://opencollective.com/debug/sponsor/29/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/29/avatar.svg"></a> - -## License - -(The MIT License) - -Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca> - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/debug/package.json b/node_modules/debug/package.json index da809d2b8d28b..b7d70acb9bee8 100644 --- a/node_modules/debug/package.json +++ b/node_modules/debug/package.json @@ -1,6 +1,6 @@ { "name": "debug", - "version": "4.3.1", + "version": "4.3.2", "repository": { "type": "git", "url": "git://github.com/visionmedia/debug.git" diff --git a/node_modules/debug/src/common.js b/node_modules/debug/src/common.js index 392a8e005a063..50ce2925101d7 100644 --- a/node_modules/debug/src/common.js +++ b/node_modules/debug/src/common.js @@ -60,6 +60,8 @@ function setup(env) { function createDebug(namespace) { let prevTime; let enableOverride = null; + let namespacesCache; + let enabledCache; function debug(...args) { // Disabled? @@ -120,7 +122,17 @@ function setup(env) { Object.defineProperty(debug, 'enabled', { enumerable: true, configurable: false, - get: () => enableOverride === null ? createDebug.enabled(namespace) : enableOverride, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, set: v => { enableOverride = v; } @@ -149,6 +161,7 @@ function setup(env) { */ function enable(namespaces) { createDebug.save(namespaces); + createDebug.namespaces = namespaces; createDebug.names = []; createDebug.skips = []; diff --git a/node_modules/debuglog/README.md b/node_modules/debuglog/README.md deleted file mode 100644 index dc6fccecc32f0..0000000000000 --- a/node_modules/debuglog/README.md +++ /dev/null @@ -1,40 +0,0 @@ -# debuglog - backport of util.debuglog() from node v0.11 - -To facilitate using the `util.debuglog()` function that will be available when -node v0.12 is released now, this is a copy extracted from the source. - -## require('debuglog') - -Return `util.debuglog`, if it exists, otherwise it will return an internal copy -of the implementation from node v0.11. - -## debuglog(section) - -* `section` {String} The section of the program to be debugged -* Returns: {Function} The logging function - -This is used to create a function which conditionally writes to stderr -based on the existence of a `NODE_DEBUG` environment variable. If the -`section` name appears in that environment variable, then the returned -function will be similar to `console.error()`. If not, then the -returned function is a no-op. - -For example: - -```javascript -var debuglog = util.debuglog('foo'); - -var bar = 123; -debuglog('hello from foo [%d]', bar); -``` - -If this program is run with `NODE_DEBUG=foo` in the environment, then -it will output something like: - - FOO 3245: hello from foo [123] - -where `3245` is the process id. If it is not run with that -environment variable set, then it will not print anything. - -You may separate multiple `NODE_DEBUG` environment variables with a -comma. For example, `NODE_DEBUG=fs,net,tls`. 
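The debuglog example above calls `util.debuglog` directly; a sketch of the same flow through the module's own export (which falls back to the bundled copy when `util.debuglog` is unavailable) could look like this — illustrative only, assuming nothing beyond the API described above:

```js
// Returns util.debuglog when available, otherwise the bundled backport.
var debuglog = require('debuglog');

var log = debuglog('foo');
var bar = 123;
log('hello from foo [%d]', bar);
// Prints "FOO <pid>: hello from foo [123]" only when NODE_DEBUG includes "foo"
// (e.g. NODE_DEBUG=foo node app.js); otherwise the returned function is a no-op.
```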
diff --git a/node_modules/defaults/.npmignore b/node_modules/defaults/.npmignore deleted file mode 100644 index 3c3629e647f5d..0000000000000 --- a/node_modules/defaults/.npmignore +++ /dev/null @@ -1 +0,0 @@ -node_modules diff --git a/node_modules/defaults/README.md b/node_modules/defaults/README.md deleted file mode 100644 index 1a4a2ea9c919e..0000000000000 --- a/node_modules/defaults/README.md +++ /dev/null @@ -1,43 +0,0 @@ -# defaults - -A simple one level options merge utility - -## install - -`npm install defaults` - -## use - -```javascript - -var defaults = require('defaults'); - -var handle = function(options, fn) { - options = defaults(options, { - timeout: 100 - }); - - setTimeout(function() { - fn(options); - }, options.timeout); -} - -handle({ timeout: 1000 }, function() { - // we're here 1000 ms later -}); - -handle({ timeout: 10000 }, function() { - // we're here 10s later -}); - -``` - -## summary - -this module exports a function that takes 2 arguments: `options` and `defaults`. When called, it overrides all of `undefined` properties in `options` with the clones of properties defined in `defaults` - -Sidecases: if called with a falsy `options` value, options will be initialized to a new object before being merged onto. - -## license - -[MIT](LICENSE) diff --git a/node_modules/delayed-stream/.npmignore b/node_modules/delayed-stream/.npmignore deleted file mode 100644 index 9daeafb9864cf..0000000000000 --- a/node_modules/delayed-stream/.npmignore +++ /dev/null @@ -1 +0,0 @@ -test diff --git a/node_modules/delegates/.npmignore b/node_modules/delegates/.npmignore deleted file mode 100644 index c2658d7d1b318..0000000000000 --- a/node_modules/delegates/.npmignore +++ /dev/null @@ -1 +0,0 @@ -node_modules/ diff --git a/node_modules/dezalgo/.travis.yml b/node_modules/dezalgo/.travis.yml deleted file mode 100644 index e1bcee1acd90c..0000000000000 --- a/node_modules/dezalgo/.travis.yml +++ /dev/null @@ -1,7 +0,0 @@ -language: node_js -before_script: npm install -g npm@latest -node_js: - - '0.8' - - '0.10' - - '0.12' - - 'iojs' diff --git a/node_modules/dezalgo/README.md b/node_modules/dezalgo/README.md deleted file mode 100644 index bdfc8ba80d075..0000000000000 --- a/node_modules/dezalgo/README.md +++ /dev/null @@ -1,29 +0,0 @@ -# dezalgo - -Contain async insanity so that the dark pony lord doesn't eat souls - -See [this blog -post](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony). - -## USAGE - -Pass a callback to `dezalgo` and it will ensure that it is *always* -called in a future tick, and never in this tick. - -```javascript -var dz = require('dezalgo') - -var cache = {} -function maybeSync(arg, cb) { - cb = dz(cb) - - // this will actually defer to nextTick - if (cache[arg]) cb(null, cache[arg]) - - fs.readFile(arg, function (er, data) { - // since this is *already* defered, it will call immediately - if (er) cb(er) - cb(null, cache[arg] = data) - }) -} -``` diff --git a/node_modules/diff/README.md b/node_modules/diff/README.md deleted file mode 100644 index be7b4ec8a5b24..0000000000000 --- a/node_modules/diff/README.md +++ /dev/null @@ -1,208 +0,0 @@ -# jsdiff - -[![Build Status](https://secure.travis-ci.org/kpdecker/jsdiff.svg)](http://travis-ci.org/kpdecker/jsdiff) -[![Sauce Test Status](https://saucelabs.com/buildstatus/jsdiff)](https://saucelabs.com/u/jsdiff) - -A javascript text differencing implementation. 
- -Based on the algorithm proposed in -["An O(ND) Difference Algorithm and its Variations" (Myers, 1986)](http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.4.6927). - -## Installation -```bash -npm install diff --save -``` - -## API - -* `Diff.diffChars(oldStr, newStr[, options])` - diffs two blocks of text, comparing character by character. - - Returns a list of change objects (See below). - - Options - * `ignoreCase`: `true` to ignore casing difference. Defaults to `false`. - -* `Diff.diffWords(oldStr, newStr[, options])` - diffs two blocks of text, comparing word by word, ignoring whitespace. - - Returns a list of change objects (See below). - - Options - * `ignoreCase`: Same as in `diffChars`. - -* `Diff.diffWordsWithSpace(oldStr, newStr[, options])` - diffs two blocks of text, comparing word by word, treating whitespace as significant. - - Returns a list of change objects (See below). - -* `Diff.diffLines(oldStr, newStr[, options])` - diffs two blocks of text, comparing line by line. - - Options - * `ignoreWhitespace`: `true` to ignore leading and trailing whitespace. This is the same as `diffTrimmedLines` - * `newlineIsToken`: `true` to treat newline characters as separate tokens. This allows for changes to the newline structure to occur independently of the line content and to be treated as such. In general this is the more human friendly form of `diffLines` and `diffLines` is better suited for patches and other computer friendly output. - - Returns a list of change objects (See below). - -* `Diff.diffTrimmedLines(oldStr, newStr[, options])` - diffs two blocks of text, comparing line by line, ignoring leading and trailing whitespace. - - Returns a list of change objects (See below). - -* `Diff.diffSentences(oldStr, newStr[, options])` - diffs two blocks of text, comparing sentence by sentence. - - Returns a list of change objects (See below). - -* `Diff.diffCss(oldStr, newStr[, options])` - diffs two blocks of text, comparing CSS tokens. - - Returns a list of change objects (See below). - -* `Diff.diffJson(oldObj, newObj[, options])` - diffs two JSON objects, comparing the fields defined on each. The order of fields, etc does not matter in this comparison. - - Returns a list of change objects (See below). - -* `Diff.diffArrays(oldArr, newArr[, options])` - diffs two arrays, comparing each item for strict equality (===). - - Options - * `comparator`: `function(left, right)` for custom equality checks - - Returns a list of change objects (See below). - -* `Diff.createTwoFilesPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader)` - creates a unified diff patch. - - Parameters: - * `oldFileName` : String to be output in the filename section of the patch for the removals - * `newFileName` : String to be output in the filename section of the patch for the additions - * `oldStr` : Original string value - * `newStr` : New string value - * `oldHeader` : Additional information to include in the old file header - * `newHeader` : Additional information to include in the new file header - * `options` : An object with options. Currently, only `context` is supported and describes how many lines of context should be included. - -* `Diff.createPatch(fileName, oldStr, newStr, oldHeader, newHeader)` - creates a unified diff patch. - - Just like Diff.createTwoFilesPatch, but with oldFileName being equal to newFileName. - - -* `Diff.structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options)` - returns an object with an array of hunk objects. 
-
-    This method is similar to createTwoFilesPatch, but returns a data structure
-    suitable for further processing. Parameters are the same as createTwoFilesPatch. The data structure returned may look like this:
-
-    ```js
-    {
-      oldFileName: 'oldfile', newFileName: 'newfile',
-      oldHeader: 'header1', newHeader: 'header2',
-      hunks: [{
-        oldStart: 1, oldLines: 3, newStart: 1, newLines: 3,
-        lines: [' line2', ' line3', '-line4', '+line5', '\\ No newline at end of file'],
-      }]
-    }
-    ```
-
-* `Diff.applyPatch(source, patch[, options])` - applies a unified diff patch.
-
-    Returns a string containing the new version of the provided data. `patch` may be a string diff or the output from the `parsePatch` or `structuredPatch` methods.
-
-    The optional `options` object may have the following keys:
-
-    - `fuzzFactor`: Number of lines that are allowed to differ before rejecting a patch. Defaults to 0.
-    - `compareLine(lineNumber, line, operation, patchContent)`: Callback used to compare two given lines to determine if they should be considered equal when patching. Defaults to strict equality but may be overridden to provide fuzzier comparison. Should return false if the lines should be rejected.
-
-* `Diff.applyPatches(patch, options)` - applies one or more patches.
-
-    This method will iterate over the contents of the patch and apply it to data provided through callbacks. The general flow for each patch index is:
-
-    - `options.loadFile(index, callback)` is called. The caller should then load the contents of the file and then pass that to the `callback(err, data)` callback. Passing an `err` will terminate further patch execution.
-    - `options.patched(index, content, callback)` is called once the patch has been applied. `content` will be the return value from `applyPatch`. When it's ready, the caller should call the `callback(err)` callback. Passing an `err` will terminate further patch execution.
-
-    Once all patches have been applied or an error occurs, the `options.complete(err)` callback is made.
-
-* `Diff.parsePatch(diffStr)` - Parses a patch into structured data
-
-    Returns a JSON object representation of a patch, suitable for use with the `applyPatch` method. This parses to the same structure returned by `Diff.structuredPatch`.
-
-* `convertChangesToXML(changes)` - converts a list of changes to a serialized XML format
-
-
-All methods above which accept the optional `callback` method will run in sync mode when that parameter is omitted and in async mode when supplied. This allows for larger diffs without blocking the event loop. This may be passed either directly as the final parameter or as the `callback` field in the `options` object.
-
-### Change Objects
-Many of the methods above return change objects. These objects consist of the following fields:
-
-* `value`: Text content
-* `added`: True if the value was inserted into the new string
-* `removed`: True if the value was removed from the old string
-
-Note that some cases may omit a particular flag field. Comparison on the flag fields should always be done in a truthy or falsy manner.
-
-## Examples
-
-Basic example in Node
-
-```js
-require('colors');
-const Diff = require('diff');
-
-const one = 'beep boop';
-const other = 'beep boob blah';
-
-const diff = Diff.diffChars(one, other);
-
-diff.forEach((part) => {
-  // green for additions, red for deletions
-  // grey for common parts
-  const color = part.added ? 'green' :
-    part.removed ?
'red' : 'grey'; - process.stderr.write(part.value[color]); -}); - -console.log(); -``` -Running the above program should yield - -<img src="images/node_example.png" alt="Node Example"> - -Basic example in a web page - -```html -<pre id="display"></pre> -<script src="diff.js"></script> -<script> -const one = 'beep boop', - other = 'beep boob blah', - color = ''; - -let span = null; - -const diff = Diff.diffChars(one, other), - display = document.getElementById('display'), - fragment = document.createDocumentFragment(); - -diff.forEach((part) => { - // green for additions, red for deletions - // grey for common parts - const color = part.added ? 'green' : - part.removed ? 'red' : 'grey'; - span = document.createElement('span'); - span.style.color = color; - span.appendChild(document - .createTextNode(part.value)); - fragment.appendChild(span); -}); - -display.appendChild(fragment); -</script> -``` - -Open the above .html file in a browser and you should see - -<img src="images/web_example.png" alt="Node Example"> - -**[Full online demo](http://kpdecker.github.com/jsdiff)** - -## Compatibility - -[![Sauce Test Status](https://saucelabs.com/browser-matrix/jsdiff.svg)](https://saucelabs.com/u/jsdiff) - -jsdiff supports all ES3 environments with some known issues on IE8 and below. Under these browsers some diff algorithms such as word diff and others may fail due to lack of support for capturing groups in the `split` operation. - -## License - -See [LICENSE](https://github.com/kpdecker/jsdiff/blob/master/LICENSE). diff --git a/node_modules/ecc-jsbn/README.md b/node_modules/ecc-jsbn/README.md deleted file mode 100755 index b5d0b9de965cd..0000000000000 --- a/node_modules/ecc-jsbn/README.md +++ /dev/null @@ -1,8 +0,0 @@ -ecc-jsbn -======== - -ECC package based on [jsbn](https://github.com/andyperlitch/jsbn) from [Tom Wu](http://www-cs-students.stanford.edu/~tjw/). - -This is a subset of the same interface as the [node compiled module](https://github.com/quartzjer/ecc), but works in the browser too. - -Also uses point compression now from [https://github.com/kaielvin](https://github.com/kaielvin/jsbn-ec-point-compression). diff --git a/node_modules/emoji-regex/README.md b/node_modules/emoji-regex/README.md deleted file mode 100644 index f10e173335047..0000000000000 --- a/node_modules/emoji-regex/README.md +++ /dev/null @@ -1,73 +0,0 @@ -# emoji-regex [![Build status](https://travis-ci.org/mathiasbynens/emoji-regex.svg?branch=master)](https://travis-ci.org/mathiasbynens/emoji-regex) - -_emoji-regex_ offers a regular expression to match all emoji symbols (including textual representations of emoji) as per the Unicode Standard. - -This repository contains a script that generates this regular expression based on [the data from Unicode v12](https://github.com/mathiasbynens/unicode-12.0.0). Because of this, the regular expression can easily be updated whenever new emoji are added to the Unicode standard. - -## Installation - -Via [npm](https://www.npmjs.com/): - -```bash -npm install emoji-regex -``` - -In [Node.js](https://nodejs.org/): - -```js -const emojiRegex = require('emoji-regex'); -// Note: because the regular expression has the global flag set, this module -// exports a function that returns the regex rather than exporting the regular -// expression itself, to make it impossible to (accidentally) mutate the -// original regular expression. 
- -const text = ` -\u{231A}: ⌚ default emoji presentation character (Emoji_Presentation) -\u{2194}\u{FE0F}: ↔️ default text presentation character rendered as emoji -\u{1F469}: 👩 emoji modifier base (Emoji_Modifier_Base) -\u{1F469}\u{1F3FF}: 👩🏿 emoji modifier base followed by a modifier -`; - -const regex = emojiRegex(); -let match; -while (match = regex.exec(text)) { - const emoji = match[0]; - console.log(`Matched sequence ${ emoji } — code points: ${ [...emoji].length }`); -} -``` - -Console output: - -``` -Matched sequence ⌚ — code points: 1 -Matched sequence ⌚ — code points: 1 -Matched sequence ↔️ — code points: 2 -Matched sequence ↔️ — code points: 2 -Matched sequence 👩 — code points: 1 -Matched sequence 👩 — code points: 1 -Matched sequence 👩🏿 — code points: 2 -Matched sequence 👩🏿 — code points: 2 -``` - -To match emoji in their textual representation as well (i.e. emoji that are not `Emoji_Presentation` symbols and that aren’t forced to render as emoji by a variation selector), `require` the other regex: - -```js -const emojiRegex = require('emoji-regex/text.js'); -``` - -Additionally, in environments which support ES2015 Unicode escapes, you may `require` ES2015-style versions of the regexes: - -```js -const emojiRegex = require('emoji-regex/es2015/index.js'); -const emojiRegexText = require('emoji-regex/es2015/text.js'); -``` - -## Author - -| [![twitter/mathias](https://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") | -|---| -| [Mathias Bynens](https://mathiasbynens.be/) | - -## License - -_emoji-regex_ is available under the [MIT](https://mths.be/mit) license. diff --git a/node_modules/encoding/.prettierrc.js b/node_modules/encoding/.prettierrc.js deleted file mode 100644 index 3f83654ec845a..0000000000000 --- a/node_modules/encoding/.prettierrc.js +++ /dev/null @@ -1,8 +0,0 @@ -module.exports = { - printWidth: 160, - tabWidth: 4, - singleQuote: true, - endOfLine: 'lf', - trailingComma: 'none', - arrowParens: 'avoid' -}; diff --git a/node_modules/encoding/.travis.yml b/node_modules/encoding/.travis.yml deleted file mode 100644 index abc4f48cdd940..0000000000000 --- a/node_modules/encoding/.travis.yml +++ /dev/null @@ -1,25 +0,0 @@ -language: node_js -sudo: false -node_js: - - "0.10" - - 0.12 - - iojs - - 4 - - 5 -env: - - CXX=g++-4.8 -addons: - apt: - sources: - - ubuntu-toolchain-r-test - packages: - - g++-4.8 -notifications: - email: - - andris@kreata.ee - webhooks: - urls: - - https://webhooks.gitter.im/e/0ed18fd9b3e529b3c2cc - on_success: change # options: [always|never|change] default: always - on_failure: always # options: [always|never|change] default: always - on_start: false # default: false diff --git a/node_modules/encoding/README.md b/node_modules/encoding/README.md deleted file mode 100644 index 618891888169e..0000000000000 --- a/node_modules/encoding/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# Encoding - -**encoding** is a simple wrapper around [iconv-lite](https://github.com/ashtuchkin/iconv-lite/) to convert strings from one encoding to another. - -[![Build Status](https://secure.travis-ci.org/andris9/encoding.svg)](http://travis-ci.org/andris9/Nodemailer) -[![npm version](https://badge.fury.io/js/encoding.svg)](http://badge.fury.io/js/encoding) - -Initially _encoding_ was a wrapper around _node-iconv_ (main) and _iconv-lite_ (fallback) and was used as the encoding layer for Nodemailer/mailparser. 
Somehow it also ended up as a dependency for a bunch of other project, none of these actually using _node-iconv_. The loading mechanics caused issues for front-end projects and Nodemailer/malparser had moved on, so _node-iconv_ was removed. - -## Install - -Install through npm - - npm install encoding - -## Usage - -Require the module - - var encoding = require("encoding"); - -Convert with encoding.convert() - - var resultBuffer = encoding.convert(text, toCharset, fromCharset); - -Where - -- **text** is either a Buffer or a String to be converted -- **toCharset** is the characterset to convert the string -- **fromCharset** (_optional_, defaults to UTF-8) is the source charset - -Output of the conversion is always a Buffer object. - -Example - - var result = encoding.convert("ÕÄÖÜ", "Latin_1"); - console.log(result); //<Buffer d5 c4 d6 dc> - -## License - -**MIT** diff --git a/node_modules/env-paths/index.d.ts b/node_modules/env-paths/index.d.ts index e57fa8f661f1f..277ddc0a183c9 100644 --- a/node_modules/env-paths/index.d.ts +++ b/node_modules/env-paths/index.d.ts @@ -13,26 +13,56 @@ declare namespace envPaths { export interface Paths { /** Directory for data files. + + Example locations (with the default `nodejs` suffix): + + - macOS: `~/Library/Application Support/MyApp-nodejs` + - Windows: `%LOCALAPPDATA%\MyApp-nodejs\Data` (for example, `C:\Users\USERNAME\AppData\Local\MyApp-nodejs\Data`) + - Linux: `~/.local/share/MyApp-nodejs` (or `$XDG_DATA_HOME/MyApp-nodejs`) */ readonly data: string; /** Directory for data files. + + Example locations (with the default `nodejs` suffix): + + - macOS: `~/Library/Preferences/MyApp-nodejs` + - Windows: `%APPDATA%\MyApp-nodejs\Config` (for example, `C:\Users\USERNAME\AppData\Roaming\MyApp-nodejs\Config`) + - Linux: `~/.config/MyApp-nodejs` (or `$XDG_CONFIG_HOME/MyApp-nodejs`) */ readonly config: string; /** Directory for non-essential data files. + + Example locations (with the default `nodejs` suffix): + + - macOS: `~/Library/Caches/MyApp-nodejs` + - Windows: `%LOCALAPPDATA%\MyApp-nodejs\Cache` (for example, `C:\Users\USERNAME\AppData\Local\MyApp-nodejs\Cache`) + - Linux: `~/.cache/MyApp-nodejs` (or `$XDG_CACHE_HOME/MyApp-nodejs`) */ readonly cache: string; /** Directory for log files. + + Example locations (with the default `nodejs` suffix): + + - macOS: `~/Library/Logs/MyApp-nodejs` + - Windows: `%LOCALAPPDATA%\MyApp-nodejs\Log` (for example, `C:\Users\USERNAME\AppData\Local\MyApp-nodejs\Log`) + - Linux: `~/.local/state/MyApp-nodejs` (or `$XDG_STATE_HOME/MyApp-nodejs`) */ readonly log: string; /** Directory for temporary files. + + Example locations (with the default `nodejs` suffix): + + - macOS: `/var/folders/jf/f2twvvvs5jl_m49tf034ffpw0000gn/T/MyApp-nodejs` + - Windows: `%LOCALAPPDATA%\Temp\MyApp-nodejs` (for example, `C:\Users\USERNAME\AppData\Local\Temp\MyApp-nodejs`) + - Linux: `/tmp/USERNAME/MyApp-nodejs` */ readonly temp: string; } @@ -42,6 +72,8 @@ declare const envPaths: { /** Get paths for storing things like data, config, cache, etc. + Note: It only generates the path strings. It doesn't create the directories for you. You could use [`make-dir`](https://github.com/sindresorhus/make-dir) to create the directories. + @param name - Name of your project. Used to generate the paths. @returns The paths to use for your project on current OS. 
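The doc comments added to `index.d.ts` above describe where each directory lands; a minimal usage sketch of the same API (hedged — `env-paths` only generates the path strings and never creates directories) is:

```js
const envPaths = require('env-paths');

// The default suffix is 'nodejs', so the generated name is e.g. "MyApp-nodejs".
const paths = envPaths('MyApp');
console.log(paths.data);   // e.g. ~/.local/share/MyApp-nodejs on Linux
console.log(paths.config); // e.g. ~/Library/Preferences/MyApp-nodejs on macOS

// Create the directories yourself if you need them to exist:
// fs.mkdirSync(paths.cache, { recursive: true });
```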
diff --git a/node_modules/env-paths/package.json b/node_modules/env-paths/package.json index ea4e1d53edeb0..fae4ebcf20c67 100644 --- a/node_modules/env-paths/package.json +++ b/node_modules/env-paths/package.json @@ -1,6 +1,6 @@ { "name": "env-paths", - "version": "2.2.0", + "version": "2.2.1", "description": "Get paths for storing things like data, config, cache, etc", "license": "MIT", "repository": "sindresorhus/env-paths", diff --git a/node_modules/env-paths/readme.md b/node_modules/env-paths/readme.md index ec3439316f9a1..b66d571af48df 100644 --- a/node_modules/env-paths/readme.md +++ b/node_modules/env-paths/readme.md @@ -1,4 +1,4 @@ -# env-paths [![Build Status](https://travis-ci.org/sindresorhus/env-paths.svg?branch=master)](https://travis-ci.org/sindresorhus/env-paths) +# env-paths > Get paths for storing things like data, config, cache, etc @@ -29,7 +29,9 @@ paths.config ## API -### paths = envPaths(name, [options]) +### paths = envPaths(name, options?) + +Note: It only generates the path strings. It doesn't create the directories for you. You could use [`make-dir`](https://github.com/sindresorhus/make-dir) to create the directories. #### name @@ -39,7 +41,7 @@ Name of your project. Used to generate the paths. #### options -Type: `Object` +Type: `object` ##### suffix @@ -54,23 +56,60 @@ apps. Pass an empty string to disable it. Directory for data files. +Example locations (with the default `nodejs` [suffix](#suffix)): + +- macOS: `~/Library/Application Support/MyApp-nodejs` +- Windows: `%LOCALAPPDATA%\MyApp-nodejs\Data` (for example, `C:\Users\USERNAME\AppData\Local\MyApp-nodejs\Data`) +- Linux: `~/.local/share/MyApp-nodejs` (or `$XDG_DATA_HOME/MyApp-nodejs`) + ### paths.config Directory for config files. +Example locations (with the default `nodejs` [suffix](#suffix)): + +- macOS: `~/Library/Preferences/MyApp-nodejs` +- Windows: `%APPDATA%\MyApp-nodejs\Config` (for example, `C:\Users\USERNAME\AppData\Roaming\MyApp-nodejs\Config`) +- Linux: `~/.config/MyApp-nodejs` (or `$XDG_CONFIG_HOME/MyApp-nodejs`) + ### paths.cache Directory for non-essential data files. +Example locations (with the default `nodejs` [suffix](#suffix)): + +- macOS: `~/Library/Caches/MyApp-nodejs` +- Windows: `%LOCALAPPDATA%\MyApp-nodejs\Cache` (for example, `C:\Users\USERNAME\AppData\Local\MyApp-nodejs\Cache`) +- Linux: `~/.cache/MyApp-nodejs` (or `$XDG_CACHE_HOME/MyApp-nodejs`) + ### paths.log Directory for log files. +Example locations (with the default `nodejs` [suffix](#suffix)): + +- macOS: `~/Library/Logs/MyApp-nodejs` +- Windows: `%LOCALAPPDATA%\MyApp-nodejs\Log` (for example, `C:\Users\USERNAME\AppData\Local\MyApp-nodejs\Log`) +- Linux: `~/.local/state/MyApp-nodejs` (or `$XDG_STATE_HOME/MyApp-nodejs`) + ### paths.temp Directory for temporary files. 
+Example locations (with the default `nodejs` [suffix](#suffix)): + +- macOS: `/var/folders/jf/f2twvvvs5jl_m49tf034ffpw0000gn/T/MyApp-nodejs` +- Windows: `%LOCALAPPDATA%\Temp\MyApp-nodejs` (for example, `C:\Users\USERNAME\AppData\Local\Temp\MyApp-nodejs`) +- Linux: `/tmp/USERNAME/MyApp-nodejs` -## License +--- -MIT © [Sindre Sorhus](https://sindresorhus.com) +<div align="center"> + <b> + <a href="https://tidelift.com/subscription/pkg/npm-env-paths?utm_source=npm-env-paths&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a> + </b> + <br> + <sub> + Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies. + </sub> +</div> diff --git a/node_modules/err-code/.editorconfig b/node_modules/err-code/.editorconfig deleted file mode 100644 index 829280bee1ac3..0000000000000 --- a/node_modules/err-code/.editorconfig +++ /dev/null @@ -1,12 +0,0 @@ -root = true - -[*] -indent_style = space -indent_size = 4 -end_of_line = lf -charset = utf-8 -trim_trailing_whitespace = true -insert_final_newline = true - -[package.json] -indent_size = 2 diff --git a/node_modules/err-code/.eslintrc.json b/node_modules/err-code/.eslintrc.json deleted file mode 100644 index 4829595a424ed..0000000000000 --- a/node_modules/err-code/.eslintrc.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "root": true, - "extends": [ - "@satazor/eslint-config/es6", - "@satazor/eslint-config/addons/node" - ] -} \ No newline at end of file diff --git a/node_modules/err-code/.travis.yml b/node_modules/err-code/.travis.yml deleted file mode 100644 index b29cf66a2b3b3..0000000000000 --- a/node_modules/err-code/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -language: node_js -node_js: - - "4" - - "6" diff --git a/node_modules/err-code/README.md b/node_modules/err-code/README.md deleted file mode 100644 index 5afdab00c9348..0000000000000 --- a/node_modules/err-code/README.md +++ /dev/null @@ -1,70 +0,0 @@ -# err-code - -[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Build Status][travis-image]][travis-url] [![Dependency status][david-dm-image]][david-dm-url] [![Dev Dependency status][david-dm-dev-image]][david-dm-dev-url] [![Greenkeeper badge][greenkeeper-image]][greenkeeper-url] - -[npm-url]:https://npmjs.org/package/err-code -[downloads-image]:http://img.shields.io/npm/dm/err-code.svg -[npm-image]:http://img.shields.io/npm/v/err-code.svg -[travis-url]:https://travis-ci.org/IndigoUnited/js-err-code -[travis-image]:http://img.shields.io/travis/IndigoUnited/js-err-code/master.svg -[david-dm-url]:https://david-dm.org/IndigoUnited/js-err-code -[david-dm-image]:https://img.shields.io/david/IndigoUnited/js-err-code.svg -[david-dm-dev-url]:https://david-dm.org/IndigoUnited/js-err-code?type=dev -[david-dm-dev-image]:https://img.shields.io/david/dev/IndigoUnited/js-err-code.svg -[greenkeeper-image]:https://badges.greenkeeper.io/IndigoUnited/js-err-code.svg -[greenkeeper-url]:https://greenkeeper.io/ - -Create new error instances with a code and additional properties. - - -## Installation - -```console -$ npm install err-code -// or -$ bower install err-code -``` - -The browser file is named index.umd.js which supports CommonJS, AMD and globals (errCode). 
- - -## Why - -I find myself doing this repeatedly: - -```js -var err = new Error('My message'); -err.code = 'SOMECODE'; -err.detail = 'Additional information about the error'; -throw err; -``` - - -## Usage - -Simple usage. - -```js -var errcode = require('err-code'); - -// fill error with message + code -throw errcode(new Error('My message'), 'ESOMECODE'); -// fill error with message + code + props -throw errcode(new Error('My message'), 'ESOMECODE', { detail: 'Additional information about the error' }); -// fill error with message + props -throw errcode(new Error('My message'), { detail: 'Additional information about the error' }); -``` - -## Pre-existing fields - -If the passed `Error` already has a `.code` field, or fields specified in the third argument to `errcode` they will be overwritten, unless the fields are read only or otherwise throw during assignment in which case a new object will be created that shares a prototype chain with the original `Error`. The `.stack` and `.message` properties will be carried over from the original error and `.code` or any passed properties will be set on it. - - -## Tests - -`$ npm test` - - -## License - -Released under the [MIT License](http://www.opensource.org/licenses/mit-license.php). diff --git a/node_modules/err-code/test/.eslintrc.json b/node_modules/err-code/test/.eslintrc.json deleted file mode 100644 index f9fbb2d6ce6ab..0000000000000 --- a/node_modules/err-code/test/.eslintrc.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "env": { - "mocha": true - } -} \ No newline at end of file diff --git a/node_modules/extend/.editorconfig b/node_modules/extend/.editorconfig deleted file mode 100644 index bc228f8269443..0000000000000 --- a/node_modules/extend/.editorconfig +++ /dev/null @@ -1,20 +0,0 @@ -root = true - -[*] -indent_style = tab -indent_size = 4 -end_of_line = lf -charset = utf-8 -trim_trailing_whitespace = true -insert_final_newline = true -max_line_length = 150 - -[CHANGELOG.md] -indent_style = space -indent_size = 2 - -[*.json] -max_line_length = off - -[Makefile] -max_line_length = off diff --git a/node_modules/extend/.jscs.json b/node_modules/extend/.jscs.json deleted file mode 100644 index 3cce01d783294..0000000000000 --- a/node_modules/extend/.jscs.json +++ /dev/null @@ -1,175 +0,0 @@ -{ - "es3": true, - - "additionalRules": [], - - "requireSemicolons": true, - - "disallowMultipleSpaces": true, - - "disallowIdentifierNames": [], - - "requireCurlyBraces": { - "allExcept": [], - "keywords": ["if", "else", "for", "while", "do", "try", "catch"] - }, - - "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], - - "disallowSpaceAfterKeywords": [], - - "disallowSpaceBeforeComma": true, - "disallowSpaceAfterComma": false, - "disallowSpaceBeforeSemicolon": true, - - "disallowNodeTypes": [ - "DebuggerStatement", - "LabeledStatement", - "SwitchCase", - "SwitchStatement", - "WithStatement" - ], - - "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, - - "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, - "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, - "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, - "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, - "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, - - "requireSpaceBetweenArguments": true, - - "disallowSpacesInsideParentheses": true, - - 
"disallowSpacesInsideArrayBrackets": true, - - "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, - - "disallowSpaceAfterObjectKeys": true, - - "requireCommaBeforeLineBreak": true, - - "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], - "requireSpaceAfterPrefixUnaryOperators": [], - - "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], - "requireSpaceBeforePostfixUnaryOperators": [], - - "disallowSpaceBeforeBinaryOperators": [], - "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], - - "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], - "disallowSpaceAfterBinaryOperators": [], - - "disallowImplicitTypeConversion": ["binary", "string"], - - "disallowKeywords": ["with", "eval"], - - "requireKeywordsOnNewLine": [], - "disallowKeywordsOnNewLine": ["else"], - - "requireLineFeedAtFileEnd": true, - - "disallowTrailingWhitespace": true, - - "disallowTrailingComma": true, - - "excludeFiles": ["node_modules/**", "vendor/**"], - - "disallowMultipleLineStrings": true, - - "requireDotNotation": { "allExcept": ["keywords"] }, - - "requireParenthesesAroundIIFE": true, - - "validateLineBreaks": "LF", - - "validateQuoteMarks": { - "escape": true, - "mark": "'" - }, - - "disallowOperatorBeforeLineBreak": [], - - "requireSpaceBeforeKeywords": [ - "do", - "for", - "if", - "else", - "switch", - "case", - "try", - "catch", - "finally", - "while", - "with", - "return" - ], - - "validateAlignedFunctionParameters": { - "lineBreakAfterOpeningBraces": true, - "lineBreakBeforeClosingBraces": true - }, - - "requirePaddingNewLinesBeforeExport": true, - - "validateNewlineAfterArrayElements": { - "maximum": 6 - }, - - "requirePaddingNewLinesAfterUseStrict": true, - - "disallowArrowFunctions": true, - - "disallowMultiLineTernary": true, - - "validateOrderInObjectKeys": false, - - "disallowIdenticalDestructuringNames": true, - - "disallowNestedTernaries": { "maxLevel": 1 }, - - "requireSpaceAfterComma": { "allExcept": ["trailing"] }, - "requireAlignedMultilineParams": false, - - "requireSpacesInGenerator": { - "afterStar": true - }, - - "disallowSpacesInGenerator": { - "beforeStar": true - }, - - "disallowVar": false, - - "requireArrayDestructuring": false, - - "requireEnhancedObjectLiterals": false, - - "requireObjectDestructuring": false, - - "requireEarlyReturn": false, - - "requireCapitalizedConstructorsNew": { - "allExcept": ["Function", "String", "Object", "Symbol", "Number", "Date", "RegExp", "Error", "Boolean", "Array"] - }, - - "requireImportAlphabetized": false, - - "requireSpaceBeforeObjectValues": true, - "requireSpaceBeforeDestructuredValues": true, - - "disallowSpacesInsideTemplateStringPlaceholders": true, - - "disallowArrayDestructuringReturn": false, - - "requireNewlineBeforeSingleStatementsInIf": false, - - "disallowUnusedVariables": true, - - "requireSpacesInsideImportedObjectBraces": true, - - "requireUseStrict": true -} - diff --git a/node_modules/extend/.travis.yml b/node_modules/extend/.travis.yml deleted file mode 100644 index 5ccdfc4948155..0000000000000 --- a/node_modules/extend/.travis.yml +++ /dev/null @@ -1,230 +0,0 @@ -language: node_js -os: - - linux -node_js: - - "10.7" - - "9.11" - - "8.11" - - "7.10" - - "6.14" - - "5.12" - - "4.9" - - "iojs-v3.3" - - "iojs-v2.5" - - "iojs-v1.8" - - "0.12" - - "0.10" - - "0.8" -before_install: - - 'case "${TRAVIS_NODE_VERSION}" in 0.*) export NPM_CONFIG_STRICT_SSL=false ;; esac' - - 'nvm install-latest-npm' -install: - - 'if [ 
"${TRAVIS_NODE_VERSION}" = "0.6" ] || [ "${TRAVIS_NODE_VERSION}" = "0.9" ]; then nvm install --latest-npm 0.8 && npm install && nvm use "${TRAVIS_NODE_VERSION}"; else npm install; fi;' -script: - - 'if [ -n "${PRETEST-}" ]; then npm run pretest ; fi' - - 'if [ -n "${POSTTEST-}" ]; then npm run posttest ; fi' - - 'if [ -n "${COVERAGE-}" ]; then npm run coverage ; fi' - - 'if [ -n "${TEST-}" ]; then npm run tests-only ; fi' -sudo: false -env: - - TEST=true -matrix: - fast_finish: true - include: - - node_js: "lts/*" - env: PRETEST=true - - node_js: "lts/*" - env: POSTTEST=true - - node_js: "4" - env: COVERAGE=true - - node_js: "10.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "10.5" - env: TEST=true ALLOW_FAILURE=true - - node_js: "10.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "10.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "10.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "10.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "10.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "9.10" - env: TEST=true ALLOW_FAILURE=true - - node_js: "9.9" - env: TEST=true ALLOW_FAILURE=true - - node_js: "9.8" - env: TEST=true ALLOW_FAILURE=true - - node_js: "9.7" - env: TEST=true ALLOW_FAILURE=true - - node_js: "9.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "9.5" - env: TEST=true ALLOW_FAILURE=true - - node_js: "9.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "9.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "9.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "9.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "9.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "8.10" - env: TEST=true ALLOW_FAILURE=true - - node_js: "8.9" - env: TEST=true ALLOW_FAILURE=true - - node_js: "8.8" - env: TEST=true ALLOW_FAILURE=true - - node_js: "8.7" - env: TEST=true ALLOW_FAILURE=true - - node_js: "8.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "8.5" - env: TEST=true ALLOW_FAILURE=true - - node_js: "8.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "8.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "8.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "8.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "8.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.9" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.8" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.7" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.5" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.13" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.12" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.11" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.10" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.9" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.8" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.7" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.5" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.0" - env: TEST=true ALLOW_FAILURE=true - - 
node_js: "5.11" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.10" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.9" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.8" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.7" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.5" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.8" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.7" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.5" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v3.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v3.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v3.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v2.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v2.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v2.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v2.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v2.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.7" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.5" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "0.11" - env: TEST=true ALLOW_FAILURE=true - - node_js: "0.9" - env: TEST=true ALLOW_FAILURE=true - - node_js: "0.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "0.4" - env: TEST=true ALLOW_FAILURE=true - allow_failures: - - os: osx - - env: TEST=true ALLOW_FAILURE=true diff --git a/node_modules/extend/CHANGELOG.md b/node_modules/extend/CHANGELOG.md deleted file mode 100644 index 2cf7de6fb3ae5..0000000000000 --- a/node_modules/extend/CHANGELOG.md +++ /dev/null @@ -1,83 +0,0 @@ -3.0.2 / 2018-07-19 -================== - * [Fix] Prevent merging `__proto__` property (#48) - * [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` - * [Tests] up to `node` `v10.7`, `v9.11`, `v8.11`, `v7.10`, `v6.14`, `v4.9`; use `nvm install-latest-npm` - -3.0.1 / 2017-04-27 -================== - * [Fix] deep extending should work with a non-object (#46) - * [Dev Deps] update `tape`, `eslint`, `@ljharb/eslint-config` - * [Tests] up to `node` `v7.9`, `v6.10`, `v4.8`; improve matrix - * [Docs] Switch from vb.teelaun.ch to versionbadg.es for the npm version badge SVG. 
- * [Docs] Add example to readme (#34) - -3.0.0 / 2015-07-01 -================== - * [Possible breaking change] Use global "strict" directive (#32) - * [Tests] `int` is an ES3 reserved word - * [Tests] Test up to `io.js` `v2.3` - * [Tests] Add `npm run eslint` - * [Dev Deps] Update `covert`, `jscs` - -2.0.1 / 2015-04-25 -================== - * Use an inline `isArray` check, for ES3 browsers. (#27) - * Some old browsers fail when an identifier is `toString` - * Test latest `node` and `io.js` versions on `travis-ci`; speed up builds - * Add license info to package.json (#25) - * Update `tape`, `jscs` - * Adding a CHANGELOG - -2.0.0 / 2014-10-01 -================== - * Increase code coverage to 100%; run code coverage as part of tests - * Add `npm run lint`; Run linter as part of tests - * Remove nodeType and setInterval checks in isPlainObject - * Updating `tape`, `jscs`, `covert` - * General style and README cleanup - -1.3.0 / 2014-06-20 -================== - * Add component.json for browser support (#18) - * Use SVG for badges in README (#16) - * Updating `tape`, `covert` - * Updating travis-ci to work with multiple node versions - * Fix `deep === false` bug (returning target as {}) (#14) - * Fixing constructor checks in isPlainObject - * Adding additional test coverage - * Adding `npm run coverage` - * Add LICENSE (#13) - * Adding a warning about `false`, per #11 - * General style and whitespace cleanup - -1.2.1 / 2013-09-14 -================== - * Fixing hasOwnProperty bugs that would only have shown up in specific browsers. Fixes #8 - * Updating `tape` - -1.2.0 / 2013-09-02 -================== - * Updating the README: add badges - * Adding a missing variable reference. - * Using `tape` instead of `buster` for tests; add more tests (#7) - * Adding node 0.10 to Travis CI (#6) - * Enabling "npm test" and cleaning up package.json (#5) - * Add Travis CI. - -1.1.3 / 2012-12-06 -================== - * Added unit tests. - * Ensure extend function is named. (Looks nicer in a stack trace.) - * README cleanup. - -1.1.1 / 2012-11-07 -================== - * README cleanup. - * Added installation instructions. - * Added a missing semicolon - -1.0.0 / 2012-04-08 -================== - * Initial commit - diff --git a/node_modules/extend/README.md b/node_modules/extend/README.md deleted file mode 100644 index 5b8249aa95e5d..0000000000000 --- a/node_modules/extend/README.md +++ /dev/null @@ -1,81 +0,0 @@ -[![Build Status][travis-svg]][travis-url] -[![dependency status][deps-svg]][deps-url] -[![dev dependency status][dev-deps-svg]][dev-deps-url] - -# extend() for Node.js <sup>[![Version Badge][npm-version-png]][npm-url]</sup> - -`node-extend` is a port of the classic extend() method from jQuery. It behaves as you expect. It is simple, tried and true. - -Notes: - -* Since Node.js >= 4, - [`Object.assign`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/assign) - now offers the same functionality natively (but without the "deep copy" option). - See [ECMAScript 2015 (ES6) in Node.js](https://nodejs.org/en/docs/es6). -* Some native implementations of `Object.assign` in both Node.js and many - browsers (since NPM modules are for the browser too) may not be fully - spec-compliant. - Check [`object.assign`](https://www.npmjs.com/package/object.assign) module for - a compliant candidate. 
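
A minimal sketch of the difference noted above (not part of the original README): `Object.assign` copies only the top level, while passing `true` as the first argument to `extend` — documented under Usage below — also clones nested objects.

``` js
var extend = require('extend');

var source = { nested: { a: 1 } };

var shallow = Object.assign({}, source); // shares source.nested
var deep = extend(true, {}, source);     // recursively copies source.nested

source.nested.a = 2;
console.log(shallow.nested.a); // 2 – the nested object is shared
console.log(deep.nested.a);    // 1 – the deep copy is unaffected
```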
- -## Installation - -This package is available on [npm][npm-url] as: `extend` - -``` sh -npm install extend -``` - -## Usage - -**Syntax:** extend **(** [`deep`], `target`, `object1`, [`objectN`] **)** - -*Extend one object with one or more others, returning the modified object.* - -**Example:** - -``` js -var extend = require('extend'); -extend(targetObject, object1, object2); -``` - -Keep in mind that the target object will be modified, and will be returned from extend(). - -If a boolean true is specified as the first argument, extend performs a deep copy, recursively copying any objects it finds. Otherwise, the copy will share structure with the original object(s). -Undefined properties are not copied. However, properties inherited from the object's prototype will be copied over. -Warning: passing `false` as the first argument is not supported. - -### Arguments - -* `deep` *Boolean* (optional) -If set, the merge becomes recursive (i.e. deep copy). -* `target` *Object* -The object to extend. -* `object1` *Object* -The object that will be merged into the first. -* `objectN` *Object* (Optional) -More objects to merge into the first. - -## License - -`node-extend` is licensed under the [MIT License][mit-license-url]. - -## Acknowledgements - -All credit to the jQuery authors for perfecting this amazing utility. - -Ported to Node.js by [Stefan Thomas][github-justmoon] with contributions by [Jonathan Buchanan][github-insin] and [Jordan Harband][github-ljharb]. - -[travis-svg]: https://travis-ci.org/justmoon/node-extend.svg -[travis-url]: https://travis-ci.org/justmoon/node-extend -[npm-url]: https://npmjs.org/package/extend -[mit-license-url]: http://opensource.org/licenses/MIT -[github-justmoon]: https://github.com/justmoon -[github-insin]: https://github.com/insin -[github-ljharb]: https://github.com/ljharb -[npm-version-png]: http://versionbadg.es/justmoon/node-extend.svg -[deps-svg]: https://david-dm.org/justmoon/node-extend.svg -[deps-url]: https://david-dm.org/justmoon/node-extend -[dev-deps-svg]: https://david-dm.org/justmoon/node-extend/dev-status.svg -[dev-deps-url]: https://david-dm.org/justmoon/node-extend#info=devDependencies - diff --git a/node_modules/extsprintf/.npmignore b/node_modules/extsprintf/.npmignore deleted file mode 100644 index 6ed1ae975080f..0000000000000 --- a/node_modules/extsprintf/.npmignore +++ /dev/null @@ -1,2 +0,0 @@ -/deps -/examples diff --git a/node_modules/extsprintf/README.md b/node_modules/extsprintf/README.md deleted file mode 100644 index b22998d63af16..0000000000000 --- a/node_modules/extsprintf/README.md +++ /dev/null @@ -1,46 +0,0 @@ -# extsprintf: extended POSIX-style sprintf - -Stripped down version of s[n]printf(3c). We make a best effort to throw an -exception when given a format string we don't understand, rather than ignoring -it, so that we won't break existing programs if/when we go implement the rest -of this. - -This implementation currently supports specifying - -* field alignment ('-' flag), -* zero-pad ('0' flag) -* always show numeric sign ('+' flag), -* field width -* conversions for strings, decimal integers, and floats (numbers). -* argument size specifiers. These are all accepted but ignored, since - Javascript has no notion of the physical size of an argument. - -Everything else is currently unsupported, most notably: precision, unsigned -numbers, non-decimal numbers, and characters. 
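
As an illustrative sketch of the flags listed above (not from the original README), field width, alignment, zero-padding, and the sign flag can be combined in a single format string:

    var mod_extsprintf = require('extsprintf');
    // '-' left-aligns within a width of 10, '0' zero-pads to 5 digits,
    // '+' forces the sign on a positive number
    console.log(mod_extsprintf.sprintf('%-10s|%05d|%+d', 'left', 42, 7));

which, assuming the flags behave as described, outputs:

    left      |00042|+7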
- -Besides the usual POSIX conversions, this implementation supports: - -* `%j`: pretty-print a JSON object (using node's "inspect") -* `%r`: pretty-print an Error object - -# Example - -First, install it: - - # npm install extsprintf - -Now, use it: - - var mod_extsprintf = require('extsprintf'); - console.log(mod_extsprintf.sprintf('hello %25s', 'world')); - -outputs: - - hello world - -# Also supported - -**printf**: same args as sprintf, but prints the result to stdout - -**fprintf**: same args as sprintf, preceded by a Node stream. Prints the result -to the given stream. diff --git a/node_modules/fast-deep-equal/README.md b/node_modules/fast-deep-equal/README.md deleted file mode 100644 index d3f4ffcc316f9..0000000000000 --- a/node_modules/fast-deep-equal/README.md +++ /dev/null @@ -1,96 +0,0 @@ -# fast-deep-equal -The fastest deep equal with ES6 Map, Set and Typed arrays support. - -[![Build Status](https://travis-ci.org/epoberezkin/fast-deep-equal.svg?branch=master)](https://travis-ci.org/epoberezkin/fast-deep-equal) -[![npm](https://img.shields.io/npm/v/fast-deep-equal.svg)](https://www.npmjs.com/package/fast-deep-equal) -[![Coverage Status](https://coveralls.io/repos/github/epoberezkin/fast-deep-equal/badge.svg?branch=master)](https://coveralls.io/github/epoberezkin/fast-deep-equal?branch=master) - - -## Install - -```bash -npm install fast-deep-equal -``` - - -## Features - -- ES5 compatible -- works in node.js (8+) and browsers (IE9+) -- checks equality of Date and RegExp objects by value. - -ES6 equal (`require('fast-deep-equal/es6')`) also supports: -- Maps -- Sets -- Typed arrays - - -## Usage - -```javascript -var equal = require('fast-deep-equal'); -console.log(equal({foo: 'bar'}, {foo: 'bar'})); // true -``` - -To support ES6 Maps, Sets and Typed arrays equality use: - -```javascript -var equal = require('fast-deep-equal/es6'); -console.log(equal(Int16Array([1, 2]), Int16Array([1, 2]))); // true -``` - -To use with React (avoiding the traversal of React elements' _owner -property that contains circular references and is not needed when -comparing the elements - borrowed from [react-fast-compare](https://github.com/FormidableLabs/react-fast-compare)): - -```javascript -var equal = require('fast-deep-equal/react'); -var equal = require('fast-deep-equal/es6/react'); -``` - - -## Performance benchmark - -Node.js v12.6.0: - -``` -fast-deep-equal x 261,950 ops/sec ±0.52% (89 runs sampled) -fast-deep-equal/es6 x 212,991 ops/sec ±0.34% (92 runs sampled) -fast-equals x 230,957 ops/sec ±0.83% (85 runs sampled) -nano-equal x 187,995 ops/sec ±0.53% (88 runs sampled) -shallow-equal-fuzzy x 138,302 ops/sec ±0.49% (90 runs sampled) -underscore.isEqual x 74,423 ops/sec ±0.38% (89 runs sampled) -lodash.isEqual x 36,637 ops/sec ±0.72% (90 runs sampled) -deep-equal x 2,310 ops/sec ±0.37% (90 runs sampled) -deep-eql x 35,312 ops/sec ±0.67% (91 runs sampled) -ramda.equals x 12,054 ops/sec ±0.40% (91 runs sampled) -util.isDeepStrictEqual x 46,440 ops/sec ±0.43% (90 runs sampled) -assert.deepStrictEqual x 456 ops/sec ±0.71% (88 runs sampled) - -The fastest is fast-deep-equal -``` - -To run benchmark (requires node.js 6+): - -```bash -npm run benchmark -``` - -__Please note__: this benchmark runs against the available test cases. To choose the most performant library for your application, it is recommended to benchmark against your data and to NOT expect this benchmark to reflect the performance difference in your application. 
- - -## Enterprise support - -fast-deep-equal package is a part of [Tidelift enterprise subscription](https://tidelift.com/subscription/pkg/npm-fast-deep-equal?utm_source=npm-fast-deep-equal&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) - it provides a centralised commercial support to open-source software users, in addition to the support provided by software maintainers. - - -## Security contact - -To report a security vulnerability, please use the -[Tidelift security contact](https://tidelift.com/security). -Tidelift will coordinate the fix and disclosure. Please do NOT report security vulnerability via GitHub issues. - - -## License - -[MIT](https://github.com/epoberezkin/fast-deep-equal/blob/master/LICENSE) diff --git a/node_modules/fast-json-stable-stringify/.eslintrc.yml b/node_modules/fast-json-stable-stringify/.eslintrc.yml deleted file mode 100644 index 1c77b0d479035..0000000000000 --- a/node_modules/fast-json-stable-stringify/.eslintrc.yml +++ /dev/null @@ -1,26 +0,0 @@ -extends: eslint:recommended -env: - node: true - browser: true -rules: - block-scoped-var: 2 - callback-return: 2 - dot-notation: 2 - indent: 2 - linebreak-style: [2, unix] - new-cap: 2 - no-console: [2, allow: [warn, error]] - no-else-return: 2 - no-eq-null: 2 - no-fallthrough: 2 - no-invalid-this: 2 - no-return-assign: 2 - no-shadow: 1 - no-trailing-spaces: 2 - no-use-before-define: [2, nofunc] - quotes: [2, single, avoid-escape] - semi: [2, always] - strict: [2, global] - valid-jsdoc: [2, requireReturn: false] - no-control-regex: 0 - no-useless-escape: 2 diff --git a/node_modules/fast-json-stable-stringify/.github/FUNDING.yml b/node_modules/fast-json-stable-stringify/.github/FUNDING.yml deleted file mode 100644 index 61f9daa955b01..0000000000000 --- a/node_modules/fast-json-stable-stringify/.github/FUNDING.yml +++ /dev/null @@ -1 +0,0 @@ -tidelift: "npm/fast-json-stable-stringify" diff --git a/node_modules/fast-json-stable-stringify/.travis.yml b/node_modules/fast-json-stable-stringify/.travis.yml deleted file mode 100644 index b61e8f0dc9dcc..0000000000000 --- a/node_modules/fast-json-stable-stringify/.travis.yml +++ /dev/null @@ -1,8 +0,0 @@ -language: node_js -node_js: - - "8" - - "10" - - "12" - - "13" -after_script: - - coveralls < coverage/lcov.info diff --git a/node_modules/fast-json-stable-stringify/README.md b/node_modules/fast-json-stable-stringify/README.md deleted file mode 100644 index 02cf49ff385b8..0000000000000 --- a/node_modules/fast-json-stable-stringify/README.md +++ /dev/null @@ -1,131 +0,0 @@ -# fast-json-stable-stringify - -Deterministic `JSON.stringify()` - a faster version of [@substack](https://github.com/substack)'s json-stable-strigify without [jsonify](https://github.com/substack/jsonify). - -You can also pass in a custom comparison function. 
- -[![Build Status](https://travis-ci.org/epoberezkin/fast-json-stable-stringify.svg?branch=master)](https://travis-ci.org/epoberezkin/fast-json-stable-stringify) -[![Coverage Status](https://coveralls.io/repos/github/epoberezkin/fast-json-stable-stringify/badge.svg?branch=master)](https://coveralls.io/github/epoberezkin/fast-json-stable-stringify?branch=master) - -# example - -``` js -var stringify = require('fast-json-stable-stringify'); -var obj = { c: 8, b: [{z:6,y:5,x:4},7], a: 3 }; -console.log(stringify(obj)); -``` - -output: - -``` -{"a":3,"b":[{"x":4,"y":5,"z":6},7],"c":8} -``` - - -# methods - -``` js -var stringify = require('fast-json-stable-stringify') -``` - -## var str = stringify(obj, opts) - -Return a deterministic stringified string `str` from the object `obj`. - - -## options - -### cmp - -If `opts` is given, you can supply an `opts.cmp` to have a custom comparison -function for object keys. Your function `opts.cmp` is called with these -parameters: - -``` js -opts.cmp({ key: akey, value: avalue }, { key: bkey, value: bvalue }) -``` - -For example, to sort on the object key names in reverse order you could write: - -``` js -var stringify = require('fast-json-stable-stringify'); - -var obj = { c: 8, b: [{z:6,y:5,x:4},7], a: 3 }; -var s = stringify(obj, function (a, b) { - return a.key < b.key ? 1 : -1; -}); -console.log(s); -``` - -which results in the output string: - -``` -{"c":8,"b":[{"z":6,"y":5,"x":4},7],"a":3} -``` - -Or if you wanted to sort on the object values in reverse order, you could write: - -``` -var stringify = require('fast-json-stable-stringify'); - -var obj = { d: 6, c: 5, b: [{z:3,y:2,x:1},9], a: 10 }; -var s = stringify(obj, function (a, b) { - return a.value < b.value ? 1 : -1; -}); -console.log(s); -``` - -which outputs: - -``` -{"d":6,"c":5,"b":[{"z":3,"y":2,"x":1},9],"a":10} -``` - -### cycles - -Pass `true` in `opts.cycles` to stringify circular property as `__cycle__` - the result will not be a valid JSON string in this case. - -TypeError will be thrown in case of circular object without this option. - - -# install - -With [npm](https://npmjs.org) do: - -``` -npm install fast-json-stable-stringify -``` - - -# benchmark - -To run benchmark (requires Node.js 6+): -``` -node benchmark -``` - -Results: -``` -fast-json-stable-stringify x 17,189 ops/sec ±1.43% (83 runs sampled) -json-stable-stringify x 13,634 ops/sec ±1.39% (85 runs sampled) -fast-stable-stringify x 20,212 ops/sec ±1.20% (84 runs sampled) -faster-stable-stringify x 15,549 ops/sec ±1.12% (84 runs sampled) -The fastest is fast-stable-stringify -``` - - -## Enterprise support - -fast-json-stable-stringify package is a part of [Tidelift enterprise subscription](https://tidelift.com/subscription/pkg/npm-fast-json-stable-stringify?utm_source=npm-fast-json-stable-stringify&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) - it provides a centralised commercial support to open-source software users, in addition to the support provided by software maintainers. - - -## Security contact - -To report a security vulnerability, please use the -[Tidelift security contact](https://tidelift.com/security). -Tidelift will coordinate the fix and disclosure. Please do NOT report security vulnerability via GitHub issues. 
- - -# license - -[MIT](https://github.com/epoberezkin/fast-json-stable-stringify/blob/master/LICENSE) diff --git a/node_modules/forever-agent/README.md b/node_modules/forever-agent/README.md deleted file mode 100644 index 9d5b66343c4e9..0000000000000 --- a/node_modules/forever-agent/README.md +++ /dev/null @@ -1,4 +0,0 @@ -forever-agent -============= - -HTTP Agent that keeps socket connections alive between keep-alive requests. Formerly part of mikeal/request, now a standalone module. diff --git a/node_modules/form-data/README.md b/node_modules/form-data/README.md deleted file mode 100644 index d7809364fba88..0000000000000 --- a/node_modules/form-data/README.md +++ /dev/null @@ -1,234 +0,0 @@ -# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) - -A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. - -The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. - -[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface - -[![Linux Build](https://img.shields.io/travis/form-data/form-data/v2.3.3.svg?label=linux:4.x-9.x)](https://travis-ci.org/form-data/form-data) -[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v2.3.3.svg?label=macos:4.x-9.x)](https://travis-ci.org/form-data/form-data) -[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/form-data/v2.3.3.svg?label=windows:4.x-9.x)](https://ci.appveyor.com/project/alexindigo/form-data) - -[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v2.3.3.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) -[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) -[![bitHound Overall Score](https://www.bithound.io/github/form-data/form-data/badges/score.svg)](https://www.bithound.io/github/form-data/form-data) - -## Install - -``` -npm install --save form-data -``` - -## Usage - -In this example we are constructing a form with 3 fields that contain a string, -a buffer and a file stream. 
- -``` javascript -var FormData = require('form-data'); -var fs = require('fs'); - -var form = new FormData(); -form.append('my_field', 'my value'); -form.append('my_buffer', new Buffer(10)); -form.append('my_file', fs.createReadStream('/foo/bar.jpg')); -``` - -Also you can use http-response stream: - -``` javascript -var FormData = require('form-data'); -var http = require('http'); - -var form = new FormData(); - -http.request('http://nodejs.org/images/logo.png', function(response) { - form.append('my_field', 'my value'); - form.append('my_buffer', new Buffer(10)); - form.append('my_logo', response); -}); -``` - -Or @mikeal's [request](https://github.com/request/request) stream: - -``` javascript -var FormData = require('form-data'); -var request = require('request'); - -var form = new FormData(); - -form.append('my_field', 'my value'); -form.append('my_buffer', new Buffer(10)); -form.append('my_logo', request('http://nodejs.org/images/logo.png')); -``` - -In order to submit this form to a web application, call ```submit(url, [callback])``` method: - -``` javascript -form.submit('http://example.org/', function(err, res) { - // res – response object (http.IncomingMessage) // - res.resume(); -}); - -``` - -For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. - -### Custom options - -You can provide custom options, such as `maxDataSize`: - -``` javascript -var FormData = require('form-data'); - -var form = new FormData({ maxDataSize: 20971520 }); -form.append('my_field', 'my value'); -form.append('my_buffer', /* something big */); -``` - -List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) - -### Alternative submission methods - -You can use node's http client interface: - -``` javascript -var http = require('http'); - -var request = http.request({ - method: 'post', - host: 'example.org', - path: '/upload', - headers: form.getHeaders() -}); - -form.pipe(request); - -request.on('response', function(res) { - console.log(res.statusCode); -}); -``` - -Or if you would prefer the `'Content-Length'` header to be set for you: - -``` javascript -form.submit('example.org/upload', function(err, res) { - console.log(res.statusCode); -}); -``` - -To use custom headers and pre-known length in parts: - -``` javascript -var CRLF = '\r\n'; -var form = new FormData(); - -var options = { - header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, - knownLength: 1 -}; - -form.append('my_buffer', buffer, options); - -form.submit('http://example.com/', function(err, res) { - if (err) throw err; - console.log('Done'); -}); -``` - -Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: - -``` javascript -someModule.stream(function(err, stdout, stderr) { - if (err) throw err; - - var form = new FormData(); - - form.append('file', stdout, { - filename: 'unicycle.jpg', // ... or: - filepath: 'photos/toys/unicycle.jpg', - contentType: 'image/jpeg', - knownLength: 19806 - }); - - form.submit('http://example.com/', function(err, res) { - if (err) throw err; - console.log('Done'); - }); -}); -``` - -The `filepath` property overrides `filename` and may contain a relative path. 
This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). - -For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: - -``` javascript -form.submit({ - host: 'example.com', - path: '/probably.php?extra=params', - auth: 'username:password' -}, function(err, res) { - console.log(res.statusCode); -}); -``` - -In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: - -``` javascript -form.submit({ - host: 'example.com', - path: '/surelynot.php', - headers: {'x-test-header': 'test-header-value'} -}, function(err, res) { - console.log(res.statusCode); -}); -``` - -### Integration with other libraries - -#### Request - -Form submission using [request](https://github.com/request/request): - -```javascript -var formData = { - my_field: 'my_value', - my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), -}; - -request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { - if (err) { - return console.error('upload failed:', err); - } - console.log('Upload successful! Server responded with:', body); -}); -``` - -For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). - -#### node-fetch - -You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): - -```javascript -var form = new FormData(); - -form.append('a', 1); - -fetch('http://example.com', { method: 'POST', body: form }) - .then(function(res) { - return res.json(); - }).then(function(json) { - console.log(json); - }); -``` - -## Notes - -- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. -- Starting version `2.x` FormData has dropped support for `node@0.10.x`. - -## License - -Form-Data is released under the [MIT](License) license. diff --git a/node_modules/form-data/README.md.bak b/node_modules/form-data/README.md.bak deleted file mode 100644 index 0524d60288a13..0000000000000 --- a/node_modules/form-data/README.md.bak +++ /dev/null @@ -1,234 +0,0 @@ -# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) - -A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. - -The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
- -[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface - -[![Linux Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=linux:4.x-9.x)](https://travis-ci.org/form-data/form-data) -[![MacOS Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=macos:4.x-9.x)](https://travis-ci.org/form-data/form-data) -[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/form-data/master.svg?label=windows:4.x-9.x)](https://ci.appveyor.com/project/alexindigo/form-data) - -[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/master.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) -[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) -[![bitHound Overall Score](https://www.bithound.io/github/form-data/form-data/badges/score.svg)](https://www.bithound.io/github/form-data/form-data) - -## Install - -``` -npm install --save form-data -``` - -## Usage - -In this example we are constructing a form with 3 fields that contain a string, -a buffer and a file stream. - -``` javascript -var FormData = require('form-data'); -var fs = require('fs'); - -var form = new FormData(); -form.append('my_field', 'my value'); -form.append('my_buffer', new Buffer(10)); -form.append('my_file', fs.createReadStream('/foo/bar.jpg')); -``` - -Also you can use http-response stream: - -``` javascript -var FormData = require('form-data'); -var http = require('http'); - -var form = new FormData(); - -http.request('http://nodejs.org/images/logo.png', function(response) { - form.append('my_field', 'my value'); - form.append('my_buffer', new Buffer(10)); - form.append('my_logo', response); -}); -``` - -Or @mikeal's [request](https://github.com/request/request) stream: - -``` javascript -var FormData = require('form-data'); -var request = require('request'); - -var form = new FormData(); - -form.append('my_field', 'my value'); -form.append('my_buffer', new Buffer(10)); -form.append('my_logo', request('http://nodejs.org/images/logo.png')); -``` - -In order to submit this form to a web application, call ```submit(url, [callback])``` method: - -``` javascript -form.submit('http://example.org/', function(err, res) { - // res – response object (http.IncomingMessage) // - res.resume(); -}); - -``` - -For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
- -### Custom options - -You can provide custom options, such as `maxDataSize`: - -``` javascript -var FormData = require('form-data'); - -var form = new FormData({ maxDataSize: 20971520 }); -form.append('my_field', 'my value'); -form.append('my_buffer', /* something big */); -``` - -List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) - -### Alternative submission methods - -You can use node's http client interface: - -``` javascript -var http = require('http'); - -var request = http.request({ - method: 'post', - host: 'example.org', - path: '/upload', - headers: form.getHeaders() -}); - -form.pipe(request); - -request.on('response', function(res) { - console.log(res.statusCode); -}); -``` - -Or if you would prefer the `'Content-Length'` header to be set for you: - -``` javascript -form.submit('example.org/upload', function(err, res) { - console.log(res.statusCode); -}); -``` - -To use custom headers and pre-known length in parts: - -``` javascript -var CRLF = '\r\n'; -var form = new FormData(); - -var options = { - header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, - knownLength: 1 -}; - -form.append('my_buffer', buffer, options); - -form.submit('http://example.com/', function(err, res) { - if (err) throw err; - console.log('Done'); -}); -``` - -Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: - -``` javascript -someModule.stream(function(err, stdout, stderr) { - if (err) throw err; - - var form = new FormData(); - - form.append('file', stdout, { - filename: 'unicycle.jpg', // ... or: - filepath: 'photos/toys/unicycle.jpg', - contentType: 'image/jpeg', - knownLength: 19806 - }); - - form.submit('http://example.com/', function(err, res) { - if (err) throw err; - console.log('Done'); - }); -}); -``` - -The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). - -For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: - -``` javascript -form.submit({ - host: 'example.com', - path: '/probably.php?extra=params', - auth: 'username:password' -}, function(err, res) { - console.log(res.statusCode); -}); -``` - -In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: - -``` javascript -form.submit({ - host: 'example.com', - path: '/surelynot.php', - headers: {'x-test-header': 'test-header-value'} -}, function(err, res) { - console.log(res.statusCode); -}); -``` - -### Integration with other libraries - -#### Request - -Form submission using [request](https://github.com/request/request): - -```javascript -var formData = { - my_field: 'my_value', - my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), -}; - -request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { - if (err) { - return console.error('upload failed:', err); - } - console.log('Upload successful! 
Server responded with:', body); -}); -``` - -For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). - -#### node-fetch - -You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): - -```javascript -var form = new FormData(); - -form.append('a', 1); - -fetch('http://example.com', { method: 'POST', body: form }) - .then(function(res) { - return res.json(); - }).then(function(json) { - console.log(json); - }); -``` - -## Notes - -- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. -- Starting version `2.x` FormData has dropped support for `node@0.10.x`. - -## License - -Form-Data is released under the [MIT](License) license. diff --git a/node_modules/fs-minipass/README.md b/node_modules/fs-minipass/README.md deleted file mode 100644 index 1e61241cf03a6..0000000000000 --- a/node_modules/fs-minipass/README.md +++ /dev/null @@ -1,70 +0,0 @@ -# fs-minipass - -Filesystem streams based on [minipass](http://npm.im/minipass). - -4 classes are exported: - -- ReadStream -- ReadStreamSync -- WriteStream -- WriteStreamSync - -When using `ReadStreamSync`, all of the data is made available -immediately upon consuming the stream. Nothing is buffered in memory -when the stream is constructed. If the stream is piped to a writer, -then it will synchronously `read()` and emit data into the writer as -fast as the writer can consume it. (That is, it will respect -backpressure.) If you call `stream.read()` then it will read the -entire file and return the contents. - -When using `WriteStreamSync`, every write is flushed to the file -synchronously. If your writes all come in a single tick, then it'll -write it all out in a single tick. It's as synchronous as you are. - -The async versions work much like their node builtin counterparts, -with the exception of introducing significantly less Stream machinery -overhead. - -## USAGE - -It's just streams, you pipe them or read() them or write() to them. - -```js -const fsm = require('fs-minipass') -const readStream = new fsm.ReadStream('file.txt') -const writeStream = new fsm.WriteStream('output.txt') -writeStream.write('some file header or whatever\n') -readStream.pipe(writeStream) -``` - -## ReadStream(path, options) - -Path string is required, but somewhat irrelevant if an open file -descriptor is passed in as an option. - -Options: - -- `fd` Pass in a numeric file descriptor, if the file is already open. -- `readSize` The size of reads to do, defaults to 16MB -- `size` The size of the file, if known. Prevents zero-byte read() - call at the end. -- `autoClose` Set to `false` to prevent the file descriptor from being - closed when the file is done being read. - -## WriteStream(path, options) - -Path string is required, but somewhat irrelevant if an open file -descriptor is passed in as an option. - -Options: - -- `fd` Pass in a numeric file descriptor, if the file is already open. -- `mode` The mode to create the file with. Defaults to `0o666`. -- `start` The position in the file to start reading. If not - specified, then the file will start writing at position zero, and be - truncated by default. -- `autoClose` Set to `false` to prevent the file descriptor from being - closed when the stream is ended. -- `flags` Flags to use when opening the file. Irrelevant if `fd` is - passed in, since file won't be opened in that case. Defaults to - `'a'` if a `pos` is specified, or `'w'` otherwise. 
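
A small sketch combining several of the options above (hypothetical file names; the option names are the ones documented in this README): pass a known `size` and a larger `readSize` to the reader, and a custom `mode` to the writer.

```js
const fsm = require('fs-minipass')
const fs = require('fs')

// knowing the size up front avoids the trailing zero-byte read()
const size = fs.statSync('input.bin').size

const readStream = new fsm.ReadStream('input.bin', { size, readSize: 1024 * 1024 })
const writeStream = new fsm.WriteStream('copy.bin', { mode: 0o644 })
readStream.pipe(writeStream)
```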
diff --git a/node_modules/fs.realpath/README.md b/node_modules/fs.realpath/README.md deleted file mode 100644 index a42ceac62663a..0000000000000 --- a/node_modules/fs.realpath/README.md +++ /dev/null @@ -1,33 +0,0 @@ -# fs.realpath - -A backwards-compatible fs.realpath for Node v6 and above - -In Node v6, the JavaScript implementation of fs.realpath was replaced -with a faster (but less resilient) native implementation. That raises -new and platform-specific errors and cannot handle long or excessively -symlink-looping paths. - -This module handles those cases by detecting the new errors and -falling back to the JavaScript implementation. On versions of Node -prior to v6, it has no effect. - -## USAGE - -```js -var rp = require('fs.realpath') - -// async version -rp.realpath(someLongAndLoopingPath, function (er, real) { - // the ELOOP was handled, but it was a bit slower -}) - -// sync version -var real = rp.realpathSync(someLongAndLoopingPath) - -// monkeypatch at your own risk! -// This replaces the fs.realpath/fs.realpathSync builtins -rp.monkeypatch() - -// un-do the monkeypatching -rp.unmonkeypatch() -``` diff --git a/node_modules/function-bind/.editorconfig b/node_modules/function-bind/.editorconfig deleted file mode 100644 index ac29adef0361c..0000000000000 --- a/node_modules/function-bind/.editorconfig +++ /dev/null @@ -1,20 +0,0 @@ -root = true - -[*] -indent_style = tab -indent_size = 4 -end_of_line = lf -charset = utf-8 -trim_trailing_whitespace = true -insert_final_newline = true -max_line_length = 120 - -[CHANGELOG.md] -indent_style = space -indent_size = 2 - -[*.json] -max_line_length = off - -[Makefile] -max_line_length = off diff --git a/node_modules/function-bind/.jscs.json b/node_modules/function-bind/.jscs.json deleted file mode 100644 index 8c4479480be70..0000000000000 --- a/node_modules/function-bind/.jscs.json +++ /dev/null @@ -1,176 +0,0 @@ -{ - "es3": true, - - "additionalRules": [], - - "requireSemicolons": true, - - "disallowMultipleSpaces": true, - - "disallowIdentifierNames": [], - - "requireCurlyBraces": { - "allExcept": [], - "keywords": ["if", "else", "for", "while", "do", "try", "catch"] - }, - - "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], - - "disallowSpaceAfterKeywords": [], - - "disallowSpaceBeforeComma": true, - "disallowSpaceAfterComma": false, - "disallowSpaceBeforeSemicolon": true, - - "disallowNodeTypes": [ - "DebuggerStatement", - "ForInStatement", - "LabeledStatement", - "SwitchCase", - "SwitchStatement", - "WithStatement" - ], - - "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, - - "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, - "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, - "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, - "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, - "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, - - "requireSpaceBetweenArguments": true, - - "disallowSpacesInsideParentheses": true, - - "disallowSpacesInsideArrayBrackets": true, - - "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, - - "disallowSpaceAfterObjectKeys": true, - - "requireCommaBeforeLineBreak": true, - - "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], - "requireSpaceAfterPrefixUnaryOperators": [], - - "disallowSpaceBeforePostfixUnaryOperators": ["++", 
"--"], - "requireSpaceBeforePostfixUnaryOperators": [], - - "disallowSpaceBeforeBinaryOperators": [], - "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], - - "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], - "disallowSpaceAfterBinaryOperators": [], - - "disallowImplicitTypeConversion": ["binary", "string"], - - "disallowKeywords": ["with", "eval"], - - "requireKeywordsOnNewLine": [], - "disallowKeywordsOnNewLine": ["else"], - - "requireLineFeedAtFileEnd": true, - - "disallowTrailingWhitespace": true, - - "disallowTrailingComma": true, - - "excludeFiles": ["node_modules/**", "vendor/**"], - - "disallowMultipleLineStrings": true, - - "requireDotNotation": { "allExcept": ["keywords"] }, - - "requireParenthesesAroundIIFE": true, - - "validateLineBreaks": "LF", - - "validateQuoteMarks": { - "escape": true, - "mark": "'" - }, - - "disallowOperatorBeforeLineBreak": [], - - "requireSpaceBeforeKeywords": [ - "do", - "for", - "if", - "else", - "switch", - "case", - "try", - "catch", - "finally", - "while", - "with", - "return" - ], - - "validateAlignedFunctionParameters": { - "lineBreakAfterOpeningBraces": true, - "lineBreakBeforeClosingBraces": true - }, - - "requirePaddingNewLinesBeforeExport": true, - - "validateNewlineAfterArrayElements": { - "maximum": 8 - }, - - "requirePaddingNewLinesAfterUseStrict": true, - - "disallowArrowFunctions": true, - - "disallowMultiLineTernary": true, - - "validateOrderInObjectKeys": "asc-insensitive", - - "disallowIdenticalDestructuringNames": true, - - "disallowNestedTernaries": { "maxLevel": 1 }, - - "requireSpaceAfterComma": { "allExcept": ["trailing"] }, - "requireAlignedMultilineParams": false, - - "requireSpacesInGenerator": { - "afterStar": true - }, - - "disallowSpacesInGenerator": { - "beforeStar": true - }, - - "disallowVar": false, - - "requireArrayDestructuring": false, - - "requireEnhancedObjectLiterals": false, - - "requireObjectDestructuring": false, - - "requireEarlyReturn": false, - - "requireCapitalizedConstructorsNew": { - "allExcept": ["Function", "String", "Object", "Symbol", "Number", "Date", "RegExp", "Error", "Boolean", "Array"] - }, - - "requireImportAlphabetized": false, - - "requireSpaceBeforeObjectValues": true, - "requireSpaceBeforeDestructuredValues": true, - - "disallowSpacesInsideTemplateStringPlaceholders": true, - - "disallowArrayDestructuringReturn": false, - - "requireNewlineBeforeSingleStatementsInIf": false, - - "disallowUnusedVariables": true, - - "requireSpacesInsideImportedObjectBraces": true, - - "requireUseStrict": true -} - diff --git a/node_modules/function-bind/.npmignore b/node_modules/function-bind/.npmignore deleted file mode 100644 index dbb555fd1f9f5..0000000000000 --- a/node_modules/function-bind/.npmignore +++ /dev/null @@ -1,22 +0,0 @@ -# gitignore -.DS_Store -.monitor -.*.swp -.nodemonignore -releases -*.log -*.err -fleet.json -public/browserify -bin/*.json -.bin -build -compile -.lock-wscript -coverage -node_modules - -# Only apps should have lockfiles -npm-shrinkwrap.json -package-lock.json -yarn.lock diff --git a/node_modules/function-bind/.travis.yml b/node_modules/function-bind/.travis.yml deleted file mode 100644 index 85f70d2464f39..0000000000000 --- a/node_modules/function-bind/.travis.yml +++ /dev/null @@ -1,168 +0,0 @@ -language: node_js -os: - - linux -node_js: - - "8.4" - - "7.10" - - "6.11" - - "5.12" - - "4.8" - - "iojs-v3.3" - - "iojs-v2.5" - - "iojs-v1.8" - - "0.12" - - "0.10" - - "0.8" -before_install: - - 'if [ 
"${TRAVIS_NODE_VERSION}" = "0.6" ]; then npm install -g npm@1.3 ; elif [ "${TRAVIS_NODE_VERSION}" != "0.9" ]; then case "$(npm --version)" in 1.*) npm install -g npm@1.4.28 ;; 2.*) npm install -g npm@2 ;; esac ; fi' - - 'if [ "${TRAVIS_NODE_VERSION}" != "0.6" ] && [ "${TRAVIS_NODE_VERSION}" != "0.9" ]; then if [ "${TRAVIS_NODE_VERSION%${TRAVIS_NODE_VERSION#[0-9]}}" = "0" ] || [ "${TRAVIS_NODE_VERSION:0:4}" = "iojs" ]; then npm install -g npm@4.5 ; else npm install -g npm; fi; fi' -install: - - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ]; then nvm install 0.8 && npm install -g npm@1.3 && npm install -g npm@1.4.28 && npm install -g npm@2 && npm install && nvm use "${TRAVIS_NODE_VERSION}"; else npm install; fi;' -script: - - 'if [ -n "${PRETEST-}" ]; then npm run pretest ; fi' - - 'if [ -n "${POSTTEST-}" ]; then npm run posttest ; fi' - - 'if [ -n "${COVERAGE-}" ]; then npm run coverage ; fi' - - 'if [ -n "${TEST-}" ]; then npm run tests-only ; fi' -sudo: false -env: - - TEST=true -matrix: - fast_finish: true - include: - - node_js: "node" - env: PRETEST=true - - node_js: "4" - env: COVERAGE=true - - node_js: "8.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "8.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "8.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "8.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.9" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.8" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.7" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.5" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "7.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.10" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.9" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.8" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.7" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.5" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "6.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.11" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.10" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.9" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.8" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.7" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.5" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "5.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.7" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.5" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "4.0" - 
env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v3.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v3.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v3.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v2.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v2.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v2.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v2.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v2.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.7" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.5" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.4" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.3" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.2" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.1" - env: TEST=true ALLOW_FAILURE=true - - node_js: "iojs-v1.0" - env: TEST=true ALLOW_FAILURE=true - - node_js: "0.11" - env: TEST=true ALLOW_FAILURE=true - - node_js: "0.9" - env: TEST=true ALLOW_FAILURE=true - - node_js: "0.6" - env: TEST=true ALLOW_FAILURE=true - - node_js: "0.4" - env: TEST=true ALLOW_FAILURE=true - allow_failures: - - os: osx - - env: TEST=true ALLOW_FAILURE=true diff --git a/node_modules/function-bind/README.md b/node_modules/function-bind/README.md deleted file mode 100644 index 81862a02cb940..0000000000000 --- a/node_modules/function-bind/README.md +++ /dev/null @@ -1,48 +0,0 @@ -# function-bind - -<!-- - [![build status][travis-svg]][travis-url] - [![NPM version][npm-badge-svg]][npm-url] - [![Coverage Status][5]][6] - [![gemnasium Dependency Status][7]][8] - [![Dependency status][deps-svg]][deps-url] - [![Dev Dependency status][dev-deps-svg]][dev-deps-url] ---> - -<!-- [![browser support][11]][12] --> - -Implementation of function.prototype.bind - -## Example - -I mainly do this for unit tests I run on phantomjs. -PhantomJS does not have Function.prototype.bind :( - -```js -Function.prototype.bind = require("function-bind") -``` - -## Installation - -`npm install function-bind` - -## Contributors - - - Raynos - -## MIT Licenced - - [travis-svg]: https://travis-ci.org/Raynos/function-bind.svg - [travis-url]: https://travis-ci.org/Raynos/function-bind - [npm-badge-svg]: https://badge.fury.io/js/function-bind.svg - [npm-url]: https://npmjs.org/package/function-bind - [5]: https://coveralls.io/repos/Raynos/function-bind/badge.png - [6]: https://coveralls.io/r/Raynos/function-bind - [7]: https://gemnasium.com/Raynos/function-bind.png - [8]: https://gemnasium.com/Raynos/function-bind - [deps-svg]: https://david-dm.org/Raynos/function-bind.svg - [deps-url]: https://david-dm.org/Raynos/function-bind - [dev-deps-svg]: https://david-dm.org/Raynos/function-bind/dev-status.svg - [dev-deps-url]: https://david-dm.org/Raynos/function-bind#info=devDependencies - [11]: https://ci.testling.com/Raynos/function-bind.png - [12]: https://ci.testling.com/Raynos/function-bind diff --git a/node_modules/gauge/CHANGELOG.md b/node_modules/gauge/CHANGELOG.md deleted file mode 100644 index 407bc192e77f4..0000000000000 --- a/node_modules/gauge/CHANGELOG.md +++ /dev/null @@ -1,160 +0,0 @@ -### v2.7.4 - -* Reset colors prior to ending a line, to eliminate flicker when a line - is trucated between start and end color sequences. - -### v2.7.3 - -* Only create our onExit handler when we're enabled and remove it when we're - disabled. 
This stops us from creating multiple onExit handlers when - multiple gauge objects are being used. -* Fix bug where if a theme name were given instead of a theme object, it - would crash. -* Remove supports-color because it's not actually used. Uhm. Yes, I just - updated it. >.> - -### v2.7.2 - -* Use supports-color instead of has-color (as the module has been renamed) - -### v2.7.1 - -* Bug fix: Calls to show/pulse while the progress bar is disabled should still - update our internal representation of what would be shown should it be enabled. - -### v2.7.0 - -* New feature: Add new `isEnabled` method to allow introspection of the gauge's - "enabledness" as controlled by `.enable()` and `.disable()`. - -### v2.6.0 - -* Bug fix: Don't run the code associated with `enable`/`disable` if the gauge - is already enabled or disabled respectively. This prevents leaking event - listeners, amongst other weirdness. -* New feature: Template items can have default values that will be used if no - value was otherwise passed in. - -### v2.5.3 - -* Default to `enabled` only if we have a tty. Users can always override - this by passing in the `enabled` option explicitly or by calling calling - `gauge.enable()`. - -### v2.5.2 - -* Externalized `./console-strings.js` into `console-control-strings`. - -### v2.5.1 - -* Update to `signal-exit@3.0.0`, which fixes a compatibility bug with the - node profiler. -* [#39](https://github.com/iarna/gauge/pull/39) Fix tests on 0.10 and add - a missing devDependency. ([@helloyou2012](https://github.com/helloyou2012)) - -### v2.5.0 - -* Add way to programmatically fetch a list of theme names in a themeset - (`Themeset.getThemeNames`). - -### v2.4.0 - -* Add support for setting themesets on existing gauge objects. -* Add post-IO callback to `gauge.hide()` as it is somtetimes necessary when - your terminal is interleaving output from multiple filehandles (ie, stdout - & stderr). - -### v2.3.1 - -* Fix a refactor bug in setTheme where it wasn't accepting the various types - of args it should. - -### v2.3.0 - -#### FEATURES - -* Add setTemplate & setTheme back in. -* Add support for named themes, you can now ask for things like 'colorASCII' - and 'brailleSpinner'. Of course, you can still pass in theme objects. - Additionally you can now pass in an object with `hasUnicode`, `hasColor` and - `platform` keys in order to override our guesses as to those values when - selecting a default theme from the themeset. -* Make the output stream optional (it defaults to `process.stderr` now). -* Add `setWriteTo(stream[, tty])` to change the output stream and, - optionally, tty. - -#### BUG FIXES & REFACTORING - -* Abort the display phase early if we're supposed to be hidden and we are. -* Stop printing a bunch of spaces at the end of lines, since we're already - using an erase-to-end-of-line code anyway. -* The unicode themes were missing the subsection separator. - -### v2.2.1 - -* Fix image in readme - -### v2.2.0 - -* All new themes API– reference themes by name and pass in custom themes and - themesets (themesets get platform support autodetection done on them to - select the best theme). Theme mixins let you add features to all existing - themes. -* Much, much improved test coverage. - -### v2.1.0 - -* Got rid of ░ in the default platform, noUnicode, hasColor theme. Thanks - to @yongtw123 for pointing out this had snuck in. -* Fiddled with the demo output to make it easier to see the spinner spin. Also - added prints before each platforms test output. 
-* I forgot to include `signal-exit` in our deps. <.< Thank you @KenanY for - finding this. Then I was lazy and made a new commit instead of using his - PR. Again, thank you for your patience @KenanY. -* Drastically speed up travis testing. -* Add a small javascript demo (demo.js) for showing off the various themes - (and testing them on diff platforms). -* Change: The subsection separator from ⁄ and / (different chars) to >. -* Fix crasher: A show or pulse without a label would cause the template renderer - to complain about a missing value. -* New feature: Add the ability to disable the clean-up-on-exit behavior. - Not something I expect to be widely desirable, but important if you have - multiple distinct gauge instances in your app. -* Use our own color support detection. - The `has-color` module proved too magic for my needs, making assumptions - as to which stream we write to and reading command line arguments. - -### v2.0.0 - -This is a major rewrite of the internals. Externally there are fewer -changes: - -* On node>0.8 gauge object now prints updates at a fixed rate. This means - that when you call `show` it may wait up to `updateInterval` ms before it - actually prints an update. You can override this behavior with the - `fixedFramerate` option. -* The gauge object now keeps the cursor hidden as long as it's enabled and - shown. -* The constructor's arguments have changed: now it takes a mandatory output - stream and an optional options object. The stream no longer needs to be - an `ansi`ified stream, although it can be if you want (but we won't make - use of its special features). -* Previously the gauge was disabled by default if `process.stdout` wasn't a - tty. Now it always defaults to enabled. If you want the previous - behavior set the `enabled` option to `process.stdout.isTTY`. -* The constructor's options have changed– see the docs for details. -* Themes are entirely different. If you were using a custom theme, or - referring to one directly (eg via `Gauge.unicode` or `Gauge.ascii`) then - you'll need to change your code. You can get the equivalent of the latter - with: - ``` - var themes = require('gauge/themes') - var unicodeTheme = themes(true, true) // returns the color unicode theme for your platform - ``` - The default themes no longer use any ambiguous width characters, so even - if you choose to display those as wide your progress bar should still - display correctly. -* Templates are entirely different and if you were using a custom one, you - should consult the documentation to learn how to recreate it. If you were - using the default, be aware that it has changed and the result looks quite - a bit different. diff --git a/node_modules/gauge/README.md b/node_modules/gauge/README.md deleted file mode 100644 index bdd60e38c2092..0000000000000 --- a/node_modules/gauge/README.md +++ /dev/null @@ -1,399 +0,0 @@ -gauge -===== - -A nearly stateless terminal based horizontal gauge / progress bar. - -```javascript -var Gauge = require("gauge") - -var gauge = new Gauge() - -gauge.show("test", 0.20) - -gauge.pulse("this") - -gauge.hide() ``` - -![](gauge-demo.gif) - - -### CHANGES FROM 1.x - -Gauge 2.x is a breaking release, please see the [changelog] for details on -what's changed if you were previously a user of this module. - -[changelog]: CHANGELOG.md - -### THE GAUGE CLASS - -This is the typical interface to the module– it provides a pretty -fire-and-forget interface to displaying your status information.
- -``` -var Gauge = require("gauge") - -var gauge = new Gauge([stream], [options]) -``` - -* **stream** – *(optional, default STDERR)* A stream that progress bar - updates are to be written to. Gauge honors backpressure and will pause - most writing if it is indicated. -* **options** – *(optional)* An option object. - -Constructs a new gauge. Gauges are drawn on a single line, and are not drawn -if **stream** isn't a tty and a tty isn't explicitly provided. - -If **stream** is a terminal or if you pass in **tty** to **options** then we -will detect terminal resizes and redraw to fit. We do this by watching for -`resize` events on the tty. (To work around a bug in versions of Node prior -to 2.5.0, we watch for them on stdout if the tty is stderr.) Resizes to -larger window sizes will be clean, but shrinking the window will always -result in some cruft. - -**IMPORTANT:** If you previously were passing in a non-tty stream but you still -want output (for example, a stream wrapped by the `ansi` module) then you -need to pass in the **tty** option below, as `gauge` needs access to -the underlying tty in order to do things like terminal resizes and terminal -width detection. - -The **options** object can have the following properties, all of which are -optional: - -* **updateInterval**: How often gauge updates should be drawn, in milliseconds. -* **fixedFramerate**: Defaults to false on node 0.8, true on everything - else. When this is true a timer is created to trigger once every - `updateInterval` ms, when false, updates are printed as soon as they come - in but updates more often than `updateInterval` are ignored. The reason - 0.8 doesn't have this set to true is that it can't `unref` its timer and - so it would stop your program from exiting– if you want to use this - feature with 0.8 just make sure you call `gauge.disable()` before you - expect your program to exit. -* **themes**: A themeset to use when selecting the theme to use. Defaults - to `gauge/themes`, see the [themes] documentation for details. -* **theme**: Select a theme for use; it can be a: - * Theme object, in which case the **themes** option is not used. - * The name of a theme, which will be looked up in the current *themes* - object. - * A configuration object with any of `hasUnicode`, `hasColor` or - `platform` keys, which will be used to override our guesses when making - a default theme selection. - - If no theme is selected then a default is picked using a combination of our - best guesses at your OS, color support and unicode support. -* **template**: Describes what you want your gauge to look like. The - default is what npm uses. Detailed [documentation] is later in this - document. -* **hideCursor**: Defaults to true. If true, then the cursor will be hidden - while the gauge is displayed. -* **tty**: The tty that you're ultimately writing to. Defaults to the same - as **stream**. This is used for detecting the width of the terminal and - resizes. The width used is `tty.columns - 1`. If no tty is available then - a width of `79` is assumed. -* **enabled**: Defaults to true if `tty` is a TTY, false otherwise. If true - the gauge starts enabled. If disabled then all update commands are - ignored and no gauge will be printed until you call `.enable()`. -* **Plumbing**: The class to use to actually generate the gauge for - printing. This defaults to `require('gauge/plumbing')` and ordinarily you - shouldn't need to override this. -* **cleanupOnExit**: Defaults to true.
Ordinarily we register an exit - handler to make sure your cursor is turned back on and the progress bar - erased when your process exits, even if you Ctrl-C out or otherwise exit - unexpectedly. You can disable this and it won't register the exit handler. - -[has-unicode]: https://www.npmjs.com/package/has-unicode -[themes]: #themes -[documentation]: #templates - -#### `gauge.show(section | status, [completed])` - -The first argument is either the section, the name of the current thing -contributing to progress, or an object with keys like **section**, -**subsection** & **completed** (or any others you have types for in a custom -template). If you don't want to update or set any of these you can pass -`null` and it will be ignored. - -The second argument is the percent completed as a value between 0 and 1. -Without it, completion is just not updated. You'll also note that completion -can be passed in as part of a status object as the first argument. If both -it and the completed argument are passed in, the completed argument wins. - -#### `gauge.hide([cb])` - -Removes the gauge from the terminal. Optionally, calls back `cb` after IO has -had an opportunity to happen (currently this just means after `setImmediate` -has called back.) - -It turns out this is important when you're pausing the progress bar on one -filehandle and printing to another– otherwise (with a big enough print) node -can end up printing the "end progress bar" bits to the progress bar filehandle -while other stuff is printing to another filehandle. These getting interleaved -can cause corruption in some terminals. - -#### `gauge.pulse([subsection])` - -* **subsection** – *(optional)* The specific thing that triggered this pulse - -Spins the spinner in the gauge to show output. If **subsection** is -included then it will be combined with the last name passed to `gauge.show`. - -#### `gauge.disable()` - -Hides the gauge and ignores further calls to `show` or `pulse`. - -#### `gauge.enable()` - -Shows the gauge and resumes updating when `show` or `pulse` is called. - -#### `gauge.isEnabled()` - -Returns true if the gauge is enabled. - -#### `gauge.setThemeset(themes)` - -Change the themeset to select a theme from. The same as the `themes` option -used in the constructor. The theme will be reselected from this themeset. - -#### `gauge.setTheme(theme)` - -Change the active theme; it will be displayed with the next show or pulse. This can be: - -* Theme object, in which case the **themes** option is not used. - -* The name of a theme, which will be looked up in the current *themes* - object. -* A configuration object with any of `hasUnicode`, `hasColor` or - `platform` keys, which will be used to override our guesses when making - a default theme selection. - -If no theme is selected then a default is picked using a combination of our -best guesses at your OS, color support and unicode support. - -#### `gauge.setTemplate(template)` - -Change the active template; it will be displayed with the next show or pulse. - -### Tracking Completion - -If you have more than one thing going on that you want to track completion -of, you may find the related [are-we-there-yet] helpful. Its `change` -event can be wired up to the `show` method to get a more traditional -progress bar interface.
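Not part of the original README, but a minimal sketch of that wiring, assuming are-we-there-yet's `TrackerGroup`, `newItem` and `completeWork` API (which is not documented here):

```javascript
// Minimal sketch: drive the gauge from are-we-there-yet's 'change' events.
// The TrackerGroup/newItem/completeWork API is assumed, not taken from this README.
var Gauge = require("gauge")
var TrackerGroup = require("are-we-there-yet").TrackerGroup

var gauge = new Gauge()
var top = new TrackerGroup()

// redraw whenever overall completion changes; 'change' passes the item name
// and the aggregate completion as a value between 0 and 1
top.on("change", function (name, completed) {
  gauge.show(name, completed)
})

var files = top.newItem("files", 10) // 10 units of work
files.completeWork(3)                // gauge now shows roughly 30%
files.finish()                       // all work for this item done
gauge.hide()
```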
- -[are-we-there-yet]: https://www.npmjs.com/package/are-we-there-yet - -### THEMES - -``` -var themes = require('gauge/themes') - -// fetch the default color unicode theme for this platform -var ourTheme = themes({hasUnicode: true, hasColor: true}) - -// fetch the default non-color unicode theme for osx -var ourTheme = themes({hasUnicode: true, hasColor: false, platform: 'darwin'}) - -// create a new theme based on the color ascii theme for this platform -// that brackets the progress bar with arrows -var ourTheme = themes.newTheme(themes({hasUnicode: false, hasColor: true}), { - preProgressbar: '→', - postProgressbar: '←' -}) -``` - -The object returned by `gauge/themes` is an instance of the `ThemeSet` class. - -``` -var ThemeSet = require('gauge/theme-set') -var themes = new ThemeSet() -// or -var themes = require('gauge/themes') -var mythemes = themes.newThemeset() // creates a new themeset based on the default themes -``` - -#### themes(opts) -#### themes.getDefault(opts) - -The themeset is itself a function that fetches the default theme based on -platform, unicode and color support. - -`opts` is an object with the following properties: - -* **hasUnicode** - If true, fetch a unicode theme; if no unicode theme is - available then a non-unicode theme will be used. -* **hasColor** - If true, fetch a color theme; if no color theme is - available a non-color theme will be used. -* **platform** (optional) - Defaults to `process.platform`. If no - platform match is available then `fallback` is used instead. - -If no compatible theme can be found then an error will be thrown with a -`code` of `EMISSINGTHEME`. - -#### themes.addTheme(themeName, themeObj) -#### themes.addTheme(themeName, [parentTheme], newTheme) - -Adds a named theme to the themeset. You can pass in either a theme object, -as returned by `themes.newTheme` or the arguments you'd pass to -`themes.newTheme`. - -#### themes.getThemeNames() - -Return a list of all of the names of the themes in this themeset. Suitable -for use in `themes.getTheme(…)`. - -#### themes.getTheme(name) - -Returns the theme object from this theme set named `name`. - -If `name` does not exist in this themeset an error will be thrown with -a `code` of `EMISSINGTHEME`. - -#### themes.setDefault([opts], themeName) - -`opts` is an object with the following properties. - -* **platform** - Defaults to `'fallback'`. If your theme is platform - specific, specify that here with the platform from `process.platform`, eg, - `win32`, `darwin`, etc. -* **hasUnicode** - Defaults to `false`. If your theme uses unicode you - should set this to true. -* **hasColor** - Defaults to `false`. If your theme uses color you should - set this to true. - -`themeName` is the name of the theme (as given to `addTheme`) to use for -this set of `opts`. - -#### themes.newTheme([parentTheme,] newTheme) - -Create a new theme object based on `parentTheme`. If no `parentTheme` is -provided then a minimal parentTheme that defines functions for rendering the -activity indicator (spinner) and progress bar will be defined. (This -fallback parent is defined in `gauge/base-theme`.) - -newTheme should be a bare object– we'll start by discussing the properties -defined by the default themes: - -* **preProgressbar** - displayed prior to the progress bar, if the progress - bar is displayed. -* **postProgressbar** - displayed after the progress bar, if the progress bar - is displayed.
-* **progressbarTheme** - The subtheme passed through to the progress bar - renderer; it's an object with `complete` and `remaining` properties - that are the strings you want repeated for those sections of the progress - bar. -* **activityIndicatorTheme** - The theme for the activity indicator (spinner); - this can either be a string, in which case each character is a different step, or - an array of strings. -* **preSubsection** - Displayed as a separator between the `section` and - `subsection` when the latter is printed. - -More generally, themes can have any value that would be a valid value when rendering -templates. The properties in the theme are used when their name matches a type in -the template. Their values can be: - -* **strings & numbers** - They'll be included as is -* **function (values, theme, width)** - Should return what you want in your output. - *values* is an object with values provided via `gauge.show`, - *theme* is the theme specific to this item (see below) or this theme object, - and *width* is the number of characters wide your result should be. - -There are a couple of special prefixes: - -* **pre** - Is shown prior to the property, if it's displayed. -* **post** - Is shown after the property, if it's displayed. - -And one special suffix: - -* **Theme** - Its value is passed to a function-type item as the theme. - -#### themes.addToAllThemes(theme) - -This *mixes-in* `theme` into all themes currently defined. It also adds it -to the default parent theme for this themeset, so future themes added to -this themeset will get the values from `theme` by default. - -#### themes.newThemeset() - -Copy the current themeset into a new one. This allows you to easily inherit -one themeset from another. - -### TEMPLATES - -A template is an array of objects and strings that, after being evaluated, -will be turned into the gauge line. The default template is: - -```javascript -[ - {type: 'progressbar', length: 20}, - {type: 'activityIndicator', kerning: 1, length: 1}, - {type: 'section', kerning: 1, default: ''}, - {type: 'subsection', kerning: 1, default: ''} -] -``` - -The various template elements can either be **plain strings**, in which case they will -be included verbatim in the output, or objects with the following properties: - -* *type* can be any of the following plus any keys you pass into `gauge.show` plus - any keys you have on a custom theme. - * `section` – What big thing you're working on now. - * `subsection` – What component of that thing is currently working. - * `activityIndicator` – Shows a spinner using the `activityIndicatorTheme` - from your active theme. - * `progressbar` – A progress bar representing your current `completed` - using the `progressbarTheme` from your active theme. -* *kerning* – Number of spaces that must be between this item and other - items, if this item is displayed at all. -* *maxLength* – The maximum length for this element. If its value is longer it - will be truncated. -* *minLength* – The minimum length for this element. If its value is shorter it - will be padded according to the *align* value. -* *align* – (Default: left) Possible values "left", "right" and "center". Works - as you'd expect from word processors. -* *length* – Provides a single value for both *minLength* and *maxLength*. If both - *length* and *minLength* or *maxLength* are specified then the latter take precedence. -* *value* – A literal value to use for this template item.
-* *default* – A default value to use for this template item if a value - wasn't otherwise passed in. - -### PLUMBING - -This is the super simple, assume nothing, do no magic internals used by gauge to -implement its ordinary interface. - -``` -var Plumbing = require('gauge/plumbing') -var gauge = new Plumbing(theme, template, width) -``` - -* **theme**: The theme to use. -* **template**: The template to use. -* **width**: How wide your gauge should be - -#### `gauge.setTheme(theme)` - -Change the active theme. - -#### `gauge.setTemplate(template)` - -Change the active template. - -#### `gauge.setWidth(width)` - -Change the width to render at. - -#### `gauge.hide()` - -Return the string necessary to hide the progress bar - -#### `gauge.hideCursor()` - -Return a string to hide the cursor. - -#### `gauge.showCursor()` - -Return a string to show the cursor. - -#### `gauge.show(status)` - -Using `status` for values, render the provided template with the theme and return -a string that is suitable for printing to update the gauge. diff --git a/node_modules/gauge/has-color.js b/node_modules/gauge/has-color.js index e283a256f26b7..16cba0eb47d33 100644 --- a/node_modules/gauge/has-color.js +++ b/node_modules/gauge/has-color.js @@ -1,12 +1,4 @@ 'use strict' +var colorSupport = require('color-support') -module.exports = isWin32() || isColorTerm() - -function isWin32 () { - return process.platform === 'win32' -} - -function isColorTerm () { - var termHasColor = /^screen|^xterm|^vt100|color|ansi|cygwin|linux/i - return !!process.env.COLORTERM || termHasColor.test(process.env.TERM) -} +module.exports = colorSupport().hasBasic diff --git a/node_modules/gauge/index.js b/node_modules/gauge/index.js index c55324008cbfa..87a4bb930834e 100644 --- a/node_modules/gauge/index.js +++ b/node_modules/gauge/index.js @@ -188,7 +188,7 @@ Gauge.prototype.show = function (section, completed) { Gauge.prototype.pulse = function (subsection) { this._status.subsection = subsection || '' - this._status.spun ++ + this._status.spun++ if (this._disabled) return if (!this._showing) return this._requestRedraw() diff --git a/node_modules/gauge/node_modules/aproba/README.md b/node_modules/gauge/node_modules/aproba/README.md deleted file mode 100644 index 0bfc594c56a37..0000000000000 --- a/node_modules/gauge/node_modules/aproba/README.md +++ /dev/null @@ -1,94 +0,0 @@ -aproba -====== - -A ridiculously light-weight function argument validator - -``` -var validate = require("aproba") - -function myfunc(a, b, c) { - // `a` must be a string, `b` a number, `c` a function - validate('SNF', arguments) // [a,b,c] is also valid -} - -myfunc('test', 23, function () {}) // ok -myfunc(123, 23, function () {}) // type error -myfunc('test', 23) // missing arg error -myfunc('test', 23, function () {}, true) // too many args error - -``` - -Valid types are: - -| type | description -| :--: | :---------- -| * | matches any type -| A | `Array.isArray` OR an `arguments` object -| S | typeof == string -| N | typeof == number -| F | typeof == function -| O | typeof == object and not type A and not type E -| B | typeof == boolean -| E | `instanceof Error` OR `null` **(special: see below)** -| Z | == `null` - -Validation failures throw one of three exception types, distinguished by a -`code` property of `EMISSINGARG`, `EINVALIDTYPE` or `ETOOMANYARGS`. - -If you pass in an invalid type then it will throw with a code of -`EUNKNOWNTYPE`. - -If an **error** argument is found and is not null then the remaining -arguments are optional. 
That is, if you say `ESO` then that's like using a -non-magical `E` in: `E|ESO|ZSO`. - -### But I have optional arguments?! - -You can provide more than one signature by separating them with pipes `|`. -If any signature matches the arguments then they'll be considered valid. - -So for example, say you wanted to write a signature for -`fs.createWriteStream`. The docs for it describe it thusly: - -``` -fs.createWriteStream(path[, options]) -``` - -This would be a signature of `SO|S`. That is, a string and and object, or -just a string. - -Now, if you read the full `fs` docs, you'll see that actually path can ALSO -be a buffer. And options can be a string, that is: -``` -path <String> | <Buffer> -options <String> | <Object> -``` - -To reproduce this you have to fully enumerate all of the possible -combinations and that implies a signature of `SO|SS|OO|OS|S|O`. The -awkwardness is a feature: It reminds you of the complexity you're adding to -your API when you do this sort of thing. - - -### Browser support - -This has no dependencies and should work in browsers, though you'll have -noisier stack traces. - -### Why this exists - -I wanted a very simple argument validator. It needed to do two things: - -1. Be more concise and easier to use than assertions - -2. Not encourage an infinite bikeshed of DSLs - -This is why types are specified by a single character and there's no such -thing as an optional argument. - -This is not intended to validate user data. This is specifically about -asserting the interface of your functions. - -If you need greater validation, I encourage you to write them by hand or -look elsewhere. - diff --git a/node_modules/gauge/node_modules/is-fullwidth-code-point/readme.md b/node_modules/gauge/node_modules/is-fullwidth-code-point/readme.md deleted file mode 100644 index 4936464b1b415..0000000000000 --- a/node_modules/gauge/node_modules/is-fullwidth-code-point/readme.md +++ /dev/null @@ -1,39 +0,0 @@ -# is-fullwidth-code-point [![Build Status](https://travis-ci.org/sindresorhus/is-fullwidth-code-point.svg?branch=master)](https://travis-ci.org/sindresorhus/is-fullwidth-code-point) - -> Check if the character represented by a given [Unicode code point](https://en.wikipedia.org/wiki/Code_point) is [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) - - -## Install - -``` -$ npm install --save is-fullwidth-code-point -``` - - -## Usage - -```js -var isFullwidthCodePoint = require('is-fullwidth-code-point'); - -isFullwidthCodePoint('谢'.codePointAt()); -//=> true - -isFullwidthCodePoint('a'.codePointAt()); -//=> false -``` - - -## API - -### isFullwidthCodePoint(input) - -#### input - -Type: `number` - -[Code point](https://en.wikipedia.org/wiki/Code_point) of a character. - - -## License - -MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/gauge/node_modules/string-width/readme.md b/node_modules/gauge/node_modules/string-width/readme.md deleted file mode 100644 index 1ab42c93580ec..0000000000000 --- a/node_modules/gauge/node_modules/string-width/readme.md +++ /dev/null @@ -1,42 +0,0 @@ -# string-width [![Build Status](https://travis-ci.org/sindresorhus/string-width.svg?branch=master)](https://travis-ci.org/sindresorhus/string-width) - -> Get the visual width of a string - the number of columns required to display it - -Some Unicode characters are [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) and use double the normal width. 
[ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code) are stripped and doesn't affect the width. - -Useful to be able to measure the actual width of command-line output. - - -## Install - -``` -$ npm install --save string-width -``` - - -## Usage - -```js -const stringWidth = require('string-width'); - -stringWidth('古'); -//=> 2 - -stringWidth('\u001b[1m古\u001b[22m'); -//=> 2 - -stringWidth('a'); -//=> 1 -``` - - -## Related - -- [string-width-cli](https://github.com/sindresorhus/string-width-cli) - CLI for this module -- [string-length](https://github.com/sindresorhus/string-length) - Get the real length of a string -- [widest-line](https://github.com/sindresorhus/widest-line) - Get the visual width of the widest line in a string - - -## License - -MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/gauge/package.json b/node_modules/gauge/package.json index 4882cff8390d8..5635aa179f73d 100644 --- a/node_modules/gauge/package.json +++ b/node_modules/gauge/package.json @@ -1,11 +1,10 @@ { "name": "gauge", - "version": "2.7.4", + "version": "3.0.1", "description": "A terminal based horizontal guage", "main": "index.js", "scripts": { - "test": "standard && tap test/*.js --coverage", - "prepublish": "rm -f *~" + "test": "standard && tap test/*.js --coverage" }, "repository": { "type": "git", @@ -19,24 +18,25 @@ "author": "Rebecca Turner <me@re-becca.org>", "license": "ISC", "bugs": { - "url": "https://github.com/iarna/gauge/issues" + "url": "https://github.com/npm/gauge/issues" }, - "homepage": "https://github.com/iarna/gauge", + "homepage": "https://github.com/npm/gauge", "dependencies": { - "aproba": "^1.0.3", + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.2", "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", + "has-unicode": "^2.0.1", + "object-assign": "^4.1.1", "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" + "string-width": "^1.0.1 || ^2.0.0", + "strip-ansi": "^3.0.1 || ^4.0.0", + "wide-align": "^1.1.2" }, "devDependencies": { "readable-stream": "^2.0.6", "require-inject": "^1.4.0", - "standard": "^7.1.2", - "tap": "^5.7.2", + "standard": "^11.0.1", + "tap": "^12.0.1", "through2": "^2.0.0" }, "files": [ @@ -59,5 +59,8 @@ "theme-set.js", "themes.js", "wide-truncate.js" - ] + ], + "engines": { + "node": ">=10" + } } diff --git a/node_modules/gauge/progress-bar.js b/node_modules/gauge/progress-bar.js index 7f8dd68be24cf..1780a8a54d1cb 100644 --- a/node_modules/gauge/progress-bar.js +++ b/node_modules/gauge/progress-bar.js @@ -27,7 +27,7 @@ function repeat (string, width) { result += string } n = Math.floor(n / 2) - /*eslint no-self-assign: 0*/ + /* eslint no-self-assign: 0 */ string += string } while (n && stringWidth(result) < width) diff --git a/node_modules/gauge/render-template.js b/node_modules/gauge/render-template.js index 3261bfbe6f4be..9764c6e0a1556 100644 --- a/node_modules/gauge/render-template.js +++ b/node_modules/gauge/render-template.js @@ -1,7 +1,6 @@ 'use strict' var align = require('wide-align') var validate = require('aproba') -var objectAssign = require('object-assign') var wideTruncate = require('./wide-truncate') var error = require('./error') var TemplateItem = require('./template-item') @@ -34,7 +33,7 @@ function hasPreOrPost (item, values) { } function generatePreAndPost (baseItem, parentValues) { - var item = objectAssign({}, baseItem) + var item = Object.assign({}, baseItem) var values = Object.create(parentValues) var 
template = [] var pre = preType(item) @@ -82,13 +81,11 @@ function prepareItems (width, template, values) { var output = template.map(cloneAndObjectify).filter(function (item) { return item != null }) - var outputLength = 0 var remainingSpace = width var variableCount = output.length function consumeSpace (length) { if (length > remainingSpace) length = remainingSpace - outputLength += length remainingSpace -= length } diff --git a/node_modules/gauge/template-item.js b/node_modules/gauge/template-item.js index e46f447c941d3..4f02fefaa23ec 100644 --- a/node_modules/gauge/template-item.js +++ b/node_modules/gauge/template-item.js @@ -70,4 +70,3 @@ TemplateItem.prototype.getMinLength = function () { if (this.minLength == null) return null return this.minLength + this.padLeft + this.padRight } - diff --git a/node_modules/gauge/theme-set.js b/node_modules/gauge/theme-set.js index 68971d5d231b0..c022d61cf13cb 100644 --- a/node_modules/gauge/theme-set.js +++ b/node_modules/gauge/theme-set.js @@ -112,4 +112,3 @@ ThemeSetProto.newThemeSet = function () { defaults: JSON.parse(JSON.stringify(this.defaults || {})) }) } - diff --git a/node_modules/gauge/themes.js b/node_modules/gauge/themes.js index eb5a4f5b5e103..df1184db51f2c 100644 --- a/node_modules/gauge/themes.js +++ b/node_modules/gauge/themes.js @@ -1,5 +1,5 @@ 'use strict' -var consoleControl = require('console-control-strings') +var color = require('console-control-strings').color var ThemeSet = require('./theme-set.js') var themes = module.exports = new ThemeSet() @@ -17,12 +17,12 @@ themes.addTheme('ASCII', { themes.addTheme('colorASCII', themes.getTheme('ASCII'), { progressbarTheme: { - preComplete: consoleControl.color('inverse'), - complete: ' ', - postComplete: consoleControl.color('stopInverse'), - preRemaining: consoleControl.color('brightBlack'), + preComplete: color('bgBrightWhite', 'brightWhite'), + complete: '#', + postComplete: color('reset'), + preRemaining: color('bgBrightBlack', 'brightBlack'), remaining: '.', - postRemaining: consoleControl.color('reset') + postRemaining: color('reset') } }) @@ -30,7 +30,7 @@ themes.addTheme('brailleSpinner', { preProgressbar: '⸨', postProgressbar: '⸩', progressbarTheme: { - complete: '░', + complete: '#', remaining: '⠂' }, activityIndicatorTheme: '⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏', @@ -39,12 +39,12 @@ themes.addTheme('brailleSpinner', { themes.addTheme('colorBrailleSpinner', themes.getTheme('brailleSpinner'), { progressbarTheme: { - preComplete: consoleControl.color('inverse'), - complete: ' ', - postComplete: consoleControl.color('stopInverse'), - preRemaining: consoleControl.color('brightBlack'), - remaining: '░', - postRemaining: consoleControl.color('reset') + preComplete: color('bgBrightWhite', 'brightWhite'), + complete: '#', + postComplete: color('reset'), + preRemaining: color('bgBrightBlack', 'brightBlack'), + remaining: '⠂', + postRemaining: color('reset') } }) @@ -52,3 +52,5 @@ themes.setDefault({}, 'ASCII') themes.setDefault({hasColor: true}, 'colorASCII') themes.setDefault({platform: 'darwin', hasUnicode: true}, 'brailleSpinner') themes.setDefault({platform: 'darwin', hasUnicode: true, hasColor: true}, 'colorBrailleSpinner') +themes.setDefault({platform: 'linux', hasUnicode: true}, 'brailleSpinner') +themes.setDefault({platform: 'linux', hasUnicode: true, hasColor: true}, 'colorBrailleSpinner') diff --git a/node_modules/getpass/.npmignore b/node_modules/getpass/.npmignore deleted file mode 100644 index a4261fc06feaa..0000000000000 --- a/node_modules/getpass/.npmignore +++ /dev/null @@ -1,8 +0,0 @@ 
-.gitmodules -deps -docs -Makefile -node_modules -test -tools -coverage diff --git a/node_modules/getpass/.travis.yml b/node_modules/getpass/.travis.yml deleted file mode 100644 index d8b5833a71b22..0000000000000 --- a/node_modules/getpass/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: node_js -node_js: - - "5.10" - - "4.4" - - "4.1" - - "0.12" - - "0.10" -before_install: - - "make check" diff --git a/node_modules/getpass/README.md b/node_modules/getpass/README.md deleted file mode 100644 index 6e4a50f63f7f0..0000000000000 --- a/node_modules/getpass/README.md +++ /dev/null @@ -1,32 +0,0 @@ -## getpass - -Get a password from the terminal. Sounds simple? Sounds like the `readline` -module should be able to do it? NOPE. - -## Install and use it - -```bash -npm install --save getpass -``` - -```javascript -const mod_getpass = require('getpass'); -``` - -## API - -### `mod_getpass.getPass([options, ]callback)` - -Gets a password from the terminal. If available, this uses `/dev/tty` to avoid -interfering with any data being piped in or out of stdio. - -This function prints a prompt (by default `Password:`) and then accepts input -without echoing. - -Parameters: - - * `options`, an Object, with properties: - * `prompt`, an optional String - * `callback`, a `Func(error, password)`, with arguments: - * `error`, either `null` (no error) or an `Error` instance - * `password`, a String diff --git a/node_modules/glob/README.md b/node_modules/glob/README.md deleted file mode 100644 index 0916a48255cd6..0000000000000 --- a/node_modules/glob/README.md +++ /dev/null @@ -1,375 +0,0 @@ -# Glob - -Match files using the patterns the shell uses, like stars and stuff. - -[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Build Status](https://ci.appveyor.com/api/projects/status/kd7f3yftf7unxlsx?svg=true)](https://ci.appveyor.com/project/isaacs/node-glob) [![Coverage Status](https://coveralls.io/repos/isaacs/node-glob/badge.svg?branch=master&service=github)](https://coveralls.io/github/isaacs/node-glob?branch=master) - -This is a glob implementation in JavaScript. It uses the `minimatch` -library to do its matching. - -![](logo/glob.png) - -## Usage - -Install with npm - -``` -npm i glob -``` - -```javascript -var glob = require("glob") - -// options is optional -glob("**/*.js", options, function (er, files) { - // files is an array of filenames. - // If the `nonull` option is set, and nothing - // was found, then files is ["**/*.js"] - // er is an error object or null. -}) -``` - -## Glob Primer - -"Globs" are the patterns you type when you do stuff like `ls *.js` on -the command line, or put `build/*` in a `.gitignore` file. - -Before parsing the path part patterns, braced sections are expanded -into a set. Braced sections start with `{` and end with `}`, with any -number of comma-delimited sections within. Braced sections may contain -slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`. - -The following characters have special magic meaning when used in a -path portion: - -* `*` Matches 0 or more characters in a single path portion -* `?` Matches 1 character -* `[...]` Matches a range of characters, similar to a RegExp range. - If the first character of the range is `!` or `^` then it matches - any character not in the range. -* `!(pattern|pattern|pattern)` Matches anything that does not match - any of the patterns provided. -* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the - patterns provided. 
-* `+(pattern|pattern|pattern)` Matches one or more occurrences of the - patterns provided. -* `*(a|b|c)` Matches zero or more occurrences of the patterns provided -* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns - provided -* `**` If a "globstar" is alone in a path portion, then it matches - zero or more directories and subdirectories searching for matches. - It does not crawl symlinked directories. - -### Dots - -If a file or directory path portion has a `.` as the first character, -then it will not match any glob pattern unless that pattern's -corresponding path part also has a `.` as its first character. - -For example, the pattern `a/.*/c` would match the file at `a/.b/c`. -However the pattern `a/*/c` would not, because `*` does not start with -a dot character. - -You can make glob treat dots as normal characters by setting -`dot:true` in the options. - -### Basename Matching - -If you set `matchBase:true` in the options, and the pattern has no -slashes in it, then it will seek for any file anywhere in the tree -with a matching basename. For example, `*.js` would match -`test/simple/basic.js`. - -### Empty Sets - -If no matching files are found, then an empty array is returned. This -differs from the shell, where the pattern itself is returned. For -example: - - $ echo a*s*d*f - a*s*d*f - -To get the bash-style behavior, set the `nonull:true` in the options. - -### See Also: - -* `man sh` -* `man bash` (Search for "Pattern Matching") -* `man 3 fnmatch` -* `man 5 gitignore` -* [minimatch documentation](https://github.com/isaacs/minimatch) - -## glob.hasMagic(pattern, [options]) - -Returns `true` if there are any special characters in the pattern, and -`false` otherwise. - -Note that the options affect the results. If `noext:true` is set in -the options object, then `+(a|b)` will not be considered a magic -pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}` -then that is considered magical, unless `nobrace:true` is set in the -options. - -## glob(pattern, [options], cb) - -* `pattern` `{String}` Pattern to be matched -* `options` `{Object}` -* `cb` `{Function}` - * `err` `{Error | null}` - * `matches` `{Array<String>}` filenames found matching the pattern - -Perform an asynchronous glob search. - -## glob.sync(pattern, [options]) - -* `pattern` `{String}` Pattern to be matched -* `options` `{Object}` -* return: `{Array<String>}` filenames found matching the pattern - -Perform a synchronous glob search. - -## Class: glob.Glob - -Create a Glob object by instantiating the `glob.Glob` class. - -```javascript -var Glob = require("glob").Glob -var mg = new Glob(pattern, options, cb) -``` - -It's an EventEmitter, and starts walking the filesystem to find matches -immediately. - -### new glob.Glob(pattern, [options], [cb]) - -* `pattern` `{String}` pattern to search for -* `options` `{Object}` -* `cb` `{Function}` Called when an error occurs, or matches are found - * `err` `{Error | null}` - * `matches` `{Array<String>}` filenames found matching the pattern - -Note that if the `sync` flag is set in the options, then matches will -be immediately available on the `g.found` member. - -### Properties - -* `minimatch` The minimatch object that the glob uses. -* `options` The options object passed in. -* `aborted` Boolean which is set to true when calling `abort()`. There - is no way at this time to continue a glob search after aborting, but - you can re-use the statCache to avoid having to duplicate syscalls. -* `cache` Convenience object. 
Each field has the following possible - values: - * `false` - Path does not exist - * `true` - Path exists - * `'FILE'` - Path exists, and is not a directory - * `'DIR'` - Path exists, and is a directory - * `[file, entries, ...]` - Path exists, is a directory, and the - array value is the results of `fs.readdir` -* `statCache` Cache of `fs.stat` results, to prevent statting the same - path multiple times. -* `symlinks` A record of which paths are symbolic links, which is - relevant in resolving `**` patterns. -* `realpathCache` An optional object which is passed to `fs.realpath` - to minimize unnecessary syscalls. It is stored on the instantiated - Glob object, and may be re-used. - -### Events - -* `end` When the matching is finished, this is emitted with all the - matches found. If the `nonull` option is set, and no match was found, - then the `matches` list contains the original pattern. The matches - are sorted, unless the `nosort` flag is set. -* `match` Every time a match is found, this is emitted with the specific - thing that matched. It is not deduplicated or resolved to a realpath. -* `error` Emitted when an unexpected error is encountered, or whenever - any fs error occurs if `options.strict` is set. -* `abort` When `abort()` is called, this event is raised. - -### Methods - -* `pause` Temporarily stop the search -* `resume` Resume the search -* `abort` Stop the search forever - -### Options - -All the options that can be passed to Minimatch can also be passed to -Glob to change pattern matching behavior. Also, some have been added, -or have glob-specific ramifications. - -All options are false by default, unless otherwise noted. - -All options are added to the Glob object, as well. - -If you are running many `glob` operations, you can pass a Glob object -as the `options` argument to a subsequent operation to shortcut some -`stat` and `readdir` calls. At the very least, you may pass in shared -`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that -parallel glob operations will be sped up by sharing information about -the filesystem. - -* `cwd` The current working directory in which to search. Defaults - to `process.cwd()`. -* `root` The place where patterns starting with `/` will be mounted - onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix - systems, and `C:\` or some such on Windows.) -* `dot` Include `.dot` files in normal matches and `globstar` matches. - Note that an explicit dot in a portion of the pattern will always - match dot files. -* `nomount` By default, a pattern starting with a forward-slash will be - "mounted" onto the root setting, so that a valid filesystem path is - returned. Set this flag to disable that behavior. -* `mark` Add a `/` character to directory matches. Note that this - requires additional stat calls. -* `nosort` Don't sort the results. -* `stat` Set to true to stat *all* results. This reduces performance - somewhat, and is completely unnecessary, unless `readdir` is presumed - to be an untrustworthy indicator of file existence. -* `silent` When an unusual error is encountered when attempting to - read a directory, a warning will be printed to stderr. Set the - `silent` option to true to suppress these warnings. -* `strict` When an unusual error is encountered when attempting to - read a directory, the process will just continue on in search of - other matches. Set the `strict` option to raise an error in these - cases. -* `cache` See `cache` property above. 
Pass in a previously generated - cache object to save some fs calls. -* `statCache` A cache of results of filesystem information, to prevent - unnecessary stat calls. While it should not normally be necessary - to set this, you may pass the statCache from one glob() call to the - options object of another, if you know that the filesystem will not - change between calls. (See "Race Conditions" below.) -* `symlinks` A cache of known symbolic links. You may pass in a - previously generated `symlinks` object to save `lstat` calls when - resolving `**` matches. -* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead. -* `nounique` In some cases, brace-expanded patterns can result in the - same file showing up multiple times in the result set. By default, - this implementation prevents duplicates in the result set. Set this - flag to disable that behavior. -* `nonull` Set to never return an empty set, instead returning a set - containing the pattern itself. This is the default in glob(3). -* `debug` Set to enable debug logging in minimatch and glob. -* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets. -* `noglobstar` Do not match `**` against multiple filenames. (Ie, - treat it as a normal `*` instead.) -* `noext` Do not match `+(a|b)` "extglob" patterns. -* `nocase` Perform a case-insensitive match. Note: on - case-insensitive filesystems, non-magic patterns will match by - default, since `stat` and `readdir` will not raise errors. -* `matchBase` Perform a basename-only match if the pattern does not - contain any slash characters. That is, `*.js` would be treated as - equivalent to `**/*.js`, matching all js files in all directories. -* `nodir` Do not match directories, only files. (Note: to match - *only* directories, simply put a `/` at the end of the pattern.) -* `ignore` Add a pattern or an array of glob patterns to exclude matches. - Note: `ignore` patterns are *always* in `dot:true` mode, regardless - of any other settings. -* `follow` Follow symlinked directories when expanding `**` patterns. - Note that this can result in a lot of duplicate references in the - presence of cyclic links. -* `realpath` Set to true to call `fs.realpath` on all of the results. - In the case of a symlink that cannot be resolved, the full absolute - path to the matched entry is returned (though it will usually be a - broken symlink) -* `absolute` Set to true to always receive absolute paths for matched - files. Unlike `realpath`, this also affects the values returned in - the `match` event. - -## Comparisons to other fnmatch/glob implementations - -While strict compliance with the existing standards is a worthwhile -goal, some discrepancies exist between node-glob and other -implementations, and are intentional. - -The double-star character `**` is supported by default, unless the -`noglobstar` flag is set. This is supported in the manner of bsdglob -and bash 4.3, where `**` only has special significance if it is the only -thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but -`a/**b` will not. - -Note that symlinked directories are not crawled as part of a `**`, -though their contents may match against subsequent portions of the -pattern. This prevents infinite loops and duplicates and the like. - -If an escaped pattern has no matches, and the `nonull` flag is set, -then glob returns the pattern as-provided, rather than -interpreting the character escapes. For example, -`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than -`"*a?"`. 
This is akin to setting the `nullglob` option in bash, except -that it does not resolve escaped pattern characters. - -If brace expansion is not disabled, then it is performed before any -other interpretation of the glob pattern. Thus, a pattern like -`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded -**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are -checked for validity. Since those two are valid, matching proceeds. - -### Comments and Negation - -Previously, this module let you mark a pattern as a "comment" if it -started with a `#` character, or a "negated" pattern if it started -with a `!` character. - -These options were deprecated in version 5, and removed in version 6. - -To specify things that should not match, use the `ignore` option. - -## Windows - -**Please only use forward-slashes in glob expressions.** - -Though windows uses either `/` or `\` as its path separator, only `/` -characters are used by this glob implementation. You must use -forward-slashes **only** in glob expressions. Back-slashes will always -be interpreted as escape characters, not path separators. - -Results from absolute patterns such as `/foo/*` are mounted onto the -root setting using `path.join`. On windows, this will by default result -in `/foo/*` matching `C:\foo\bar.txt`. - -## Race Conditions - -Glob searching, by its very nature, is susceptible to race conditions, -since it relies on directory walking and such. - -As a result, it is possible that a file that exists when glob looks for -it may have been deleted or modified by the time it returns the result. - -As part of its internal implementation, this program caches all stat -and readdir calls that it makes, in order to cut down on system -overhead. However, this also makes it even more susceptible to races, -especially if the cache or statCache objects are reused between glob -calls. - -Users are thus advised not to use a glob result as a guarantee of -filesystem state in the face of rapid changes. For the vast majority -of operations, this is never a problem. - -## Glob Logo -Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo). - -The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/). - -## Contributing - -Any change to behavior (including bugfixes) must come with a test. - -Patches that fail tests or reduce performance will be rejected. 
- -``` -# to run tests -npm test - -# to re-generate test fixtures -npm run test-regen - -# to benchmark against bash/zsh -npm run bench - -# to profile javascript -npm run prof -``` - -![](oh-my-glob.gif) diff --git a/node_modules/glob/common.js b/node_modules/glob/common.js index 66651bb3aac65..d14157a0aec8a 100644 --- a/node_modules/glob/common.js +++ b/node_modules/glob/common.js @@ -1,5 +1,3 @@ -exports.alphasort = alphasort -exports.alphasorti = alphasorti exports.setopts = setopts exports.ownProp = ownProp exports.makeAbs = makeAbs @@ -17,12 +15,8 @@ var minimatch = require("minimatch") var isAbsolute = require("path-is-absolute") var Minimatch = minimatch.Minimatch -function alphasorti (a, b) { - return a.toLowerCase().localeCompare(b.toLowerCase()) -} - function alphasort (a, b) { - return a.localeCompare(b) + return a.localeCompare(b, 'en') } function setupIgnores (self, options) { @@ -150,7 +144,7 @@ function finish (self) { all = Object.keys(all) if (!self.nosort) - all = all.sort(self.nocase ? alphasorti : alphasort) + all = all.sort(alphasort) // at *some* point we statted all of these if (self.mark) { diff --git a/node_modules/glob/glob.js b/node_modules/glob/glob.js index 58dec0f6c2bd0..dc27aef10b344 100644 --- a/node_modules/glob/glob.js +++ b/node_modules/glob/glob.js @@ -51,8 +51,6 @@ var assert = require('assert') var isAbsolute = require('path-is-absolute') var globSync = require('./sync.js') var common = require('./common.js') -var alphasort = common.alphasort -var alphasorti = common.alphasorti var setopts = common.setopts var ownProp = common.ownProp var inflight = require('inflight') diff --git a/node_modules/glob/package.json b/node_modules/glob/package.json index 6477c3070cb14..b345ae1e9fd83 100644 --- a/node_modules/glob/package.json +++ b/node_modules/glob/package.json @@ -2,7 +2,7 @@ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)", "name": "glob", "description": "a little globber", - "version": "7.1.6", + "version": "7.1.7", "repository": { "type": "git", "url": "git://github.com/isaacs/node-glob.git" @@ -27,13 +27,18 @@ "devDependencies": { "mkdirp": "0", "rimraf": "^2.2.8", - "tap": "^12.0.1", + "tap": "^15.0.6", "tick": "0.0.6" }, + "tap": { + "before": "test/00-setup.js", + "after": "test/zz-cleanup.js", + "jobs": 1 + }, "scripts": { "prepublish": "npm run benchclean", "profclean": "rm -f v8.log profile.txt", - "test": "tap test/*.js --cov", + "test": "tap", "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js", "bench": "bash benchmark.sh", "prof": "bash prof.sh && cat profile.txt", diff --git a/node_modules/glob/sync.js b/node_modules/glob/sync.js index c952134baa7ec..10b0ed2c0026b 100644 --- a/node_modules/glob/sync.js +++ b/node_modules/glob/sync.js @@ -11,8 +11,6 @@ var path = require('path') var assert = require('assert') var isAbsolute = require('path-is-absolute') var common = require('./common.js') -var alphasort = common.alphasort -var alphasorti = common.alphasorti var setopts = common.setopts var ownProp = common.ownProp var childrenIgnored = common.childrenIgnored diff --git a/node_modules/graceful-fs/README.md b/node_modules/graceful-fs/README.md deleted file mode 100644 index 5273a50ad6a52..0000000000000 --- a/node_modules/graceful-fs/README.md +++ /dev/null @@ -1,133 +0,0 @@ -# graceful-fs - -graceful-fs functions as a drop-in replacement for the fs module, -making various improvements. 
- -The improvements are meant to normalize behavior across different -platforms and environments, and to make filesystem access more -resilient to errors. - -## Improvements over [fs module](https://nodejs.org/api/fs.html) - -* Queues up `open` and `readdir` calls, and retries them once - something closes if there is an EMFILE error from too many file - descriptors. -* fixes `lchmod` for Node versions prior to 0.6.2. -* implements `fs.lutimes` if possible. Otherwise it becomes a noop. -* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or - `lchown` if the user isn't root. -* makes `lchmod` and `lchown` become noops, if not available. -* retries reading a file if `read` results in EAGAIN error. - -On Windows, it retries renaming a file for up to one second if `EACCESS` -or `EPERM` error occurs, likely because antivirus software has locked -the directory. - -## USAGE - -```javascript -// use just like fs -var fs = require('graceful-fs') - -// now go and do stuff with it... -fs.readFileSync('some-file-or-whatever') -``` - -## Global Patching - -If you want to patch the global fs module (or any other fs-like -module) you can do this: - -```javascript -// Make sure to read the caveat below. -var realFs = require('fs') -var gracefulFs = require('graceful-fs') -gracefulFs.gracefulify(realFs) -``` - -This should only ever be done at the top-level application layer, in -order to delay on EMFILE errors from any fs-using dependencies. You -should **not** do this in a library, because it can cause unexpected -delays in other parts of the program. - -## Changes - -This module is fairly stable at this point, and used by a lot of -things. That being said, because it implements a subtle behavior -change in a core part of the node API, even modest changes can be -extremely breaking, and the versioning is thus biased towards -bumping the major when in doubt. - -The main change between major versions has been switching between -providing a fully-patched `fs` module vs monkey-patching the node core -builtin, and the approach by which a non-monkey-patched `fs` was -created. - -The goal is to trade `EMFILE` errors for slower fs operations. So, if -you try to open a zillion files, rather than crashing, `open` -operations will be queued up and wait for something else to `close`. - -There are advantages to each approach. Monkey-patching the fs means -that no `EMFILE` errors can possibly occur anywhere in your -application, because everything is using the same core `fs` module, -which is patched. However, it can also obviously cause undesirable -side-effects, especially if the module is loaded multiple times. - -Implementing a separate-but-identical patched `fs` module is more -surgical (and doesn't run the risk of patching multiple times), but -also imposes the challenge of keeping in sync with the core module. - -The current approach loads the `fs` module, and then creates a -lookalike object that has all the same methods, except a few that are -patched. It is safe to use in all versions of Node from 0.8 through -7.0. - -### v4 - -* Do not monkey-patch the fs module. This module may now be used as a - drop-in dep, and users can opt into monkey-patching the fs builtin - if their app requires it. - -### v3 - -* Monkey-patch fs, because the eval approach no longer works on recent - node. 
-* fixed possible type-error throw if rename fails on windows -* verify that we *never* get EMFILE errors -* Ignore ENOSYS from chmod/chown -* clarify that graceful-fs must be used as a drop-in - -### v2.1.0 - -* Use eval rather than monkey-patching fs. -* readdir: Always sort the results -* win32: requeue a file if error has an OK status - -### v2.0 - -* A return to monkey patching -* wrap process.cwd - -### v1.1 - -* wrap readFile -* Wrap fs.writeFile. -* readdir protection -* Don't clobber the fs builtin -* Handle fs.read EAGAIN errors by trying again -* Expose the curOpen counter -* No-op lchown/lchmod if not implemented -* fs.rename patch only for win32 -* Patch fs.rename to handle AV software on Windows -* Close #4 Chown should not fail on einval or eperm if non-root -* Fix isaacs/fstream#1 Only wrap fs one time -* Fix #3 Start at 1024 max files, then back off on EMFILE -* lutimes that doens't blow up on Linux -* A full on-rewrite using a queue instead of just swallowing the EMFILE error -* Wrap Read/Write streams as well - -### 1.0 - -* Update engines for node 0.6 -* Be lstat-graceful on Windows -* first diff --git a/node_modules/har-schema/README.md b/node_modules/har-schema/README.md deleted file mode 100644 index cd0a28e1a7223..0000000000000 --- a/node_modules/har-schema/README.md +++ /dev/null @@ -1,49 +0,0 @@ -# HAR Schema [![version][npm-version]][npm-url] [![License][npm-license]][license-url] - -> JSON Schema for HTTP Archive ([HAR][spec]). - -[![Build Status][travis-image]][travis-url] -[![Downloads][npm-downloads]][npm-url] -[![Code Climate][codeclimate-quality]][codeclimate-url] -[![Coverage Status][codeclimate-coverage]][codeclimate-url] -[![Dependency Status][dependencyci-image]][dependencyci-url] -[![Dependencies][david-image]][david-url] - -## Install - -```bash -npm install --only=production --save har-schema -``` - -## Usage - -Compatible with any [JSON Schema validation tool][validator]. 
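Not part of the original README, but a rough sketch of what that usage looks like in practice. It assumes the package exports its schemas keyed by object type (`request`, `response`, `har`, …), uses `ajv` as the validator, and assumes the `request.json` lookup key matches the request schema's `$id`:

```javascript
// Rough sketch, not from the README. Assumes har-schema exports its schemas
// keyed by name and that your ajv version can compile the draft they use.
const Ajv = require('ajv')
const schemas = require('har-schema')

const ajv = new Ajv({ allErrors: true })
// the schemas reference each other via $ref, so register the whole set
ajv.addSchema(Object.keys(schemas).map(name => schemas[name]))

// 'request.json' is assumed here to match the request schema's $id
const ok = ajv.validate('request.json', {
  method: 'GET',
  url: 'https://example.com/',
  httpVersion: 'HTTP/1.1',
  cookies: [],
  headers: [],
  queryString: [],
  headersSize: -1,
  bodySize: -1
})
console.log(ok, ajv.errors)
```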
- ----- -> :copyright: [ahmadnassri.com](https://www.ahmadnassri.com/)  ·  -> License: [ISC][license-url]  ·  -> Github: [@ahmadnassri](https://github.com/ahmadnassri)  ·  -> Twitter: [@ahmadnassri](https://twitter.com/ahmadnassri) - -[license-url]: http://choosealicense.com/licenses/isc/ - -[travis-url]: https://travis-ci.org/ahmadnassri/har-schema -[travis-image]: https://img.shields.io/travis/ahmadnassri/har-schema.svg?style=flat-square - -[npm-url]: https://www.npmjs.com/package/har-schema -[npm-license]: https://img.shields.io/npm/l/har-schema.svg?style=flat-square -[npm-version]: https://img.shields.io/npm/v/har-schema.svg?style=flat-square -[npm-downloads]: https://img.shields.io/npm/dm/har-schema.svg?style=flat-square - -[codeclimate-url]: https://codeclimate.com/github/ahmadnassri/har-schema -[codeclimate-quality]: https://img.shields.io/codeclimate/github/ahmadnassri/har-schema.svg?style=flat-square -[codeclimate-coverage]: https://img.shields.io/codeclimate/coverage/github/ahmadnassri/har-schema.svg?style=flat-square - -[david-url]: https://david-dm.org/ahmadnassri/har-schema -[david-image]: https://img.shields.io/david/ahmadnassri/har-schema.svg?style=flat-square - -[dependencyci-url]: https://dependencyci.com/github/ahmadnassri/har-schema -[dependencyci-image]: https://dependencyci.com/github/ahmadnassri/har-schema/badge?style=flat-square - -[spec]: https://github.com/ahmadnassri/har-spec/blob/master/versions/1.2.md -[validator]: https://github.com/ahmadnassri/har-validator diff --git a/node_modules/har-validator/README.md b/node_modules/har-validator/README.md deleted file mode 100644 index ea944cc5c7bd8..0000000000000 --- a/node_modules/har-validator/README.md +++ /dev/null @@ -1,43 +0,0 @@ -# HAR Validator - -[![license][license-img]][license-url] -[![version][npm-img]][npm-url] -[![super linter][super-linter-img]][super-linter-url] -[![test][test-img]][test-url] -[![release][release-img]][release-url] - -[license-url]: LICENSE -[license-img]: https://badgen.net/github/license/ahmadnassri/node-har-validator - -[npm-url]: https://www.npmjs.com/package/har-validator -[npm-img]: https://badgen.net/npm/v/har-validator - -[super-linter-url]: https://github.com/ahmadnassri/node-har-validator/actions?query=workflow%3Asuper-linter -[super-linter-img]: https://github.com/ahmadnassri/node-har-validator/workflows/super-linter/badge.svg - -[test-url]: https://github.com/ahmadnassri/node-har-validator/actions?query=workflow%3Atest -[test-img]: https://github.com/ahmadnassri/node-har-validator/workflows/test/badge.svg - -[release-url]: https://github.com/ahmadnassri/node-har-validator/actions?query=workflow%3Arelease -[release-img]: https://github.com/ahmadnassri/node-har-validator/workflows/release/badge.svg - -> Extremely fast HTTP Archive ([HAR](https://github.com/ahmadnassri/har-spec/blob/master/versions/1.2.md)) validator using JSON Schema. - -## Install - -```bash -npm install har-validator -``` - -## CLI Usage - -Please refer to [`har-cli`](https://github.com/ahmadnassri/har-cli) for more info. - -## API - -**Note**: as of [`v2.0.0`](https://github.com/ahmadnassri/node-har-validator/releases/tag/v2.0.0) this module defaults to Promise based API. 
-_For backward compatibility with `v1.x` an [async/callback API](docs/async.md) is also provided_ - -- [async API](docs/async.md) -- [callback API](docs/async.md) -- [Promise API](docs/promise.md) _(default)_ diff --git a/node_modules/has-unicode/README.md b/node_modules/has-unicode/README.md deleted file mode 100644 index 5a03e5991c539..0000000000000 --- a/node_modules/has-unicode/README.md +++ /dev/null @@ -1,43 +0,0 @@ -has-unicode -=========== - -Try to guess if your terminal supports unicode - -```javascript -var hasUnicode = require("has-unicode") - -if (hasUnicode()) { - // the terminal probably has unicode support -} -``` -```javascript -var hasUnicode = require("has-unicode").tryHarder -hasUnicode(function(unicodeSupported) { - if (unicodeSupported) { - // the terminal probably has unicode support - } -}) -``` - -## Detecting Unicode - -What we actually detect is UTF-8 support, as that's what Node itself supports. -If you have a UTF-16 locale then you won't be detected as unicode capable. - -### Windows - -Since at least Windows 7, `cmd` and `powershell` have been unicode capable, -but unfortunately even then it's not guaranteed. In many localizations it -still uses legacy code pages and there's no facility short of running -programs or linking C++ that will let us detect this. As such, we -report any Windows installation as NOT unicode capable, and recommend -that you encourage your users to override this via config. - -### Unix Like Operating Systems - -We look at the environment variables `LC_ALL`, `LC_CTYPE`, and `LANG` in -that order. For `LC_ALL` and `LANG`, it looks for `.UTF-8` in the value. -For `LC_CTYPE` it looks to see if the value is `UTF-8`. This is sufficient -for most POSIX systems. While locale data can be put in `/etc/locale.conf` -as well, AFAIK it's always copied into the environment. - diff --git a/node_modules/has/README.md b/node_modules/has/README.md deleted file mode 100644 index 635e3a4baab00..0000000000000 --- a/node_modules/has/README.md +++ /dev/null @@ -1,18 +0,0 @@ -# has - -> Object.prototype.hasOwnProperty.call shortcut - -## Installation - -```sh -npm install --save has -``` - -## Usage - -```js -var has = require('has'); - -has({}, 'hasOwnProperty'); // false -has(Object.prototype, 'hasOwnProperty'); // true -``` diff --git a/node_modules/hosted-git-info/CHANGELOG.md b/node_modules/hosted-git-info/CHANGELOG.md deleted file mode 100644 index 3ffcacacc575c..0000000000000 --- a/node_modules/hosted-git-info/CHANGELOG.md +++ /dev/null @@ -1,185 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
- -<a name="3.0.8"></a> -## [3.0.8](https://github.com/npm/hosted-git-info/compare/v3.0.7...v3.0.8) (2021-01-28) - - -### Bug Fixes - -* simplify the regular expression for shortcut matching ([bede0dc](https://github.com/npm/hosted-git-info/commit/bede0dc)), closes [#76](https://github.com/npm/hosted-git-info/issues/76) - - - -<a name="3.0.7"></a> -## [3.0.7](https://github.com/npm/hosted-git-info/compare/v3.0.6...v3.0.7) (2020-10-15) - - -### Bug Fixes - -* correctly filter out urls for tarballs in gitlab ([eb5bd5a](https://github.com/npm/hosted-git-info/commit/eb5bd5a)), closes [#69](https://github.com/npm/hosted-git-info/issues/69) - - - -<a name="3.0.6"></a> -## [3.0.6](https://github.com/npm/hosted-git-info/compare/v3.0.5...v3.0.6) (2020-10-12) - - -### Bug Fixes - -* support to github gist legacy hash length ([c067102](https://github.com/npm/hosted-git-info/commit/c067102)), closes [#68](https://github.com/npm/hosted-git-info/issues/68) - - - -<a name="3.0.5"></a> -## [3.0.5](https://github.com/npm/hosted-git-info/compare/v3.0.4...v3.0.5) (2020-07-11) - - - -<a name="3.0.4"></a> -## [3.0.4](https://github.com/npm/hosted-git-info/compare/v3.0.3...v3.0.4) (2020-02-26) - - -### Bug Fixes - -* Do not pass scp-style URLs to the WhatWG url.URL ([0835306](https://github.com/npm/hosted-git-info/commit/0835306)), closes [#60](https://github.com/npm/hosted-git-info/issues/60) [#63](https://github.com/npm/hosted-git-info/issues/63) - - - -<a name="3.0.3"></a> -## [3.0.3](https://github.com/npm/hosted-git-info/compare/v3.0.2...v3.0.3) (2020-02-25) - - - -<a name="3.0.2"></a> -## [3.0.2](https://github.com/npm/hosted-git-info/compare/v3.0.1...v3.0.2) (2019-10-08) - - -### Bug Fixes - -* do not encodeURIComponent the domain ([3e5fbec](https://github.com/npm/hosted-git-info/commit/3e5fbec)), closes [#53](https://github.com/npm/hosted-git-info/issues/53) - - - -<a name="3.0.1"></a> -## [3.0.1](https://github.com/npm/hosted-git-info/compare/v3.0.0...v3.0.1) (2019-10-07) - - -### Bug Fixes - -* update pathmatch for gitlab ([e3e3054](https://github.com/npm/hosted-git-info/commit/e3e3054)), closes [#52](https://github.com/npm/hosted-git-info/issues/52) -* updated pathmatch for gitlab ([fa87af7](https://github.com/npm/hosted-git-info/commit/fa87af7)) - - - -<a name="3.0.0"></a> -# [3.0.0](https://github.com/npm/hosted-git-info/compare/v2.8.3...v3.0.0) (2019-08-12) - - -### Bug Fixes - -* **cache:** Switch to lru-cache to save ourselves from unlimited memory consumption ([37c2891](https://github.com/npm/hosted-git-info/commit/37c2891)), closes [#38](https://github.com/npm/hosted-git-info/issues/38) - - -### BREAKING CHANGES - -* **cache:** Drop support for node 0.x - - - -<a name="2.8.3"></a> -## [2.8.3](https://github.com/npm/hosted-git-info/compare/v2.8.2...v2.8.3) (2019-08-12) - - - -<a name="2.8.2"></a> -## [2.8.2](https://github.com/npm/hosted-git-info/compare/v2.8.1...v2.8.2) (2019-08-05) - - -### Bug Fixes - -* http protocol use sshurl by default ([3b1d629](https://github.com/npm/hosted-git-info/commit/3b1d629)), closes [#48](https://github.com/npm/hosted-git-info/issues/48) - - - -<a name="2.8.1"></a> -## [2.8.1](https://github.com/npm/hosted-git-info/compare/v2.8.0...v2.8.1) (2019-08-05) - - -### Bug Fixes - -* ignore noCommittish on tarball url generation ([5d4a8d7](https://github.com/npm/hosted-git-info/commit/5d4a8d7)) -* use gist tarball url that works for anonymous gists ([1692435](https://github.com/npm/hosted-git-info/commit/1692435)) - - - -<a name="2.8.0"></a> -# 
[2.8.0](https://github.com/npm/hosted-git-info/compare/v2.7.1...v2.8.0) (2019-08-05) - - -### Bug Fixes - -* Allow slashes in gitlab project section ([bbcf7b2](https://github.com/npm/hosted-git-info/commit/bbcf7b2)), closes [#46](https://github.com/npm/hosted-git-info/issues/46) [#43](https://github.com/npm/hosted-git-info/issues/43) -* **git-host:** disallow URI-encoded slash (%2F) in `path` ([3776fa5](https://github.com/npm/hosted-git-info/commit/3776fa5)), closes [#44](https://github.com/npm/hosted-git-info/issues/44) -* **gitlab:** Do not URL encode slashes in project name for GitLab https URL ([cbf04f9](https://github.com/npm/hosted-git-info/commit/cbf04f9)), closes [#47](https://github.com/npm/hosted-git-info/issues/47) -* do not allow invalid gist urls ([d5cf830](https://github.com/npm/hosted-git-info/commit/d5cf830)) -* **cache:** Switch to lru-cache to save ourselves from unlimited memory consumption ([e518222](https://github.com/npm/hosted-git-info/commit/e518222)), closes [#38](https://github.com/npm/hosted-git-info/issues/38) - - -### Features - -* give these objects a name ([60abaea](https://github.com/npm/hosted-git-info/commit/60abaea)) - - - -<a name="2.7.1"></a> -## [2.7.1](https://github.com/npm/hosted-git-info/compare/v2.7.0...v2.7.1) (2018-07-07) - - -### Bug Fixes - -* **index:** Guard against non-string types ([5bc580d](https://github.com/npm/hosted-git-info/commit/5bc580d)) -* **parse:** Crash on strings that parse to having no host ([c931482](https://github.com/npm/hosted-git-info/commit/c931482)), closes [#35](https://github.com/npm/hosted-git-info/issues/35) - - - -<a name="2.7.0"></a> -# [2.7.0](https://github.com/npm/hosted-git-info/compare/v2.6.1...v2.7.0) (2018-07-06) - - -### Bug Fixes - -* **github tarball:** update github tarballtemplate ([6efd582](https://github.com/npm/hosted-git-info/commit/6efd582)), closes [#34](https://github.com/npm/hosted-git-info/issues/34) -* **gitlab docs:** switched to lowercase anchors for readmes ([701bcd1](https://github.com/npm/hosted-git-info/commit/701bcd1)) - - -### Features - -* **all:** Support www. 
prefixes on hostnames ([3349575](https://github.com/npm/hosted-git-info/commit/3349575)), closes [#32](https://github.com/npm/hosted-git-info/issues/32) - - - -<a name="2.6.1"></a> -## [2.6.1](https://github.com/npm/hosted-git-info/compare/v2.6.0...v2.6.1) (2018-06-25) - -### Bug Fixes - -* **Revert:** "compat: remove Object.assign fallback ([#25](https://github.com/npm/hosted-git-info/issues/25))" ([cce5a62](https://github.com/npm/hosted-git-info/commit/cce5a62)) -* **Revert:** "git-host: fix forgotten extend()" ([a815ec9](https://github.com/npm/hosted-git-info/commit/a815ec9)) - - - -<a name="2.6.0"></a> -# [2.6.0](https://github.com/npm/hosted-git-info/compare/v2.5.0...v2.6.0) (2018-03-07) - - -### Bug Fixes - -* **compat:** remove Object.assign fallback ([#25](https://github.com/npm/hosted-git-info/issues/25)) ([627ab55](https://github.com/npm/hosted-git-info/commit/627ab55)) -* **git-host:** fix forgotten extend() ([eba1f7b](https://github.com/npm/hosted-git-info/commit/eba1f7b)) - - -### Features - -* **browse:** fragment support for browse() ([#28](https://github.com/npm/hosted-git-info/issues/28)) ([cd5e5bb](https://github.com/npm/hosted-git-info/commit/cd5e5bb)) diff --git a/node_modules/hosted-git-info/README.md b/node_modules/hosted-git-info/README.md deleted file mode 100644 index 7b723f6b9e213..0000000000000 --- a/node_modules/hosted-git-info/README.md +++ /dev/null @@ -1,133 +0,0 @@ -# hosted-git-info - -This will let you identify and transform various git hosts URLs between -protocols. It also can tell you what the URL is for the raw path for -particular file for direct access without git. - -## Example - -```javascript -var hostedGitInfo = require("hosted-git-info") -var info = hostedGitInfo.fromUrl("git@github.com:npm/hosted-git-info.git", opts) -/* info looks like: -{ - type: "github", - domain: "github.com", - user: "npm", - project: "hosted-git-info" -} -*/ -``` - -If the URL can't be matched with a git host, `null` will be returned. We -can match git, ssh and https urls. Additionally, we can match ssh connect -strings (`git@github.com:npm/hosted-git-info`) and shortcuts (eg, -`github:npm/hosted-git-info`). Github specifically, is detected in the case -of a third, unprefixed, form: `npm/hosted-git-info`. - -If it does match, the returned object has properties of: - -* info.type -- The short name of the service -* info.domain -- The domain for git protocol use -* info.user -- The name of the user/org on the git host -* info.project -- The name of the project on the git host - -## Version Contract - -The major version will be bumped any time… - -* The constructor stops accepting URLs that it previously accepted. -* A method is removed. -* A method can no longer accept the number and type of arguments it previously accepted. -* A method can return a different type than it currently returns. - -Implications: - -* I do not consider the specific format of the urls returned from, say - `.https()` to be a part of the contract. The contract is that it will - return a string that can be used to fetch the repo via HTTPS. But what - that string looks like, specifically, can change. -* Dropping support for a hosted git provider would constitute a breaking - change. - -## Usage - -### var info = hostedGitInfo.fromUrl(gitSpecifier[, options]) - -* *gitSpecifer* is a URL of a git repository or a SCP-style specifier of one. -* *options* is an optional object. It can have the following properties: - * *noCommittish* — If true then committishes won't be included in generated URLs. 
- * *noGitPlus* — If true then `git+` won't be prefixed on URLs. - -## Methods - -All of the methods take the same options as the `fromUrl` factory. Options -provided to a method override those provided to the constructor. - -* info.file(path, opts) - -Given the path of a file relative to the repository, returns a URL for -directly fetching it from the githost. If no committish was set then -`master` will be used as the default. - -For example `hostedGitInfo.fromUrl("git@github.com:npm/hosted-git-info.git#v1.0.0").file("package.json")` -would return `https://raw.githubusercontent.com/npm/hosted-git-info/v1.0.0/package.json` - -* info.shortcut(opts) - -eg, `github:npm/hosted-git-info` - -* info.browse(path, fragment, opts) - -eg, `https://github.com/npm/hosted-git-info/tree/v1.2.0`, -`https://github.com/npm/hosted-git-info/tree/v1.2.0/package.json`, -`https://github.com/npm/hosted-git-info/tree/v1.2.0/REAMDE.md#supported-hosts` - -* info.bugs(opts) - -eg, `https://github.com/npm/hosted-git-info/issues` - -* info.docs(opts) - -eg, `https://github.com/npm/hosted-git-info/tree/v1.2.0#readme` - -* info.https(opts) - -eg, `git+https://github.com/npm/hosted-git-info.git` - -* info.sshurl(opts) - -eg, `git+ssh://git@github.com/npm/hosted-git-info.git` - -* info.ssh(opts) - -eg, `git@github.com:npm/hosted-git-info.git` - -* info.path(opts) - -eg, `npm/hosted-git-info` - -* info.tarball(opts) - -eg, `https://github.com/npm/hosted-git-info/archive/v1.2.0.tar.gz` - -* info.getDefaultRepresentation() - -Returns the default output type. The default output type is based on the -string you passed in to be parsed - -* info.toString(opts) - -Uses the getDefaultRepresentation to call one of the other methods to get a URL for -this resource. As such `hostedGitInfo.fromUrl(url).toString()` will give -you a normalized version of the URL that still uses the same protocol. - -Shortcuts will still be returned as shortcuts, but the special case github -form of `org/project` will be normalized to `github:org/project`. - -SSH connect strings will be normalized into `git+ssh` URLs. - -## Supported hosts - -Currently this supports Github, Bitbucket and Gitlab. Pull requests for -additional hosts welcome. diff --git a/node_modules/hosted-git-info/git-host-info.js b/node_modules/hosted-git-info/git-host-info.js index da3348fa7b817..d4919344c77bf 100644 --- a/node_modules/hosted-git-info/git-host-info.js +++ b/node_modules/hosted-git-info/git-host-info.js @@ -1,79 +1,154 @@ 'use strict' +const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : '' +const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : '' -var gitHosts = module.exports = { - github: { - // First two are insecure and generally shouldn't be used any more, but - // they are still supported. 
- 'protocols': [ 'git', 'http', 'git+ssh', 'git+https', 'ssh', 'https' ], - 'domain': 'github.com', - 'treepath': 'tree', - 'filetemplate': 'https://{auth@}raw.githubusercontent.com/{user}/{project}/{committish}/{path}', - 'bugstemplate': 'https://{domain}/{user}/{project}/issues', - 'gittemplate': 'git://{auth@}{domain}/{user}/{project}.git{#committish}', - 'tarballtemplate': 'https://codeload.{domain}/{user}/{project}/tar.gz/{committish}' - }, - bitbucket: { - 'protocols': [ 'git+ssh', 'git+https', 'ssh', 'https' ], - 'domain': 'bitbucket.org', - 'treepath': 'src', - 'tarballtemplate': 'https://{domain}/{user}/{project}/get/{committish}.tar.gz' - }, - gitlab: { - 'protocols': [ 'git+ssh', 'git+https', 'ssh', 'https' ], - 'domain': 'gitlab.com', - 'treepath': 'tree', - 'bugstemplate': 'https://{domain}/{user}/{project}/issues', - 'httpstemplate': 'git+https://{auth@}{domain}/{user}/{projectPath}.git{#committish}', - 'tarballtemplate': 'https://{domain}/{user}/{project}/repository/archive.tar.gz?ref={committish}', - 'pathmatch': /^\/([^/]+)\/((?!.*(\/-\/|\/repository(\/[^/]+)?\/archive\.tar\.gz)).*?)(?:\.git|\/)?$/ - }, - gist: { - 'protocols': [ 'git', 'git+ssh', 'git+https', 'ssh', 'https' ], - 'domain': 'gist.github.com', - 'pathmatch': /^[/](?:([^/]+)[/])?([a-z0-9]{7,})(?:[.]git)?$/, - 'filetemplate': 'https://gist.githubusercontent.com/{user}/{project}/raw{/committish}/{path}', - 'bugstemplate': 'https://{domain}/{project}', - 'gittemplate': 'git://{domain}/{project}.git{#committish}', - 'sshtemplate': 'git@{domain}:/{project}.git{#committish}', - 'sshurltemplate': 'git+ssh://git@{domain}/{project}.git{#committish}', - 'browsetemplate': 'https://{domain}/{project}{/committish}', - 'browsefiletemplate': 'https://{domain}/{project}{/committish}{#path}', - 'docstemplate': 'https://{domain}/{project}{/committish}', - 'httpstemplate': 'git+https://{domain}/{project}.git{#committish}', - 'shortcuttemplate': '{type}:{project}{#committish}', - 'pathtemplate': '{project}{#committish}', - 'tarballtemplate': 'https://codeload.github.com/gist/{project}/tar.gz/{committish}', - 'hashformat': function (fragment) { - return 'file-' + formatHashFragment(fragment) +const defaults = { + sshtemplate: ({ domain, user, project, committish }) => `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, user, project, committish }) => `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + browsetemplate: ({ domain, user, project, committish, treepath }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`, + browsefiletemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'master')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + docstemplate: ({ domain, user, project, treepath, committish }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, + httpstemplate: ({ auth, domain, user, project, committish }) => `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ domain, user, project, committish, path }) => `https://${domain}/${user}/${project}/raw/${maybeEncode(committish) || 'master'}/${path}`, + shortcuttemplate: ({ type, user, project, committish }) => `${type}:${user}/${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ user, project, committish }) 
=> `${user}/${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, user, project }) => `https://${domain}/${user}/${project}/issues`, + hashformat: formatHashFragment +} + +const gitHosts = {} +gitHosts.github = Object.assign({}, defaults, { + // First two are insecure and generally shouldn't be used any more, but + // they are still supported. + protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'github.com', + treepath: 'tree', + filetemplate: ({ auth, user, project, committish, path }) => `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish) || 'master'}/${path}`, + gittemplate: ({ auth, domain, user, project, committish }) => `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish) || 'master'}`, + extract: (url) => { + let [, user, project, type, committish] = url.pathname.split('/', 5) + if (type && type !== 'tree') { + return + } + + if (!type) { + committish = url.hash.slice(1) + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return } + + return { user, project, committish } } -} +}) -var gitHostDefaults = { - 'sshtemplate': 'git@{domain}:{user}/{project}.git{#committish}', - 'sshurltemplate': 'git+ssh://git@{domain}/{user}/{project}.git{#committish}', - 'browsetemplate': 'https://{domain}/{user}/{project}{/tree/committish}', - 'browsefiletemplate': 'https://{domain}/{user}/{project}/{treepath}/{committish}/{path}{#fragment}', - 'docstemplate': 'https://{domain}/{user}/{project}{/tree/committish}#readme', - 'httpstemplate': 'git+https://{auth@}{domain}/{user}/{project}.git{#committish}', - 'filetemplate': 'https://{domain}/{user}/{project}/raw/{committish}/{path}', - 'shortcuttemplate': '{type}:{user}/{project}{#committish}', - 'pathtemplate': '{user}/{project}{#committish}', - 'pathmatch': /^[/]([^/]+)[/]([^/]+?)(?:[.]git|[/])?$/, - 'hashformat': formatHashFragment -} +gitHosts.bitbucket = Object.assign({}, defaults, { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'bitbucket.org', + treepath: 'src', + tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/get/${maybeEncode(committish) || 'master'}.tar.gz`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (['get'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + } +}) + +gitHosts.gitlab = Object.assign({}, defaults, { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gitlab.com', + treepath: 'tree', + httpstemplate: ({ auth, domain, user, project, committish }) => `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish) || 'master'}`, + extract: (url) => { + const path = url.pathname.slice(1) + if (path.includes('/-/') || path.includes('/archive.tar.gz')) { + return + } + + const segments = path.split('/') + let project = segments.pop() + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + 
const user = segments.join('/') + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + } +}) + +gitHosts.gist = Object.assign({}, defaults, { + protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gist.github.com', + sshtemplate: ({ domain, project, committish }) => `git@${domain}:${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, project, committish }) => `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`, + browsetemplate: ({ domain, project, committish }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + browsefiletemplate: ({ domain, project, committish, path, hashformat }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, + docstemplate: ({ domain, project, committish }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + httpstemplate: ({ domain, project, committish }) => `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ user, project, committish, path }) => `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`, + shortcuttemplate: ({ type, project, committish }) => `${type}:${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ project, committish }) => `${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, project }) => `https://${domain}/${project}`, + gittemplate: ({ domain, project, committish }) => `git://${domain}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ project, committish }) => `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish) || 'master'}`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (aux === 'raw') { + return + } + + if (!project) { + if (!user) { + return + } + + project = user + user = null + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } -Object.keys(gitHosts).forEach(function (name) { - Object.keys(gitHostDefaults).forEach(function (key) { - if (gitHosts[name][key]) return - gitHosts[name][key] = gitHostDefaults[key] - }) - gitHosts[name].protocols_re = RegExp('^(' + - gitHosts[name].protocols.map(function (protocol) { - return protocol.replace(/([\\+*{}()[\]$^|])/g, '\\$1') - }).join('|') + '):$') + return { user, project, committish: url.hash.slice(1) } + }, + hashformat: function (fragment) { + return fragment && 'file-' + formatHashFragment(fragment) + } }) +const names = Object.keys(gitHosts) +gitHosts.byShortcut = {} +gitHosts.byDomain = {} +for (const name of names) { + gitHosts.byShortcut[`${name}:`] = name + gitHosts.byDomain[gitHosts[name].domain] = name +} + function formatHashFragment (fragment) { return fragment.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-') } + +module.exports = gitHosts diff --git a/node_modules/hosted-git-info/git-host.js b/node_modules/hosted-git-info/git-host.js index f9b1ec7456320..8a975e92e58bb 100644 --- a/node_modules/hosted-git-info/git-host.js +++ b/node_modules/hosted-git-info/git-host.js @@ -1,156 +1,110 @@ 'use strict' -var gitHosts = require('./git-host-info.js') -/* eslint-disable node/no-deprecated-api */ - -// copy-pasta util._extend from node's source, to avoid pulling -// the whole util module into peoples' webpack bundles. 
-/* istanbul ignore next */ -var extend = Object.assign || function _extend (target, source) { - // Don't do anything if source isn't an object - if (source === null || typeof source !== 'object') return target - - const keys = Object.keys(source) - let i = keys.length - while (i--) { - target[keys[i]] = source[keys[i]] - } - return target -} +const gitHosts = require('./git-host-info.js') + +class GitHost { + constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) { + Object.assign(this, gitHosts[type]) + this.type = type + this.user = user + this.auth = auth + this.project = project + this.committish = committish + this.default = defaultRepresentation + this.opts = opts + } -module.exports = GitHost -function GitHost (type, user, auth, project, committish, defaultRepresentation, opts) { - var gitHostInfo = this - gitHostInfo.type = type - Object.keys(gitHosts[type]).forEach(function (key) { - gitHostInfo[key] = gitHosts[type][key] - }) - gitHostInfo.user = user - gitHostInfo.auth = auth - gitHostInfo.project = project - gitHostInfo.committish = committish - gitHostInfo.default = defaultRepresentation - gitHostInfo.opts = opts || {} -} + hash () { + return this.committish ? `#${this.committish}` : '' + } -GitHost.prototype.hash = function () { - return this.committish ? '#' + this.committish : '' -} + ssh (opts) { + return this._fill(this.sshtemplate, opts) + } + + _fill (template, opts) { + if (typeof template === 'function') { + const options = { ...this, ...this.opts, ...opts } + + // the path should always be set so we don't end up with 'undefined' in urls + if (!options.path) { + options.path = '' + } + + // template functions will insert the leading slash themselves + if (options.path.startsWith('/')) { + options.path = options.path.slice(1) + } -GitHost.prototype._fill = function (template, opts) { - if (!template) return - var vars = extend({}, opts) - vars.path = vars.path ? vars.path.replace(/^[/]+/g, '') : '' - opts = extend(extend({}, this.opts), opts) - var self = this - Object.keys(this).forEach(function (key) { - if (self[key] != null && vars[key] == null) vars[key] = self[key] - }) - var rawAuth = vars.auth - var rawcommittish = vars.committish - var rawFragment = vars.fragment - var rawPath = vars.path - var rawProject = vars.project - Object.keys(vars).forEach(function (key) { - var value = vars[key] - if ((key === 'path' || key === 'project') && typeof value === 'string') { - vars[key] = value.split('/').map(function (pathComponent) { - return encodeURIComponent(pathComponent) - }).join('/') - } else if (key !== 'domain') { - vars[key] = encodeURIComponent(value) + if (options.noCommittish) { + options.committish = null + } + + const result = template(options) + return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result } - }) - vars['auth@'] = rawAuth ? rawAuth + '@' : '' - vars['#fragment'] = rawFragment ? '#' + this.hashformat(rawFragment) : '' - vars.fragment = vars.fragment ? vars.fragment : '' - vars['#path'] = rawPath ? '#' + this.hashformat(rawPath) : '' - vars['/path'] = vars.path ? '/' + vars.path : '' - vars.projectPath = rawProject.split('/').map(encodeURIComponent).join('/') - if (opts.noCommittish) { - vars['#committish'] = '' - vars['/tree/committish'] = '' - vars['/committish'] = '' - vars.committish = '' - } else { - vars['#committish'] = rawcommittish ? '#' + rawcommittish : '' - vars['/tree/committish'] = vars.committish - ? 
'/' + vars.treepath + '/' + vars.committish - : '' - vars['/committish'] = vars.committish ? '/' + vars.committish : '' - vars.committish = vars.committish || 'master' - } - var res = template - Object.keys(vars).forEach(function (key) { - res = res.replace(new RegExp('[{]' + key + '[}]', 'g'), vars[key]) - }) - if (opts.noGitPlus) { - return res.replace(/^git[+]/, '') - } else { - return res + + return null } -} -GitHost.prototype.ssh = function (opts) { - return this._fill(this.sshtemplate, opts) -} + sshurl (opts) { + return this._fill(this.sshurltemplate, opts) + } -GitHost.prototype.sshurl = function (opts) { - return this._fill(this.sshurltemplate, opts) -} + browse (path, fragment, opts) { + // not a string, treat path as opts + if (typeof path !== 'string') { + return this._fill(this.browsetemplate, path) + } -GitHost.prototype.browse = function (P, F, opts) { - if (typeof P === 'string') { - if (typeof F !== 'string') { - opts = F - F = null + if (typeof fragment !== 'string') { + opts = fragment + fragment = null } - return this._fill(this.browsefiletemplate, extend({ - fragment: F, - path: P - }, opts)) - } else { - return this._fill(this.browsetemplate, P) + return this._fill(this.browsefiletemplate, { ...opts, fragment, path }) } -} -GitHost.prototype.docs = function (opts) { - return this._fill(this.docstemplate, opts) -} + docs (opts) { + return this._fill(this.docstemplate, opts) + } -GitHost.prototype.bugs = function (opts) { - return this._fill(this.bugstemplate, opts) -} + bugs (opts) { + return this._fill(this.bugstemplate, opts) + } -GitHost.prototype.https = function (opts) { - return this._fill(this.httpstemplate, opts) -} + https (opts) { + return this._fill(this.httpstemplate, opts) + } -GitHost.prototype.git = function (opts) { - return this._fill(this.gittemplate, opts) -} + git (opts) { + return this._fill(this.gittemplate, opts) + } -GitHost.prototype.shortcut = function (opts) { - return this._fill(this.shortcuttemplate, opts) -} + shortcut (opts) { + return this._fill(this.shortcuttemplate, opts) + } -GitHost.prototype.path = function (opts) { - return this._fill(this.pathtemplate, opts) -} + path (opts) { + return this._fill(this.pathtemplate, opts) + } -GitHost.prototype.tarball = function (opts_) { - var opts = extend({}, opts_, { noCommittish: false }) - return this._fill(this.tarballtemplate, opts) -} + tarball (opts) { + return this._fill(this.tarballtemplate, { ...opts, noCommittish: false }) + } -GitHost.prototype.file = function (P, opts) { - return this._fill(this.filetemplate, extend({ path: P }, opts)) -} + file (path, opts) { + return this._fill(this.filetemplate, { ...opts, path }) + } -GitHost.prototype.getDefaultRepresentation = function () { - return this.default -} + getDefaultRepresentation () { + return this.default + } + + toString (opts) { + if (this.default && typeof this[this.default] === 'function') { + return this[this.default](opts) + } -GitHost.prototype.toString = function (opts) { - if (this.default && typeof this[this.default] === 'function') return this[this.default](opts) - return this.sshurl(opts) + return this.sshurl(opts) + } } +module.exports = GitHost diff --git a/node_modules/hosted-git-info/index.js b/node_modules/hosted-git-info/index.js index 8b3eaba3da7fb..f35c570c46b59 100644 --- a/node_modules/hosted-git-info/index.js +++ b/node_modules/hosted-git-info/index.js @@ -1,11 +1,11 @@ 'use strict' -var url = require('url') -var gitHosts = require('./git-host-info.js') -var GitHost = module.exports = 
require('./git-host.js') -var LRU = require('lru-cache') -var cache = new LRU({max: 1000}) +const url = require('url') +const gitHosts = require('./git-host-info.js') +const GitHost = module.exports = require('./git-host.js') +const LRU = require('lru-cache') +const cache = new LRU({ max: 1000 }) -var protocolToRepresentationMap = { +const protocolToRepresentationMap = { 'git+ssh:': 'sshurl', 'git+https:': 'https', 'ssh:': 'sshurl', @@ -16,7 +16,7 @@ function protocolToRepresentation (protocol) { return protocolToRepresentationMap[protocol] || protocol.slice(0, -1) } -var authProtocols = { +const authProtocols = { 'git:': true, 'https:': true, 'git+https:': true, @@ -24,9 +24,14 @@ var authProtocols = { 'git+http:': true } +const knownProtocols = Object.keys(gitHosts.byShortcut).concat(['http:', 'https:', 'git:', 'git+ssh:', 'git+https:', 'ssh:']) + module.exports.fromUrl = function (giturl, opts) { - if (typeof giturl !== 'string') return - var key = giturl + JSON.stringify(opts || {}) + if (typeof giturl !== 'string') { + return + } + + const key = giturl + JSON.stringify(opts || {}) if (!cache.has(key)) { cache.set(key, fromUrl(giturl, opts)) @@ -36,111 +41,197 @@ module.exports.fromUrl = function (giturl, opts) { } function fromUrl (giturl, opts) { - if (giturl == null || giturl === '') return - var url = fixupUnqualifiedGist( - isGitHubShorthand(giturl) ? 'github:' + giturl : giturl - ) - var parsed = parseGitUrl(url) - var shortcutMatch = url.match(/^([^:]+):(?:[^@]+@)?(?:([^/]*)\/)?([^#]+)/) - var matches = Object.keys(gitHosts).map(function (gitHostName) { - try { - var gitHostInfo = gitHosts[gitHostName] - var auth = null - if (parsed.auth && authProtocols[parsed.protocol]) { - auth = parsed.auth + if (!giturl) { + return + } + + const url = isGitHubShorthand(giturl) ? 'github:' + giturl : correctProtocol(giturl) + const parsed = parseGitUrl(url) + if (!parsed) { + return parsed + } + + const gitHostShortcut = gitHosts.byShortcut[parsed.protocol] + const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.') ? parsed.hostname.slice(4) : parsed.hostname] + const gitHostName = gitHostShortcut || gitHostDomain + if (!gitHostName) { + return + } + + const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain] + let auth = null + if (authProtocols[parsed.protocol] && (parsed.username || parsed.password)) { + auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}` + } + + let committish = null + let user = null + let project = null + let defaultRepresentation = null + + try { + if (gitHostShortcut) { + let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname + const firstAt = pathname.indexOf('@') + // we ignore auth for shortcuts, so just trim it out + if (firstAt > -1) { + pathname = pathname.slice(firstAt + 1) } - var committish = parsed.hash ? 
decodeURIComponent(parsed.hash.substr(1)) : null - var user = null - var project = null - var defaultRepresentation = null - if (shortcutMatch && shortcutMatch[1] === gitHostName) { - user = shortcutMatch[2] && decodeURIComponent(shortcutMatch[2]) - project = decodeURIComponent(shortcutMatch[3].replace(/\.git$/, '')) - defaultRepresentation = 'shortcut' - } else { - if (parsed.host && parsed.host !== gitHostInfo.domain && parsed.host.replace(/^www[.]/, '') !== gitHostInfo.domain) return - if (!gitHostInfo.protocols_re.test(parsed.protocol)) return - if (!parsed.path) return - var pathmatch = gitHostInfo.pathmatch - var matched = parsed.path.match(pathmatch) - if (!matched) return - /* istanbul ignore else */ - if (matched[1] !== null && matched[1] !== undefined) { - user = decodeURIComponent(matched[1].replace(/^:/, '')) + + const lastSlash = pathname.lastIndexOf('/') + if (lastSlash > -1) { + user = decodeURIComponent(pathname.slice(0, lastSlash)) + // we want nulls only, never empty strings + if (!user) { + user = null } - project = decodeURIComponent(matched[2]) - defaultRepresentation = protocolToRepresentation(parsed.protocol) + project = decodeURIComponent(pathname.slice(lastSlash + 1)) + } else { + project = decodeURIComponent(pathname) + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (parsed.hash) { + committish = decodeURIComponent(parsed.hash.slice(1)) + } + + defaultRepresentation = 'shortcut' + } else { + if (!gitHostInfo.protocols.includes(parsed.protocol)) { + return } - return new GitHost(gitHostName, user, auth, project, committish, defaultRepresentation, opts) - } catch (ex) { - /* istanbul ignore else */ - if (ex instanceof URIError) { - } else throw ex + + const segments = gitHostInfo.extract(parsed) + if (!segments) { + return + } + + user = segments.user && decodeURIComponent(segments.user) + project = decodeURIComponent(segments.project) + committish = decodeURIComponent(segments.committish) + defaultRepresentation = protocolToRepresentation(parsed.protocol) } - }).filter(function (gitHostInfo) { return gitHostInfo }) - if (matches.length !== 1) return - return matches[0] -} + } catch (err) { + /* istanbul ignore else */ + if (err instanceof URIError) { + return + } else { + throw err + } + } -function isGitHubShorthand (arg) { - // Note: This does not fully test the git ref format. - // See https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html - // - // The only way to do this properly would be to shell out to - // git-check-ref-format, and as this is a fast sync function, - // we don't want to do that. Just let git fail if it turns - // out that the commit-ish is invalid. - // GH usernames cannot start with . 
or - - return /^[^:@%/\s.-][^:@%/\s]*[/][^:@\s/%]+(?:#.*)?$/.test(arg) + return new GitHost(gitHostName, user, auth, project, committish, defaultRepresentation, opts) } -function fixupUnqualifiedGist (giturl) { - // necessary for round-tripping gists - var parsed = url.parse(giturl) - if (parsed.protocol === 'gist:' && parsed.host && !parsed.path) { - return parsed.protocol + '/' + parsed.host - } else { - return giturl +// accepts input like git:github.com:user/repo and inserts the // after the first : +const correctProtocol = (arg) => { + const firstColon = arg.indexOf(':') + const proto = arg.slice(0, firstColon + 1) + if (knownProtocols.includes(proto)) { + return arg } -} -function parseGitUrl (giturl) { - var matched = giturl.match(/^([^@]+)@([^:/]+):[/]?((?:[^/]+[/])?[^/]+?)(?:[.]git)?(#.*)?$/) - if (!matched) { - var legacy = url.parse(giturl) - if (legacy.auth) { - // git urls can be in the form of scp-style/ssh-connect strings, like - // git+ssh://user@host.com:some/path, which the legacy url parser - // supports, but WhatWG url.URL class does not. However, the legacy - // parser de-urlencodes the username and password, so something like - // https://user%3An%40me:p%40ss%3Aword@x.com/ becomes - // https://user:n@me:p@ss:word@x.com/ which is all kinds of wrong. - // Pull off just the auth and host, so we dont' get the confusing - // scp-style URL, then pass that to the WhatWG parser to get the - // auth properly escaped. - const authmatch = giturl.match(/[^@]+@[^:/]+/) - /* istanbul ignore else - this should be impossible */ - if (authmatch) { - var whatwg = new url.URL(authmatch[0]) - legacy.auth = whatwg.username || '' - if (whatwg.password) legacy.auth += ':' + whatwg.password - } + const firstAt = arg.indexOf('@') + if (firstAt > -1) { + if (firstAt > firstColon) { + return `git+ssh://${arg}` + } else { + return arg } - return legacy } - return { - protocol: 'git+ssh:', - slashes: true, - auth: matched[1], - host: matched[2], - port: null, - hostname: matched[2], - hash: matched[4], - search: null, - query: null, - pathname: '/' + matched[3], - path: '/' + matched[3], - href: 'git+ssh://' + matched[1] + '@' + matched[2] + - '/' + matched[3] + (matched[4] || '') + + const doubleSlash = arg.indexOf('//') + if (doubleSlash === firstColon + 1) { + return arg + } + + return arg.slice(0, firstColon + 1) + '//' + arg.slice(firstColon + 1) +} + +// look for github shorthand inputs, such as npm/cli +const isGitHubShorthand = (arg) => { + // it cannot contain whitespace before the first # + // it cannot start with a / because that's probably an absolute file path + // but it must include a slash since repos are username/repository + // it cannot start with a . 
because that's probably a relative file path + // it cannot start with an @ because that's a scoped package if it passes the other tests + // it cannot contain a : before a # because that tells us that there's a protocol + // a second / may not exist before a # + const firstHash = arg.indexOf('#') + const firstSlash = arg.indexOf('/') + const secondSlash = arg.indexOf('/', firstSlash + 1) + const firstColon = arg.indexOf(':') + const firstSpace = /\s/.exec(arg) + const firstAt = arg.indexOf('@') + + const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash) + const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash) + const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash) + const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash) + const hasSlash = firstSlash > 0 + // if a # is found, what we really want to know is that the character immediately before # is not a / + const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/') + const doesNotStartWithDot = !arg.startsWith('.') + + return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash && doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash && secondSlashOnlyAfterHash +} + +// attempt to correct an scp style url so that it will parse with `new URL()` +const correctUrl = (giturl) => { + const firstAt = giturl.indexOf('@') + const lastHash = giturl.lastIndexOf('#') + let firstColon = giturl.indexOf(':') + let lastColon = giturl.lastIndexOf(':', lastHash > -1 ? lastHash : Infinity) + + let corrected + if (lastColon > firstAt) { + // the last : comes after the first @ (or there is no @) + // like it would in: + // proto://hostname.com:user/repo + // username@hostname.com:user/repo + // :password@hostname.com:user/repo + // username:password@hostname.com:user/repo + // proto://username@hostname.com:user/repo + // proto://:password@hostname.com:user/repo + // proto://username:password@hostname.com:user/repo + // then we replace the last : with a / to create a valid path + corrected = giturl.slice(0, lastColon) + '/' + giturl.slice(lastColon + 1) + // // and we find our new : positions + firstColon = corrected.indexOf(':') + lastColon = corrected.lastIndexOf(':') + } + + if (firstColon === -1 && giturl.indexOf('//') === -1) { + // we have no : at all + // as it would be in: + // username@hostname.com/user/repo + // then we prepend a protocol + corrected = `git+ssh://${corrected}` + } + + return corrected +} + +// try to parse the url as its given to us, if that throws +// then we try to clean the url and parse that result instead +// THIS FUNCTION SHOULD NEVER THROW +const parseGitUrl = (giturl) => { + let result + try { + result = new url.URL(giturl) + } catch (err) {} + + if (result) { + return result } + + const correctedUrl = correctUrl(giturl) + try { + result = new url.URL(correctedUrl) + } catch (err) {} + + return result } diff --git a/node_modules/hosted-git-info/package.json b/node_modules/hosted-git-info/package.json index 32712269f0427..b7e2ee28e5b11 100644 --- a/node_modules/hosted-git-info/package.json +++ b/node_modules/hosted-git-info/package.json @@ -1,7 +1,7 @@ { "name": "hosted-git-info", - "version": "3.0.8", - "description": "Provides metadata and conversions from repository urls for Github, Bitbucket and Gitlab", + "version": "4.0.2", + "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab", "main": 
"index.js", "repository": { "type": "git", @@ -20,20 +20,21 @@ }, "homepage": "https://github.com/npm/hosted-git-info", "scripts": { - "prerelease": "npm t", - "postrelease": "npm publish && git push --follow-tags", "posttest": "standard", - "release": "standard-version -s", - "test:coverage": "tap --coverage-report=html -J --100 --no-esm test/*.js", - "test": "tap -J --100 --no-esm test/*.js" + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preversion": "npm test", + "snap": "tap", + "test": "tap", + "test:coverage": "tap --coverage-report=html" }, "dependencies": { "lru-cache": "^6.0.0" }, "devDependencies": { - "standard": "^11.0.1", - "standard-version": "^4.4.0", - "tap": "^12.7.0" + "standard": "^16.0.3", + "standard-version": "^9.1.0", + "tap": "^14.11.0" }, "files": [ "index.js", @@ -42,5 +43,10 @@ ], "engines": { "node": ">=10" + }, + "tap": { + "color": 1, + "coverage": true, + "esm": false } } diff --git a/node_modules/http-cache-semantics/README.md b/node_modules/http-cache-semantics/README.md deleted file mode 100644 index 685aa55dd3a4b..0000000000000 --- a/node_modules/http-cache-semantics/README.md +++ /dev/null @@ -1,203 +0,0 @@ -# Can I cache this? [![Build Status](https://travis-ci.org/kornelski/http-cache-semantics.svg?branch=master)](https://travis-ci.org/kornelski/http-cache-semantics) - -`CachePolicy` tells when responses can be reused from a cache, taking into account [HTTP RFC 7234](http://httpwg.org/specs/rfc7234.html) rules for user agents and shared caches. -It also implements [RFC 5861](https://tools.ietf.org/html/rfc5861), implementing `stale-if-error` and `stale-while-revalidate`. -It's aware of many tricky details such as the `Vary` header, proxy revalidation, and authenticated responses. - -## Usage - -Cacheability of an HTTP response depends on how it was requested, so both `request` and `response` are required to create the policy. - -```js -const policy = new CachePolicy(request, response, options); - -if (!policy.storable()) { - // throw the response away, it's not usable at all - return; -} - -// Cache the data AND the policy object in your cache -// (this is pseudocode, roll your own cache (lru-cache package works)) -letsPretendThisIsSomeCache.set( - request.url, - { policy, response }, - policy.timeToLive() -); -``` - -```js -// And later, when you receive a new request: -const { policy, response } = letsPretendThisIsSomeCache.get(newRequest.url); - -// It's not enough that it exists in the cache, it has to match the new request, too: -if (policy && policy.satisfiesWithoutRevalidation(newRequest)) { - // OK, the previous response can be used to respond to the `newRequest`. - // Response headers have to be updated, e.g. to add Age and remove uncacheable headers. - response.headers = policy.responseHeaders(); - return response; -} -``` - -It may be surprising, but it's not enough for an HTTP response to be [fresh](#yo-fresh) to satisfy a request. It may need to match request headers specified in `Vary`. Even a matching fresh response may still not be usable if the new request restricted cacheability, etc. - -The key method is `satisfiesWithoutRevalidation(newRequest)`, which checks whether the `newRequest` is compatible with the original request and whether all caching conditions are met. - -### Constructor options - -Request and response must have a `headers` property with all header names in lower case. `url`, `status` and `method` are optional (defaults are any URL, status `200`, and `GET` method). 
- -```js -const request = { - url: '/', - method: 'GET', - headers: { - accept: '*/*', - }, -}; - -const response = { - status: 200, - headers: { - 'cache-control': 'public, max-age=7234', - }, -}; - -const options = { - shared: true, - cacheHeuristic: 0.1, - immutableMinTimeToLive: 24 * 3600 * 1000, // 24h - ignoreCargoCult: false, -}; -``` - -If `options.shared` is `true` (default), then the response is evaluated from a perspective of a shared cache (i.e. `private` is not cacheable and `s-maxage` is respected). If `options.shared` is `false`, then the response is evaluated from a perspective of a single-user cache (i.e. `private` is cacheable and `s-maxage` is ignored). `shared: true` is recommended for HTTP clients. - -`options.cacheHeuristic` is a fraction of response's age that is used as a fallback cache duration. The default is 0.1 (10%), e.g. if a file hasn't been modified for 100 days, it'll be cached for 100\*0.1 = 10 days. - -`options.immutableMinTimeToLive` is a number of milliseconds to assume as the default time to cache responses with `Cache-Control: immutable`. Note that [per RFC](http://httpwg.org/http-extensions/immutable.html) these can become stale, so `max-age` still overrides the default. - -If `options.ignoreCargoCult` is true, common anti-cache directives will be completely ignored if the non-standard `pre-check` and `post-check` directives are present. These two useless directives are most commonly found in bad StackOverflow answers and PHP's "session limiter" defaults. - -### `storable()` - -Returns `true` if the response can be stored in a cache. If it's `false` then you MUST NOT store either the request or the response. - -### `satisfiesWithoutRevalidation(newRequest)` - -This is the most important method. Use this method to check whether the cached response is still fresh in the context of the new request. - -If it returns `true`, then the given `request` matches the original response this cache policy has been created with, and the response can be reused without contacting the server. Note that the old response can't be returned without being updated, see `responseHeaders()`. - -If it returns `false`, then the response may not be matching at all (e.g. it's for a different URL or method), or may require to be refreshed first (see `revalidationHeaders()`). - -### `responseHeaders()` - -Returns updated, filtered set of response headers to return to clients receiving the cached response. This function is necessary, because proxies MUST always remove hop-by-hop headers (such as `TE` and `Connection`) and update response's `Age` to avoid doubling cache time. - -```js -cachedResponse.headers = cachePolicy.responseHeaders(cachedResponse); -``` - -### `timeToLive()` - -Returns approximate time in _milliseconds_ until the response becomes stale (i.e. not fresh). - -After that time (when `timeToLive() <= 0`) the response might not be usable without revalidation. However, there are exceptions, e.g. a client can explicitly allow stale responses, so always check with `satisfiesWithoutRevalidation()`. -`stale-if-error` and `stale-while-revalidate` extend the time to live of the cache, that can still be used if stale. - -### `toObject()`/`fromObject(json)` - -Chances are you'll want to store the `CachePolicy` object along with the cached response. `obj = policy.toObject()` gives a plain JSON-serializable object. `policy = CachePolicy.fromObject(obj)` creates an instance from it. 
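Since `toObject()`/`fromObject()` exist precisely so a policy can survive a round-trip through a persistent store, a minimal sketch of that round-trip may help. The `store` with `get`/`set` methods below is hypothetical pseudocode in the same spirit as the earlier examples; only the `CachePolicy` calls come from this document.

```js
// Persisting: serialize the policy next to the cached response
// (`store` is a hypothetical JSON-based key/value store).
await store.set(
  request.url,
  JSON.stringify({ policy: policy.toObject(), response }),
  policy.timeToLive()
);

// Later: revive the policy before asking it anything about a new request.
const raw = await store.get(newRequest.url);
if (raw) {
  const entry = JSON.parse(raw);
  const revived = CachePolicy.fromObject(entry.policy);
  if (revived.satisfiesWithoutRevalidation(newRequest)) {
    entry.response.headers = revived.responseHeaders();
    return entry.response;
  }
}
```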
- -### Refreshing stale cache (revalidation) - -When a cached response has expired, it can be made fresh again by making a request to the origin server. The server may respond with status 304 (Not Modified) without sending the response body again, saving bandwidth. - -The following methods help perform the update efficiently and correctly. - -#### `revalidationHeaders(newRequest)` - -Returns updated, filtered set of request headers to send to the origin server to check if the cached response can be reused. These headers allow the origin server to return status 304 indicating the response is still fresh. All headers unrelated to caching are passed through as-is. - -Use this method when updating cache from the origin server. - -```js -updateRequest.headers = cachePolicy.revalidationHeaders(updateRequest); -``` - -#### `revalidatedPolicy(revalidationRequest, revalidationResponse)` - -Use this method to update the cache after receiving a new response from the origin server. It returns an object with two keys: - -- `policy` — A new `CachePolicy` with HTTP headers updated from `revalidationResponse`. You can always replace the old cached `CachePolicy` with the new one. -- `modified` — Boolean indicating whether the response body has changed. - - If `false`, then a valid 304 Not Modified response has been received, and you can reuse the old cached response body. This is also affected by `stale-if-error`. - - If `true`, you should use new response's body (if present), or make another request to the origin server without any conditional headers (i.e. don't use `revalidationHeaders()` this time) to get the new resource. - -```js -// When serving requests from cache: -const { oldPolicy, oldResponse } = letsPretendThisIsSomeCache.get( - newRequest.url -); - -if (!oldPolicy.satisfiesWithoutRevalidation(newRequest)) { - // Change the request to ask the origin server if the cached response can be used - newRequest.headers = oldPolicy.revalidationHeaders(newRequest); - - // Send request to the origin server. The server may respond with status 304 - const newResponse = await makeRequest(newRequest); - - // Create updated policy and combined response from the old and new data - const { policy, modified } = oldPolicy.revalidatedPolicy( - newRequest, - newResponse - ); - const response = modified ? newResponse : oldResponse; - - // Update the cache with the newer/fresher response - letsPretendThisIsSomeCache.set( - newRequest.url, - { policy, response }, - policy.timeToLive() - ); - - // And proceed returning cached response as usual - response.headers = policy.responseHeaders(); - return response; -} -``` - -# Yo, FRESH - -![satisfiesWithoutRevalidation](fresh.jpg) - -## Used by - -- [ImageOptim API](https://imageoptim.com/api), [make-fetch-happen](https://github.com/zkat/make-fetch-happen), [cacheable-request](https://www.npmjs.com/package/cacheable-request) ([got](https://www.npmjs.com/package/got)), [npm/registry-fetch](https://github.com/npm/registry-fetch), [etc.](https://github.com/kornelski/http-cache-semantics/network/dependents) - -## Implemented - -- `Cache-Control` response header with all the quirks. -- `Expires` with check for bad clocks. -- `Pragma` response header. -- `Age` response header. -- `Vary` response header. -- Default cacheability of statuses and methods. -- Requests for stale data. -- Filtering of hop-by-hop headers. 
-- Basic revalidation request -- `stale-if-error` - -## Unimplemented - -- Merging of range requests, `If-Range` (but correctly supports them as non-cacheable) -- Revalidation of multiple representations - -### Trusting server `Date` - -Per the RFC, the cache should take into account the time between server-supplied `Date` and the time it received the response. The RFC-mandated behavior creates two problems: - - * Servers with incorrectly set timezone may add several hours to cache age (or more, if the clock is completely wrong). - * Even reasonably correct clocks may be off by a couple of seconds, breaking `max-age=1` trick (which is useful for reverse proxies on high-traffic servers). - -Previous versions of this library had an option to ignore the server date if it was "too inaccurate". To support the `max-age=1` trick the library also has to ignore dates that pretty accurate. There's no point of having an option to trust dates that are only a bit inaccurate, so this library won't trust any server dates. `max-age` will be interpreted from the time the response has been received, not from when it has been sent. This will affect only [RFC 1149 networks](https://tools.ietf.org/html/rfc1149). diff --git a/node_modules/http-proxy-agent/README.md b/node_modules/http-proxy-agent/README.md deleted file mode 100644 index d60e20661f86c..0000000000000 --- a/node_modules/http-proxy-agent/README.md +++ /dev/null @@ -1,74 +0,0 @@ -http-proxy-agent -================ -### An HTTP(s) proxy `http.Agent` implementation for HTTP -[![Build Status](https://github.com/TooTallNate/node-http-proxy-agent/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-http-proxy-agent/actions?workflow=Node+CI) - -This module provides an `http.Agent` implementation that connects to a specified -HTTP or HTTPS proxy server, and can be used with the built-in `http` module. - -__Note:__ For HTTP proxy usage with the `https` module, check out -[`node-https-proxy-agent`](https://github.com/TooTallNate/node-https-proxy-agent). 
- -Installation ------------- - -Install with `npm`: - -``` bash -$ npm install http-proxy-agent -``` - - -Example -------- - -``` js -var url = require('url'); -var http = require('http'); -var HttpProxyAgent = require('http-proxy-agent'); - -// HTTP/HTTPS proxy to connect to -var proxy = process.env.http_proxy || 'http://168.63.76.32:3128'; -console.log('using proxy server %j', proxy); - -// HTTP endpoint for the proxy to connect to -var endpoint = process.argv[2] || 'http://nodejs.org/api/'; -console.log('attempting to GET %j', endpoint); -var opts = url.parse(endpoint); - -// create an instance of the `HttpProxyAgent` class with the proxy server information -var agent = new HttpProxyAgent(proxy); -opts.agent = agent; - -http.get(opts, function (res) { - console.log('"response" event!', res.headers); - res.pipe(process.stdout); -}); -``` - - -License -------- - -(The MIT License) - -Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/http-signature/.dir-locals.el b/node_modules/http-signature/.dir-locals.el deleted file mode 100644 index 3bc9235f25562..0000000000000 --- a/node_modules/http-signature/.dir-locals.el +++ /dev/null @@ -1,6 +0,0 @@ -((nil . ((indent-tabs-mode . nil) - (tab-width . 8) - (fill-column . 80))) - (js-mode . ((js-indent-level . 2) - (indent-tabs-mode . nil) - ))) \ No newline at end of file diff --git a/node_modules/http-signature/.npmignore b/node_modules/http-signature/.npmignore deleted file mode 100644 index c143fb3a46cac..0000000000000 --- a/node_modules/http-signature/.npmignore +++ /dev/null @@ -1,7 +0,0 @@ -.gitmodules -deps -docs -Makefile -node_modules -test -tools \ No newline at end of file diff --git a/node_modules/http-signature/README.md b/node_modules/http-signature/README.md deleted file mode 100644 index de487d3236ac6..0000000000000 --- a/node_modules/http-signature/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# node-http-signature - -node-http-signature is a node.js library that has client and server components -for Joyent's [HTTP Signature Scheme](http_signing.md). - -## Usage - -Note the example below signs a request with the same key/cert used to start an -HTTP server. This is almost certainly not what you actually want, but is just -used to illustrate the API calls; you will need to provide your own key -management in addition to this library. 
- -### Client - -```js -var fs = require('fs'); -var https = require('https'); -var httpSignature = require('http-signature'); - -var key = fs.readFileSync('./key.pem', 'ascii'); - -var options = { - host: 'localhost', - port: 8443, - path: '/', - method: 'GET', - headers: {} -}; - -// Adds a 'Date' header in, signs it, and adds the -// 'Authorization' header in. -var req = https.request(options, function(res) { - console.log(res.statusCode); -}); - - -httpSignature.sign(req, { - key: key, - keyId: './cert.pem' -}); - -req.end(); -``` - -### Server - -```js -var fs = require('fs'); -var https = require('https'); -var httpSignature = require('http-signature'); - -var options = { - key: fs.readFileSync('./key.pem'), - cert: fs.readFileSync('./cert.pem') -}; - -https.createServer(options, function (req, res) { - var rc = 200; - var parsed = httpSignature.parseRequest(req); - var pub = fs.readFileSync(parsed.keyId, 'ascii'); - if (!httpSignature.verifySignature(parsed, pub)) - rc = 401; - - res.writeHead(rc); - res.end(); -}).listen(8443); -``` - -## Installation - - npm install http-signature - -## License - -MIT. - -## Bugs - -See <https://github.com/joyent/node-http-signature/issues>. diff --git a/node_modules/https-proxy-agent/README.md b/node_modules/https-proxy-agent/README.md deleted file mode 100644 index 328656a9e048a..0000000000000 --- a/node_modules/https-proxy-agent/README.md +++ /dev/null @@ -1,137 +0,0 @@ -https-proxy-agent -================ -### An HTTP(s) proxy `http.Agent` implementation for HTTPS -[![Build Status](https://github.com/TooTallNate/node-https-proxy-agent/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-https-proxy-agent/actions?workflow=Node+CI) - -This module provides an `http.Agent` implementation that connects to a specified -HTTP or HTTPS proxy server, and can be used with the built-in `https` module. - -Specifically, this `Agent` implementation connects to an intermediary "proxy" -server and issues the [CONNECT HTTP method][CONNECT], which tells the proxy to -open a direct TCP connection to the destination server. - -Since this agent implements the CONNECT HTTP method, it also works with other -protocols that use this method when connecting over proxies (i.e. WebSockets). -See the "Examples" section below for more. 
- - -Installation ------------- - -Install with `npm`: - -``` bash -$ npm install https-proxy-agent -``` - - -Examples --------- - -#### `https` module example - -``` js -var url = require('url'); -var https = require('https'); -var HttpsProxyAgent = require('https-proxy-agent'); - -// HTTP/HTTPS proxy to connect to -var proxy = process.env.http_proxy || 'http://168.63.76.32:3128'; -console.log('using proxy server %j', proxy); - -// HTTPS endpoint for the proxy to connect to -var endpoint = process.argv[2] || 'https://graph.facebook.com/tootallnate'; -console.log('attempting to GET %j', endpoint); -var options = url.parse(endpoint); - -// create an instance of the `HttpsProxyAgent` class with the proxy server information -var agent = new HttpsProxyAgent(proxy); -options.agent = agent; - -https.get(options, function (res) { - console.log('"response" event!', res.headers); - res.pipe(process.stdout); -}); -``` - -#### `ws` WebSocket connection example - -``` js -var url = require('url'); -var WebSocket = require('ws'); -var HttpsProxyAgent = require('https-proxy-agent'); - -// HTTP/HTTPS proxy to connect to -var proxy = process.env.http_proxy || 'http://168.63.76.32:3128'; -console.log('using proxy server %j', proxy); - -// WebSocket endpoint for the proxy to connect to -var endpoint = process.argv[2] || 'ws://echo.websocket.org'; -var parsed = url.parse(endpoint); -console.log('attempting to connect to WebSocket %j', endpoint); - -// create an instance of the `HttpsProxyAgent` class with the proxy server information -var options = url.parse(proxy); - -var agent = new HttpsProxyAgent(options); - -// finally, initiate the WebSocket connection -var socket = new WebSocket(endpoint, { agent: agent }); - -socket.on('open', function () { - console.log('"open" event!'); - socket.send('hello world'); -}); - -socket.on('message', function (data, flags) { - console.log('"message" event! %j %j', data, flags); - socket.close(); -}); -``` - -API ---- - -### new HttpsProxyAgent(Object options) - -The `HttpsProxyAgent` class implements an `http.Agent` subclass that connects -to the specified "HTTP(s) proxy server" in order to proxy HTTPS and/or WebSocket -requests. This is achieved by using the [HTTP `CONNECT` method][CONNECT]. - -The `options` argument may either be a string URI of the proxy server to use, or an -"options" object with more specific properties: - - * `host` - String - Proxy host to connect to (may use `hostname` as well). Required. - * `port` - Number - Proxy port to connect to. Required. - * `protocol` - String - If `https:`, then use TLS to connect to the proxy. - * `headers` - Object - Additional HTTP headers to be sent on the HTTP CONNECT method. - * Any other options given are passed to the `net.connect()`/`tls.connect()` functions. - - -License -------- - -(The MIT License) - -Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -[CONNECT]: http://en.wikipedia.org/wiki/HTTP_tunnel#HTTP_CONNECT_Tunneling diff --git a/node_modules/humanize-ms/README.md b/node_modules/humanize-ms/README.md deleted file mode 100644 index 20a2ca35b89fb..0000000000000 --- a/node_modules/humanize-ms/README.md +++ /dev/null @@ -1,40 +0,0 @@ -humanize-ms ---------------- - -[![NPM version][npm-image]][npm-url] -[![build status][travis-image]][travis-url] -[![Test coverage][coveralls-image]][coveralls-url] -[![Gittip][gittip-image]][gittip-url] -[![David deps][david-image]][david-url] - -[npm-image]: https://img.shields.io/npm/v/humanize-ms.svg?style=flat -[npm-url]: https://npmjs.org/package/humanize-ms -[travis-image]: https://img.shields.io/travis/node-modules/humanize-ms.svg?style=flat -[travis-url]: https://travis-ci.org/node-modules/humanize-ms -[coveralls-image]: https://img.shields.io/coveralls/node-modules/humanize-ms.svg?style=flat -[coveralls-url]: https://coveralls.io/r/node-modules/humanize-ms?branch=master -[gittip-image]: https://img.shields.io/gittip/dead-horse.svg?style=flat -[gittip-url]: https://www.gittip.com/dead-horse/ -[david-image]: https://img.shields.io/david/node-modules/humanize-ms.svg?style=flat -[david-url]: https://david-dm.org/node-modules/humanize-ms - -transform humanize time to ms - -## Installation - -```bash -$ npm install humanize-ms -``` - -## Examples - -```js -var ms = require('humanize-ms'); - -ms('1s') // 1000 -ms(1000) // 1000 -``` - -### License - -MIT diff --git a/node_modules/iconv-lite/.github/dependabot.yml b/node_modules/iconv-lite/.github/dependabot.yml deleted file mode 100644 index e4a0e0afdff7c..0000000000000 --- a/node_modules/iconv-lite/.github/dependabot.yml +++ /dev/null @@ -1,11 +0,0 @@ -# Please see the documentation for all configuration options: -# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates - -version: 2 -updates: - - package-ecosystem: "npm" - directory: "/" - schedule: - interval: "daily" - allow: - - dependency-type: production diff --git a/node_modules/iconv-lite/Changelog.md b/node_modules/iconv-lite/Changelog.md index c299cc06a25d3..464549b148481 100644 --- a/node_modules/iconv-lite/Changelog.md +++ b/node_modules/iconv-lite/Changelog.md @@ -1,3 +1,7 @@ +## 0.6.3 / 2021-05-23 + * Fix HKSCS encoding to prefer Big5 codes if both Big5 and HKSCS codes are possible (#264) + + ## 0.6.2 / 2020-07-08 * Support Uint8Array-s decoding without conversion to Buffers, plus fix an edge case. diff --git a/node_modules/iconv-lite/README.md b/node_modules/iconv-lite/README.md deleted file mode 100644 index 3c97f87307946..0000000000000 --- a/node_modules/iconv-lite/README.md +++ /dev/null @@ -1,130 +0,0 @@ -## iconv-lite: Pure JS character encoding conversion - - * No need for native code compilation. Quick to install, works on Windows and in sandboxed environments like [Cloud9](http://c9.io). 
- * Used in popular projects like [Express.js (body_parser)](https://github.com/expressjs/body-parser), - [Grunt](http://gruntjs.com/), [Nodemailer](http://www.nodemailer.com/), [Yeoman](http://yeoman.io/) and others. - * Faster than [node-iconv](https://github.com/bnoordhuis/node-iconv) (see below for performance comparison). - * Intuitive encode/decode API, including Streaming support. - * In-browser usage via [browserify](https://github.com/substack/node-browserify) or [webpack](https://webpack.js.org/) (~180kb gzip compressed with Buffer shim included). - * Typescript [type definition file](https://github.com/ashtuchkin/iconv-lite/blob/master/lib/index.d.ts) included. - * React Native is supported (need to install `stream` module to enable Streaming API). - * License: MIT. - -[![NPM Stats](https://nodei.co/npm/iconv-lite.png)](https://npmjs.org/package/iconv-lite/) -[![Build Status](https://travis-ci.org/ashtuchkin/iconv-lite.svg?branch=master)](https://travis-ci.org/ashtuchkin/iconv-lite) -[![npm](https://img.shields.io/npm/v/iconv-lite.svg)](https://npmjs.org/package/iconv-lite/) -[![npm downloads](https://img.shields.io/npm/dm/iconv-lite.svg)](https://npmjs.org/package/iconv-lite/) -[![npm bundle size](https://img.shields.io/bundlephobia/min/iconv-lite.svg)](https://npmjs.org/package/iconv-lite/) - -## Usage -### Basic API -```javascript -var iconv = require('iconv-lite'); - -// Convert from an encoded buffer to a js string. -str = iconv.decode(Buffer.from([0x68, 0x65, 0x6c, 0x6c, 0x6f]), 'win1251'); - -// Convert from a js string to an encoded buffer. -buf = iconv.encode("Sample input string", 'win1251'); - -// Check if encoding is supported -iconv.encodingExists("us-ascii") -``` - -### Streaming API -```javascript - -// Decode stream (from binary data stream to js strings) -http.createServer(function(req, res) { - var converterStream = iconv.decodeStream('win1251'); - req.pipe(converterStream); - - converterStream.on('data', function(str) { - console.log(str); // Do something with decoded strings, chunk-by-chunk. - }); -}); - -// Convert encoding streaming example -fs.createReadStream('file-in-win1251.txt') - .pipe(iconv.decodeStream('win1251')) - .pipe(iconv.encodeStream('ucs2')) - .pipe(fs.createWriteStream('file-in-ucs2.txt')); - -// Sugar: all encode/decode streams have .collect(cb) method to accumulate data. -http.createServer(function(req, res) { - req.pipe(iconv.decodeStream('win1251')).collect(function(err, body) { - assert(typeof body == 'string'); - console.log(body); // full request body string - }); -}); -``` - -## Supported encodings - - * All node.js native encodings: utf8, ucs2 / utf16-le, ascii, binary, base64, hex. - * Additional unicode encodings: utf16, utf16-be, utf-7, utf-7-imap, utf32, utf32-le, and utf32-be. - * All widespread singlebyte encodings: Windows 125x family, ISO-8859 family, - IBM/DOS codepages, Macintosh family, KOI8 family, all others supported by iconv library. - Aliases like 'latin1', 'us-ascii' also supported. - * All widespread multibyte encodings: CP932, CP936, CP949, CP950, GB2312, GBK, GB18030, Big5, Shift_JIS, EUC-JP. - -See [all supported encodings on wiki](https://github.com/ashtuchkin/iconv-lite/wiki/Supported-Encodings). - -Most singlebyte encodings are generated automatically from [node-iconv](https://github.com/bnoordhuis/node-iconv). Thank you Ben Noordhuis and libiconv authors! 
- -Multibyte encodings are generated from [Unicode.org mappings](http://www.unicode.org/Public/MAPPINGS/) and [WHATWG Encoding Standard mappings](http://encoding.spec.whatwg.org/). Thank you, respective authors! - - -## Encoding/decoding speed - -Comparison with node-iconv module (1000x256kb, on MacBook Pro, Core i5/2.6 GHz, Node v0.12.0). -Note: your results may vary, so please always check on your hardware. - - operation iconv@2.1.4 iconv-lite@0.4.7 - ---------------------------------------------------------- - encode('win1251') ~96 Mb/s ~320 Mb/s - decode('win1251') ~95 Mb/s ~246 Mb/s - -## BOM handling - - * Decoding: BOM is stripped by default, unless overridden by passing `stripBOM: false` in options - (f.ex. `iconv.decode(buf, enc, {stripBOM: false})`). - A callback might also be given as a `stripBOM` parameter - it'll be called if BOM character was actually found. - * If you want to detect UTF-8 BOM when decoding other encodings, use [node-autodetect-decoder-stream](https://github.com/danielgindi/node-autodetect-decoder-stream) module. - * Encoding: No BOM added, unless overridden by `addBOM: true` option. - -## UTF-16 Encodings - -This library supports UTF-16LE, UTF-16BE and UTF-16 encodings. First two are straightforward, but UTF-16 is trying to be -smart about endianness in the following ways: - * Decoding: uses BOM and 'spaces heuristic' to determine input endianness. Default is UTF-16LE, but can be - overridden with `defaultEncoding: 'utf-16be'` option. Strips BOM unless `stripBOM: false`. - * Encoding: uses UTF-16LE and writes BOM by default. Use `addBOM: false` to override. - -## UTF-32 Encodings - -This library supports UTF-32LE, UTF-32BE and UTF-32 encodings. Like the UTF-16 encoding above, UTF-32 defaults to UTF-32LE, but uses BOM and 'spaces heuristics' to determine input endianness. - * The default of UTF-32LE can be overridden with the `defaultEncoding: 'utf-32be'` option. Strips BOM unless `stripBOM: false`. - * Encoding: uses UTF-32LE and writes BOM by default. Use `addBOM: false` to override. (`defaultEncoding: 'utf-32be'` can also be used here to change encoding.) - -## Other notes - -When decoding, be sure to supply a Buffer to decode() method, otherwise [bad things usually happen](https://github.com/ashtuchkin/iconv-lite/wiki/Use-Buffers-when-decoding). -Untranslatable characters are set to � or ?. No transliteration is currently supported. -Node versions 0.10.31 and 0.11.13 are buggy, don't use them (see #65, #77). - -## Testing - -```bash -$ git clone git@github.com:ashtuchkin/iconv-lite.git -$ cd iconv-lite -$ npm install -$ npm test - -$ # To view performance: -$ node test/performance.js - -$ # To view test coverage: -$ npm run coverage -$ open coverage/lcov-report/index.html -``` diff --git a/node_modules/iconv-lite/encodings/dbcs-data.js b/node_modules/iconv-lite/encodings/dbcs-data.js index 4b61914341f91..0d17e5821b3df 100644 --- a/node_modules/iconv-lite/encodings/dbcs-data.js +++ b/node_modules/iconv-lite/encodings/dbcs-data.js @@ -167,7 +167,19 @@ module.exports = { 'big5hkscs': { type: '_dbcs', table: function() { return require('./tables/cp950.json').concat(require('./tables/big5-added.json')) }, - encodeSkipVals: [0xa2cc], + encodeSkipVals: [ + // Although Encoding Standard says we should avoid encoding to HKSCS area (See Step 1 of + // https://encoding.spec.whatwg.org/#index-big5-pointer), we still do it to increase compatibility with ICU. + // But if a single unicode point can be encoded both as HKSCS and regular Big5, we prefer the latter. 
+ 0x8e69, 0x8e6f, 0x8e7e, 0x8eab, 0x8eb4, 0x8ecd, 0x8ed0, 0x8f57, 0x8f69, 0x8f6e, 0x8fcb, 0x8ffe, + 0x906d, 0x907a, 0x90c4, 0x90dc, 0x90f1, 0x91bf, 0x92af, 0x92b0, 0x92b1, 0x92b2, 0x92d1, 0x9447, 0x94ca, + 0x95d9, 0x96fc, 0x9975, 0x9b76, 0x9b78, 0x9b7b, 0x9bc6, 0x9bde, 0x9bec, 0x9bf6, 0x9c42, 0x9c53, 0x9c62, + 0x9c68, 0x9c6b, 0x9c77, 0x9cbc, 0x9cbd, 0x9cd0, 0x9d57, 0x9d5a, 0x9dc4, 0x9def, 0x9dfb, 0x9ea9, 0x9eef, + 0x9efd, 0x9f60, 0x9fcb, 0xa077, 0xa0dc, 0xa0df, 0x8fcc, 0x92c8, 0x9644, 0x96ed, + + // Step 2 of https://encoding.spec.whatwg.org/#index-big5-pointer: Use last pointer for U+2550, U+255E, U+2561, U+256A, U+5341, or U+5345 + 0xa2a4, 0xa2a5, 0xa2a7, 0xa2a6, 0xa2cc, 0xa2ce, + ], }, 'cnbig5': 'big5hkscs', diff --git a/node_modules/iconv-lite/package.json b/node_modules/iconv-lite/package.json index 8f86f9c9bc1f8..d351115a839fa 100644 --- a/node_modules/iconv-lite/package.json +++ b/node_modules/iconv-lite/package.json @@ -1,7 +1,7 @@ { "name": "iconv-lite", "description": "Convert character encodings in pure javascript.", - "version": "0.6.2", + "version": "0.6.3", "license": "MIT", "keywords": [ "iconv", diff --git a/node_modules/ignore-walk/README.md b/node_modules/ignore-walk/README.md deleted file mode 100644 index 278f61017f5e7..0000000000000 --- a/node_modules/ignore-walk/README.md +++ /dev/null @@ -1,60 +0,0 @@ -# ignore-walk - -[![Build -Status](https://travis-ci.org/npm/ignore-walk.svg?branch=master)](https://travis-ci.org/npm/ignore-walk) - -Nested/recursive `.gitignore`/`.npmignore` parsing and filtering. - -Walk a directory creating a list of entries, parsing any `.ignore` -files met along the way to exclude files. - -## USAGE - -```javascript -const walk = require('ignore-walk') - -// All options are optional, defaults provided. - -// this function returns a promise, but you can also pass a cb -// if you like that approach better. -walk({ - path: '...', // root dir to start in. defaults to process.cwd() - ignoreFiles: [ '.gitignore' ], // list of filenames. defaults to ['.ignore'] - includeEmpty: true|false, // true to include empty dirs, default false - follow: true|false // true to follow symlink dirs, default false -}, callback) - -// to walk synchronously, do it this way: -const result = walk.sync({ path: '/wow/such/filepath' }) -``` - -If you want to get at the underlying classes, they're at `walk.Walker` -and `walk.WalkerSync`. - -## OPTIONS - -* `path` The path to start in. Defaults to `process.cwd()` - -* `ignoreFiles` Filenames to treat as ignore files. The default is - `['.ignore']`. (This is where you'd put `.gitignore` or - `.npmignore` or whatever.) If multiple ignore files are in a - directory, then rules from each are applied in the order that the - files are listed. - -* `includeEmpty` Set to `true` to include empty directories, assuming - they are not excluded by any of the ignore rules. If not set, then - this follows the standard `git` behavior of not including - directories that are empty. - - Note: this will cause an empty directory to be included if it - would contain an included entry, even if it would have otherwise - been excluded itself. - - For example, given the rules `*` (ignore everything) and `!/a/b/c` - (re-include the entry at `/a/b/c`), the directory `/a/b` will be - included if it is empty. - -* `follow` Set to `true` to treat symbolically linked directories as - directories, recursing into them. There is no handling for nested - symlinks, so `ELOOP` errors can occur in some cases when using this - option. Defaults to `false`. 
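To illustrate the `includeEmpty` corner case described in the options above, here is a minimal sketch; the root path and the `.myignore` file name are made up for the example, and the only API assumed is the `walk()` call already shown in the USAGE section:

```js
const walk = require('ignore-walk')

// Assume the root contains a .myignore file with the two rules from the
// OPTIONS note above:
//   *        (ignore everything)
//   !/a/b/c  (re-include the entry at /a/b/c)
walk({
  path: '/some/project',        // hypothetical root directory
  ignoreFiles: ['.myignore'],   // hypothetical ignore-file name
  includeEmpty: true            // keep dirs that would hold re-included entries
}).then(entries => {
  // With includeEmpty: true, the directory a/b appears in the result even if
  // it is empty, because it would contain the re-included a/b/c entry.
  console.log(entries)
})
```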
diff --git a/node_modules/ignore-walk/index.js b/node_modules/ignore-walk/index.js index eec6851804a99..c01d57de2a039 100644 --- a/node_modules/ignore-walk/index.js +++ b/node_modules/ignore-walk/index.js @@ -23,7 +23,7 @@ class Walker extends EE { } sort (a, b) { - return a.localeCompare(b) + return a.localeCompare(b, 'en') } emit (ev, data) { diff --git a/node_modules/ignore-walk/package.json b/node_modules/ignore-walk/package.json index 99d2c2e64de9d..7d48b977e0702 100644 --- a/node_modules/ignore-walk/package.json +++ b/node_modules/ignore-walk/package.json @@ -1,13 +1,13 @@ { "name": "ignore-walk", - "version": "3.0.3", + "version": "3.0.4", "description": "Nested/recursive `.gitignore`/`.npmignore` parsing and filtering.", "main": "index.js", "devDependencies": { "mkdirp": "^0.5.1", "mutate-fs": "^1.1.0", "rimraf": "^2.6.1", - "tap": "^14.6.9" + "tap": "^15.0.6" }, "scripts": { "test": "tap", @@ -36,6 +36,9 @@ "minimatch": "^3.0.4" }, "tap": { + "test-env": "LC_ALL=sk", + "before": "test/00-setup.js", + "after": "test/zz-cleanup.js", "jobs": 1 } } diff --git a/node_modules/imurmurhash/README.md b/node_modules/imurmurhash/README.md deleted file mode 100644 index f35b20a0ef5bf..0000000000000 --- a/node_modules/imurmurhash/README.md +++ /dev/null @@ -1,122 +0,0 @@ -iMurmurHash.js -============== - -An incremental implementation of the MurmurHash3 (32-bit) hashing algorithm for JavaScript based on [Gary Court's implementation](https://github.com/garycourt/murmurhash-js) with [kazuyukitanimura's modifications](https://github.com/kazuyukitanimura/murmurhash-js). - -This version works significantly faster than the non-incremental version if you need to hash many small strings into a single hash, since string concatenation (to build the single string to pass the non-incremental version) is fairly costly. In one case tested, using the incremental version was about 50% faster than concatenating 5-10 strings and then hashing. - -Installation ------------- - -To use iMurmurHash in the browser, [download the latest version](https://raw.github.com/jensyt/imurmurhash-js/master/imurmurhash.min.js) and include it as a script on your site. - -```html -<script type="text/javascript" src="/scripts/imurmurhash.min.js"></script> -<script> -// Your code here, access iMurmurHash using the global object MurmurHash3 -</script> -``` - ---- - -To use iMurmurHash in Node.js, install the module using NPM: - -```bash -npm install imurmurhash -``` - -Then simply include it in your scripts: - -```javascript -MurmurHash3 = require('imurmurhash'); -``` - -Quick Example -------------- - -```javascript -// Create the initial hash -var hashState = MurmurHash3('string'); - -// Incrementally add text -hashState.hash('more strings'); -hashState.hash('even more strings'); - -// All calls can be chained if desired -hashState.hash('and').hash('some').hash('more'); - -// Get a result -hashState.result(); -// returns 0xe4ccfe6b -``` - -Functions ---------- - -### MurmurHash3 ([string], [seed]) -Get a hash state object, optionally initialized with the given _string_ and _seed_. _Seed_ must be a positive integer if provided. Calling this function without the `new` keyword will return a cached state object that has been reset. This is safe to use as long as the object is only used from a single thread and no other hashes are created while operating on this one. If this constraint cannot be met, you can use `new` to create a new state object. 
For example: - -```javascript -// Use the cached object, calling the function again will return the same -// object (but reset, so the current state would be lost) -hashState = MurmurHash3(); -... - -// Create a new object that can be safely used however you wish. Calling the -// function again will simply return a new state object, and no state loss -// will occur, at the cost of creating more objects. -hashState = new MurmurHash3(); -``` - -Both methods can be mixed however you like if you have different use cases. - ---- - -### MurmurHash3.prototype.hash (string) -Incrementally add _string_ to the hash. This can be called as many times as you want for the hash state object, including after a call to `result()`. Returns `this` so calls can be chained. - ---- - -### MurmurHash3.prototype.result () -Get the result of the hash as a 32-bit positive integer. This performs the tail and finalizer portions of the algorithm, but does not store the result in the state object. This means that it is perfectly safe to get results and then continue adding strings via `hash`. - -```javascript -// Do the whole string at once -MurmurHash3('this is a test string').result(); -// 0x70529328 - -// Do part of the string, get a result, then the other part -var m = MurmurHash3('this is a'); -m.result(); -// 0xbfc4f834 -m.hash(' test string').result(); -// 0x70529328 (same as above) -``` - ---- - -### MurmurHash3.prototype.reset ([seed]) -Reset the state object for reuse, optionally using the given _seed_ (defaults to 0 like the constructor). Returns `this` so calls can be chained. - ---- - -License (MIT) -------------- -Copyright (c) 2013 Gary Court, Jens Taylor - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/infer-owner/README.md b/node_modules/infer-owner/README.md deleted file mode 100644 index 146caf7b8c801..0000000000000 --- a/node_modules/infer-owner/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# infer-owner - -Infer the owner of a path based on the owner of its nearest existing parent - -## USAGE - -```js -const inferOwner = require('infer-owner') - -inferOwner('/some/cache/folder/file').then(owner => { - // owner is {uid, gid} that should be attached to - // the /some/cache/folder/file, based on ownership - // of /some/cache/folder, /some/cache, /some, or /, - // whichever is the first to exist -}) - -// same, but not async -const owner = inferOwner.sync('/some/cache/folder/file') - -// results are cached! 
to reset the cache (e.g., to change
-// permissions for whatever reason), do this:
-inferOwner.clearCache()
-```
-
-This module endeavors to be as performant as possible. Parallel requests
-for ownership of the same path will only stat the directories one time.
-
-## API
-
-* `inferOwner(path) -> Promise<{ uid, gid }>`
-
-  If the path exists, return its uid and gid. If it does not, look to
-  its parent, then its grandparent, and so on.
-
-* `inferOwner.sync(path) -> { uid, gid }`
-
-  Sync form of `inferOwner(path)`.
-
-* `inferOwner.clearCache()`
-
-  Delete all cached ownership information and in-flight tracking.
diff --git a/node_modules/inflight/README.md b/node_modules/inflight/README.md
deleted file mode 100644
index 6dc8929171a8c..0000000000000
--- a/node_modules/inflight/README.md
+++ /dev/null
@@ -1,37 +0,0 @@
-# inflight
-
-Add callbacks to requests in flight to avoid async duplication
-
-## USAGE
-
-```javascript
-var inflight = require('inflight')
-
-// some request that does some stuff
-function req(key, callback) {
-  // key is any random string. like a url or filename or whatever.
-  //
-  // will return either a falsey value, indicating that the
-  // request for this key is already in flight, or a new callback
-  // which when called will call all callbacks passed to inflight
-  // with the same key
-  callback = inflight(key, callback)
-
-  // If we got a falsey value back, then there's already a req going
-  if (!callback) return
-
-  // this is where you'd fetch the url or whatever
-  // callback is also once()-ified, so it can safely be assigned
-  // to multiple events etc. First call wins.
-  setTimeout(function() {
-    callback(null, key)
-  }, 100)
-}
-
-// only assigns a single setTimeout
-// when it dings, all cbs get called
-req('foo', cb1)
-req('foo', cb2)
-req('foo', cb3)
-req('foo', cb4)
-```
diff --git a/node_modules/inherits/README.md b/node_modules/inherits/README.md
deleted file mode 100644
index b1c56658557b8..0000000000000
--- a/node_modules/inherits/README.md
+++ /dev/null
@@ -1,42 +0,0 @@
-Browser-friendly inheritance fully compatible with standard node.js
-[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor).
-
-This package exports the standard `inherits` from the node.js `util` module in
-a node environment, but also provides an alternative browser-friendly
-implementation through the [browser
-field](https://gist.github.com/shtylman/4339901). The alternative
-implementation is a literal copy of the standard one, located in a standalone
-module to avoid requiring `util`. It also has a shim for old
-browsers with no `Object.create` support.
-
-While making sure you are using the standard `inherits`
-implementation in a node.js environment, it allows bundlers such as
-[browserify](https://github.com/substack/node-browserify) to not
-include the full `util` package in your client code if all you need is
-just the `inherits` function. This is worthwhile, because the browser shim for
-the `util` package is large and `inherits` is often the single function you
-need from it.
-
-It's recommended to use this package instead of
-`require('util').inherits` for any code that may be used
-not only in node.js but in the browser too.
-
-## usage
-
-```js
-var inherits = require('inherits');
-// then use exactly as the standard one
-```
-
-## note on version ~1.0
-
-Version ~1.0 had a completely different motivation and is not compatible
-with either 2.0 or the standard node.js `inherits`.
- -If you are using version ~1.0 and planning to switch to ~2.0, be -careful: - -* new version uses `super_` instead of `super` for referencing - superclass -* new version overwrites current prototype while old one preserves any - existing fields on it diff --git a/node_modules/ini/README.md b/node_modules/ini/README.md deleted file mode 100644 index 33df258297db7..0000000000000 --- a/node_modules/ini/README.md +++ /dev/null @@ -1,102 +0,0 @@ -An ini format parser and serializer for node. - -Sections are treated as nested objects. Items before the first -heading are saved on the object directly. - -## Usage - -Consider an ini-file `config.ini` that looks like this: - - ; this comment is being ignored - scope = global - - [database] - user = dbuser - password = dbpassword - database = use_this_database - - [paths.default] - datadir = /var/lib/data - array[] = first value - array[] = second value - array[] = third value - -You can read, manipulate and write the ini-file like so: - - var fs = require('fs') - , ini = require('ini') - - var config = ini.parse(fs.readFileSync('./config.ini', 'utf-8')) - - config.scope = 'local' - config.database.database = 'use_another_database' - config.paths.default.tmpdir = '/tmp' - delete config.paths.default.datadir - config.paths.default.array.push('fourth value') - - fs.writeFileSync('./config_modified.ini', ini.stringify(config, { section: 'section' })) - -This will result in a file called `config_modified.ini` being written -to the filesystem with the following content: - - [section] - scope=local - [section.database] - user=dbuser - password=dbpassword - database=use_another_database - [section.paths.default] - tmpdir=/tmp - array[]=first value - array[]=second value - array[]=third value - array[]=fourth value - - -## API - -### decode(inistring) - -Decode the ini-style formatted `inistring` into a nested object. - -### parse(inistring) - -Alias for `decode(inistring)` - -### encode(object, [options]) - -Encode the object `object` into an ini-style formatted string. If the -optional parameter `section` is given, then all top-level properties -of the object are put into this section and the `section`-string is -prepended to all sub-sections, see the usage example above. - -The `options` object may contain the following: - -* `section` A string which will be the first `section` in the encoded - ini data. Defaults to none. -* `whitespace` Boolean to specify whether to put whitespace around the - `=` character. By default, whitespace is omitted, to be friendly to - some persnickety old parsers that don't tolerate it well. But some - find that it's more human-readable and pretty with the whitespace. - -For backwards compatibility reasons, if a `string` options is passed -in, then it is assumed to be the `section` value. - -### stringify(object, [options]) - -Alias for `encode(object, [options])` - -### safe(val) - -Escapes the string `val` such that it is safe to be used as a key or -value in an ini-file. Basically escapes quotes. 
For example - - ini.safe('"unsafe string"') - -would result in - - "\"unsafe string\"" - -### unsafe(val) - -Unescapes the string `val` diff --git a/node_modules/init-package-json/CHANGELOG.md b/node_modules/init-package-json/CHANGELOG.md deleted file mode 100644 index 92e92aed11714..0000000000000 --- a/node_modules/init-package-json/CHANGELOG.md +++ /dev/null @@ -1,21 +0,0 @@ -# Change Log - -<a name="2.0.0"></a> -## [2.0.0](https://github.com/npm/init-package-json/compare/v1.10.3...v2.0.0) (2020-10-09) -* BREAKING: requires node10+ -* fix: compat with new `@npmcli/config` module -* chore: update deps to latest and greatest - -<a name="1.10.3"></a> -## [1.10.3](https://github.com/npm/init-package-json/compare/v1.10.2...v1.10.3) (2018-03-07) - - - -<a name="1.10.2"></a> -## [1.10.2](https://github.com/npm/init-package-json/compare/v1.10.1...v1.10.2) (2018-03-07) - - -### Bug Fixes - -* **default-input:** Catch errors from npa ([#71](https://github.com/npm/init-package-json/issues/71)) ([11aee1e](https://github.com/npm/init-package-json/commit/11aee1e)) -* **grammar:** Fix minor style issue in final prompt ([#76](https://github.com/npm/init-package-json/issues/76)) ([ba259ce](https://github.com/npm/init-package-json/commit/ba259ce)) diff --git a/node_modules/init-package-json/README.md b/node_modules/init-package-json/README.md deleted file mode 100644 index 528acf355158a..0000000000000 --- a/node_modules/init-package-json/README.md +++ /dev/null @@ -1,45 +0,0 @@ -# init-package-json - -A node module to get your node module started. - -[![Build Status](https://secure.travis-ci.org/npm/init-package-json.svg)](http://travis-ci.org/npm/init-package-json) - -## Usage - -```javascript -var init = require('init-package-json') -var path = require('path') - -// a path to a promzard module. In the event that this file is -// not found, one will be provided for you. -var initFile = path.resolve(process.env.HOME, '.npm-init') - -// the dir where we're doin stuff. -var dir = process.cwd() - -// extra stuff that gets put into the PromZard module's context. -// In npm, this is the resolved config object. Exposed as 'config' -// Optional. -var configData = { some: 'extra stuff' } - -// Any existing stuff from the package.json file is also exposed in the -// PromZard module as the `package` object. There will also be three -// vars for: -// * `filename` path to the package.json file -// * `basename` the tip of the package dir -// * `dirname` the parent of the package dir - -init(dir, initFile, configData, function (er, data) { - // the data's already been written to {dir}/package.json - // now you can do stuff with it -}) -``` - -Or from the command line: - -``` -$ npm-init -``` - -See [PromZard](https://github.com/npm/promzard) for details about -what can go in the config file. 
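For context on the `initFile` argument above: such a file is just a PromZard module, so a minimal sketch might look like the following. The field choices are purely illustrative; the only things assumed are PromZard's `prompt()` helper and the `config`, `package`, and `basename` variables that the README says are exposed in the module's context.

```js
// ~/.npm-init  -- a hypothetical PromZard module for init-package-json
module.exports = {
  // prompt(label, default) comes from PromZard; basename is the tip of the package dir
  name: prompt('package name', basename),
  version: prompt('version', '1.0.0'),
  description: prompt('description', ''),
  // plain values are used as-is, without prompting
  license: 'MIT',
  // the configData passed to init() is exposed as `config`
  // (e.g. { some: 'extra stuff' } from the example above)
  author: config.author || ''
}
```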
diff --git a/node_modules/init-package-json/default-input.js b/node_modules/init-package-json/default-input.js index 8e9fe0b573ea5..d1f65841d6c5a 100644 --- a/node_modules/init-package-json/default-input.js +++ b/node_modules/init-package-json/default-input.js @@ -12,7 +12,7 @@ function isTestPkg (p) { } function niceName (n) { - return n.replace(/^node-|[.-]js$/g, '').replace(' ', '-').toLowerCase() + return n.replace(/^node-|[.-]js$/g, '').replace(/\s+/g, ' ').replace(/ /g, '-').toLowerCase() } function readDeps (test, excluded) { return function (cb) { @@ -45,7 +45,7 @@ function readDeps (test, excluded) { return function (cb) { }) }} -var name = package.name || basename +var name = niceName(package.name || basename) var spec try { spec = npa(name) @@ -61,7 +61,7 @@ if (scope) { name = scope + '/' + name } } -exports.name = yes ? name : prompt('package name', niceName(name), function (data) { +exports.name = yes ? name : prompt('package name', name, function (data) { var its = validateName(data) if (its.validForNewPackages) return data var errors = (its.errors || []).concat(its.warnings || []) diff --git a/node_modules/init-package-json/package.json b/node_modules/init-package-json/package.json index 91c6bfba82049..584e313b4c2c7 100644 --- a/node_modules/init-package-json/package.json +++ b/node_modules/init-package-json/package.json @@ -1,6 +1,6 @@ { "name": "init-package-json", - "version": "2.0.2", + "version": "2.0.3", "main": "init-package-json.js", "scripts": { "test": "tap", @@ -17,19 +17,19 @@ "description": "A node module to get your node module started", "dependencies": { "glob": "^7.1.1", - "npm-package-arg": "^8.1.0", + "npm-package-arg": "^8.1.2", "promzard": "^0.3.0", "read": "~1.0.1", - "read-package-json": "^3.0.0", - "semver": "^7.3.2", + "read-package-json": "^3.0.1", + "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "^3.0.0" }, "devDependencies": { - "@npmcli/config": "^1.2.1", + "@npmcli/config": "^2.1.0", "mkdirp": "^1.0.4", "rimraf": "^3.0.2", - "tap": "^14.10.8" + "tap": "^14.11.0" }, "engines": { "node": ">=10" diff --git a/node_modules/ip/.jscsrc b/node_modules/ip/.jscsrc deleted file mode 100644 index dbaae20574deb..0000000000000 --- a/node_modules/ip/.jscsrc +++ /dev/null @@ -1,46 +0,0 @@ -{ - "disallowKeywordsOnNewLine": [ "else" ], - "disallowMixedSpacesAndTabs": true, - "disallowMultipleLineStrings": true, - "disallowMultipleVarDecl": true, - "disallowNewlineBeforeBlockStatements": true, - "disallowQuotedKeysInObjects": true, - "disallowSpaceAfterObjectKeys": true, - "disallowSpaceAfterPrefixUnaryOperators": true, - "disallowSpaceBeforePostfixUnaryOperators": true, - "disallowSpacesInCallExpression": true, - "disallowTrailingComma": true, - "disallowTrailingWhitespace": true, - "disallowYodaConditions": true, - - "requireCommaBeforeLineBreak": true, - "requireOperatorBeforeLineBreak": true, - "requireSpaceAfterBinaryOperators": true, - "requireSpaceAfterKeywords": [ "if", "for", "while", "else", "try", "catch" ], - "requireSpaceAfterLineComment": true, - "requireSpaceBeforeBinaryOperators": true, - "requireSpaceBeforeBlockStatements": true, - "requireSpaceBeforeKeywords": [ "else", "catch" ], - "requireSpaceBeforeObjectValues": true, - "requireSpaceBetweenArguments": true, - "requireSpacesInAnonymousFunctionExpression": { - "beforeOpeningCurlyBrace": true - }, - "requireSpacesInFunctionDeclaration": { - "beforeOpeningCurlyBrace": true - }, - "requireSpacesInFunctionExpression": { - "beforeOpeningCurlyBrace": true - 
}, - "requireSpacesInConditionalExpression": true, - "requireSpacesInForStatement": true, - "requireSpacesInsideArrayBrackets": "all", - "requireSpacesInsideObjectBrackets": "all", - "requireDotNotation": true, - - "maximumLineLength": 80, - "validateIndentation": 2, - "validateLineBreaks": "LF", - "validateParameterSeparator": ", ", - "validateQuoteMarks": "'" -} diff --git a/node_modules/ip/.npmignore b/node_modules/ip/.npmignore deleted file mode 100644 index 1ca957177f035..0000000000000 --- a/node_modules/ip/.npmignore +++ /dev/null @@ -1,2 +0,0 @@ -node_modules/ -npm-debug.log diff --git a/node_modules/ip/.travis.yml b/node_modules/ip/.travis.yml deleted file mode 100644 index a3a8fad6b6e38..0000000000000 --- a/node_modules/ip/.travis.yml +++ /dev/null @@ -1,15 +0,0 @@ -sudo: false -language: node_js -node_js: - - "0.8" - - "0.10" - - "0.12" - - "4" - - "6" - -before_install: - - travis_retry npm install -g npm@2.14.5 - - travis_retry npm install - -script: - - npm test diff --git a/node_modules/ip/README.md b/node_modules/ip/README.md deleted file mode 100644 index 22e5819ffaf94..0000000000000 --- a/node_modules/ip/README.md +++ /dev/null @@ -1,90 +0,0 @@ -# IP -[![](https://badge.fury.io/js/ip.svg)](https://www.npmjs.com/package/ip) - -IP address utilities for node.js - -## Installation - -### npm -```shell -npm install ip -``` - -### git - -```shell -git clone https://github.com/indutny/node-ip.git -``` - -## Usage -Get your ip address, compare ip addresses, validate ip addresses, etc. - -```js -var ip = require('ip'); - -ip.address() // my ip address -ip.isEqual('::1', '::0:1'); // true -ip.toBuffer('127.0.0.1') // Buffer([127, 0, 0, 1]) -ip.toString(new Buffer([127, 0, 0, 1])) // 127.0.0.1 -ip.fromPrefixLen(24) // 255.255.255.0 -ip.mask('192.168.1.134', '255.255.255.0') // 192.168.1.0 -ip.cidr('192.168.1.134/26') // 192.168.1.128 -ip.not('255.255.255.0') // 0.0.0.255 -ip.or('192.168.1.134', '0.0.0.255') // 192.168.1.255 -ip.isPrivate('127.0.0.1') // true -ip.isV4Format('127.0.0.1'); // true -ip.isV6Format('::ffff:127.0.0.1'); // true - -// operate on buffers in-place -var buf = new Buffer(128); -var offset = 64; -ip.toBuffer('127.0.0.1', buf, offset); // [127, 0, 0, 1] at offset 64 -ip.toString(buf, offset, 4); // '127.0.0.1' - -// subnet information -ip.subnet('192.168.1.134', '255.255.255.192') -// { networkAddress: '192.168.1.128', -// firstAddress: '192.168.1.129', -// lastAddress: '192.168.1.190', -// broadcastAddress: '192.168.1.191', -// subnetMask: '255.255.255.192', -// subnetMaskLength: 26, -// numHosts: 62, -// length: 64, -// contains: function(addr){...} } -ip.cidrSubnet('192.168.1.134/26') -// Same as previous. - -// range checking -ip.cidrSubnet('192.168.1.134/26').contains('192.168.1.190') // true - - -// ipv4 long conversion -ip.toLong('127.0.0.1'); // 2130706433 -ip.fromLong(2130706433); // '127.0.0.1' -``` - -### License - -This software is licensed under the MIT License. - -Copyright Fedor Indutny, 2012. 
- -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to permit -persons to whom the Software is furnished to do so, subject to the -following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/is-cidr/README.md b/node_modules/is-cidr/README.md deleted file mode 100644 index a786cd48145c8..0000000000000 --- a/node_modules/is-cidr/README.md +++ /dev/null @@ -1,47 +0,0 @@ -# is-cidr - -[![](https://img.shields.io/npm/v/is-cidr.svg?style=flat)](https://www.npmjs.org/package/is-cidr) [![](https://img.shields.io/npm/dm/is-cidr.svg)](https://www.npmjs.org/package/is-cidr) - -> Check if a string is an IP address in CIDR notation - -## Install - -``` -npm i is-cidr -``` - -## Usage - -```js -const isCidr = require("is-cidr"); - -isCidr("192.168.0.1/24"); //=> 4 -isCidr("1:2:3:4:5:6:7:8/64"); //=> 6 -isCidr("10.0.0.0"); //=> 0 -isCidr.v6("10.0.0.0/24"); //=> false -``` - -## API -### isCidr(input) - -Check if `input` is a IPv4 or IPv6 CIDR address. Returns either `4`, `6` (indicating the IP version) or `0` if the string is not a CIDR. - -### isCidr.v4(input) - -Check if `input` is a IPv4 CIDR address. Returns a boolean. - -### isCidr.v6(input) - -Check if `input` is a IPv6 CIDR address. Returns a boolean. 
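Because `isCidr()` returns the IP version itself (and `0` for non-CIDR input), the return value can double as a dispatch key. A small sketch reusing the inputs from the Usage section above:

```js
const isCidr = require("is-cidr");

// isCidr() returns 4, 6, or 0, so it can drive a simple switch
function describe(str) {
  switch (isCidr(str)) {
    case 4:
      return str + " is an IPv4 CIDR";
    case 6:
      return str + " is an IPv6 CIDR";
    default:
      return str + " is not in CIDR notation";
  }
}

console.log(describe("192.168.0.1/24"));     // IPv4 CIDR
console.log(describe("1:2:3:4:5:6:7:8/64")); // IPv6 CIDR
console.log(describe("10.0.0.0"));           // not in CIDR notation
```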
- -## Related - -- [cidr-regex](https://github.com/silverwind/cidr-regex) - Regular expression for matching IP addresses in CIDR notation -- [is-ip](https://github.com/sindresorhus/is-ip) - Check if a string is an IP address -- [ip-regex](https://github.com/sindresorhus/ip-regex) - Regular expression for matching IP addresses - -## License - -© [silverwind](https://github.com/silverwind), distributed under BSD licence - -Based on previous work by [Felipe Apostol](https://github.com/flipjs) diff --git a/node_modules/is-core-module/.eslintignore b/node_modules/is-core-module/.eslintignore deleted file mode 100644 index 404abb22121cd..0000000000000 --- a/node_modules/is-core-module/.eslintignore +++ /dev/null @@ -1 +0,0 @@ -coverage/ diff --git a/node_modules/is-core-module/.github/FUNDING.yml b/node_modules/is-core-module/.github/FUNDING.yml deleted file mode 100644 index 422ce9b01a7f3..0000000000000 --- a/node_modules/is-core-module/.github/FUNDING.yml +++ /dev/null @@ -1,12 +0,0 @@ -# These are supported funding model platforms - -github: [ljharb] -patreon: # Replace with a single Patreon username -open_collective: # Replace with a single Open Collective username -ko_fi: # Replace with a single Ko-fi username -tidelift: npm/is-core-module -community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry -liberapay: # Replace with a single Liberapay username -issuehunt: # Replace with a single IssueHunt username -otechie: # Replace with a single Otechie username -custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/node_modules/is-core-module/.github/workflows/node-4+.yml b/node_modules/is-core-module/.github/workflows/node-4+.yml deleted file mode 100644 index ba174e1d6c28c..0000000000000 --- a/node_modules/is-core-module/.github/workflows/node-4+.yml +++ /dev/null @@ -1,54 +0,0 @@ -name: 'Tests: node.js' - -on: [pull_request, push] - -jobs: - matrix: - runs-on: ubuntu-latest - outputs: - latest: ${{ steps.set-matrix.outputs.requireds }} - minors: ${{ steps.set-matrix.outputs.optionals }} - steps: - - uses: ljharb/actions/node/matrix@main - id: set-matrix - with: - preset: '>=4' - - latest: - needs: [matrix] - name: 'latest minors' - runs-on: ubuntu-latest - - strategy: - matrix: ${{ fromJson(needs.matrix.outputs.latest) }} - - steps: - - uses: actions/checkout@v2 - - uses: ljharb/actions/node/run@main - name: 'npm install && npm run tests-only' - with: - node-version: ${{ matrix.node-version }} - command: 'tests-only' - minors: - needs: [matrix, latest] - name: 'non-latest minors' - continue-on-error: true - if: ${{ !github.head_ref || !startsWith(github.head_ref, 'renovate') }} - runs-on: ubuntu-latest - - strategy: - matrix: ${{ fromJson(needs.matrix.outputs.minors) }} - - steps: - - uses: actions/checkout@v2 - - uses: ljharb/actions/node/run@main - with: - node-version: ${{ matrix.node-version }} - command: 'tests-only' - - node: - name: 'node 4+' - needs: [latest, minors] - runs-on: ubuntu-latest - steps: - - run: 'echo tests completed' diff --git a/node_modules/is-core-module/.github/workflows/node-iojs.yml b/node_modules/is-core-module/.github/workflows/node-iojs.yml deleted file mode 100644 index f707c3cfc308e..0000000000000 --- a/node_modules/is-core-module/.github/workflows/node-iojs.yml +++ /dev/null @@ -1,58 +0,0 @@ -name: 'Tests: node.js (io.js)' - -on: [pull_request, push] - -jobs: - matrix: - runs-on: ubuntu-latest - outputs: - latest: ${{ steps.set-matrix.outputs.requireds }} - minors: ${{ 
steps.set-matrix.outputs.optionals }} - steps: - - uses: ljharb/actions/node/matrix@main - id: set-matrix - with: - preset: 'iojs' - - latest: - needs: [matrix] - name: 'latest minors' - runs-on: ubuntu-latest - - strategy: - matrix: ${{ fromJson(needs.matrix.outputs.latest) }} - - steps: - - uses: actions/checkout@v2 - - uses: ljharb/actions/node/run@main - name: 'npm install && npm run tests-only' - with: - node-version: ${{ matrix.node-version }} - command: 'tests-only' - skip-ls-check: true - - minors: - needs: [matrix, latest] - name: 'non-latest minors' - continue-on-error: true - if: ${{ !github.head_ref || !startsWith(github.head_ref, 'renovate') }} - runs-on: ubuntu-latest - - strategy: - matrix: ${{ fromJson(needs.matrix.outputs.minors) }} - - steps: - - uses: actions/checkout@v2 - - uses: ljharb/actions/node/run@main - name: 'npm install && npm run tests-only' - with: - node-version: ${{ matrix.node-version }} - command: 'tests-only' - skip-ls-check: true - - node: - name: 'io.js' - needs: [latest, minors] - runs-on: ubuntu-latest - steps: - - run: 'echo tests completed' diff --git a/node_modules/is-core-module/.github/workflows/node-pretest.yml b/node_modules/is-core-module/.github/workflows/node-pretest.yml deleted file mode 100644 index 3921e0ae6cd6b..0000000000000 --- a/node_modules/is-core-module/.github/workflows/node-pretest.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: 'Tests: pretest/posttest' - -on: [pull_request, push] - -jobs: - pretest: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - uses: ljharb/actions/node/run@main - name: 'npm install && npm run pretest' - with: - node-version: 'lts/*' - command: 'pretest' - - posttest: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - uses: ljharb/actions/node/run@main - name: 'npm install && npm run posttest' - with: - node-version: 'lts/*' - command: 'posttest' diff --git a/node_modules/is-core-module/.github/workflows/node-zero.yml b/node_modules/is-core-module/.github/workflows/node-zero.yml deleted file mode 100644 index d044c6031d5b3..0000000000000 --- a/node_modules/is-core-module/.github/workflows/node-zero.yml +++ /dev/null @@ -1,58 +0,0 @@ -name: 'Tests: node.js (0.x)' - -on: [pull_request, push] - -jobs: - matrix: - runs-on: ubuntu-latest - outputs: - stable: ${{ steps.set-matrix.outputs.requireds }} - unstable: ${{ steps.set-matrix.outputs.optionals }} - steps: - - uses: ljharb/actions/node/matrix@main - id: set-matrix - with: - preset: '0.x' - - stable: - needs: [matrix] - name: 'stable minors' - runs-on: ubuntu-latest - - strategy: - matrix: ${{ fromJson(needs.matrix.outputs.stable) }} - - steps: - - uses: actions/checkout@v2 - - uses: ljharb/actions/node/run@main - with: - node-version: ${{ matrix.node-version }} - command: 'tests-only' - cache-node-modules-key: node_modules-${{ github.workflow }}-${{ github.action }}-${{ github.run_id }} - skip-ls-check: true - - unstable: - needs: [matrix, stable] - name: 'unstable minors' - continue-on-error: true - if: ${{ !github.head_ref || !startsWith(github.head_ref, 'renovate') }} - runs-on: ubuntu-latest - - strategy: - matrix: ${{ fromJson(needs.matrix.outputs.unstable) }} - - steps: - - uses: actions/checkout@v2 - - uses: ljharb/actions/node/run@main - with: - node-version: ${{ matrix.node-version }} - command: 'tests-only' - cache-node-modules-key: node_modules-${{ github.workflow }}-${{ github.action }}-${{ github.run_id }} - skip-ls-check: true - - node: - name: 'node 0.x' - needs: [stable, unstable] - runs-on: ubuntu-latest - 
steps: - - run: 'echo tests completed' diff --git a/node_modules/is-core-module/.github/workflows/rebase.yml b/node_modules/is-core-module/.github/workflows/rebase.yml deleted file mode 100644 index 0c2ad39b5f7b8..0000000000000 --- a/node_modules/is-core-module/.github/workflows/rebase.yml +++ /dev/null @@ -1,15 +0,0 @@ -name: Automatic Rebase - -on: [pull_request_target] - -jobs: - _: - name: "Automatic Rebase" - - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v1 - - uses: ljharb/rebase@master - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/node_modules/is-core-module/.github/workflows/require-allow-edits.yml b/node_modules/is-core-module/.github/workflows/require-allow-edits.yml deleted file mode 100644 index aac42d3e29c7a..0000000000000 --- a/node_modules/is-core-module/.github/workflows/require-allow-edits.yml +++ /dev/null @@ -1,14 +0,0 @@ -name: Require “Allow Edits” - -on: [pull_request_target] - -jobs: - _: - name: "Require “Allow Edits”" - - runs-on: ubuntu-latest - - steps: - - uses: ljharb/require-allow-edits@main - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/node_modules/is-core-module/.nycrc b/node_modules/is-core-module/.nycrc deleted file mode 100644 index 1826526e091b8..0000000000000 --- a/node_modules/is-core-module/.nycrc +++ /dev/null @@ -1,13 +0,0 @@ -{ - "all": true, - "check-coverage": false, - "reporter": ["text-summary", "text", "html", "json"], - "lines": 86, - "statements": 85.93, - "functions": 82.43, - "branches": 76.06, - "exclude": [ - "coverage", - "test" - ] -} diff --git a/node_modules/is-core-module/CHANGELOG.md b/node_modules/is-core-module/CHANGELOG.md deleted file mode 100644 index 4cdb33d005960..0000000000000 --- a/node_modules/is-core-module/CHANGELOG.md +++ /dev/null @@ -1,58 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
- -## [v2.2.0](https://github.com/inspect-js/is-core-module/compare/v2.1.0...v2.2.0) - 2020-11-26 - -### Commits - -- [Tests] migrate tests to Github Actions [`c919f57`](https://github.com/inspect-js/is-core-module/commit/c919f573c0a92d10a0acad0b650b5aecb033d426) -- [patch] `core.json`: %s/ /\t/g [`db3f685`](https://github.com/inspect-js/is-core-module/commit/db3f68581f53e73cc09cd675955eb1bdd6a5a39b) -- [Tests] run `nyc` on all tests [`b2f925f`](https://github.com/inspect-js/is-core-module/commit/b2f925f8866f210ef441f39fcc8cc42692ab89b1) -- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`; add `safe-publish-latest` [`89f02a2`](https://github.com/inspect-js/is-core-module/commit/89f02a2b4162246dea303a6ee31bb9a550b05c72) -- [New] add `path/posix`, `path/win32`, `util/types` [`77f94f1`](https://github.com/inspect-js/is-core-module/commit/77f94f1e90ffd7c0be2a3f1aa8574ebf7fd981b3) - -## [v2.1.0](https://github.com/inspect-js/is-core-module/compare/v2.0.0...v2.1.0) - 2020-11-04 - -### Commits - -- [Dev Deps] update `eslint` [`5e0034e`](https://github.com/inspect-js/is-core-module/commit/5e0034eae57c09c8f1bd769f502486a00f56c6e4) -- [New] Add `diagnostics_channel` [`c2d83d0`](https://github.com/inspect-js/is-core-module/commit/c2d83d0a0225a1a658945d9bab7036ea347d29ec) - -## [v2.0.0](https://github.com/inspect-js/is-core-module/compare/v1.0.2...v2.0.0) - 2020-09-29 - -### Commits - -- v2 implementation [`865aeb5`](https://github.com/inspect-js/is-core-module/commit/865aeb5ca0e90248a3dfff5d7622e4751fdeb9cd) -- Only apps should have lockfiles [`5a5e660`](https://github.com/inspect-js/is-core-module/commit/5a5e660d568e37eb44e17fb1ebb12a105205fc2b) -- Initial commit for v2 [`5a51524`](https://github.com/inspect-js/is-core-module/commit/5a51524e06f92adece5fbb138c69b7b9748a2348) -- Tests [`116eae4`](https://github.com/inspect-js/is-core-module/commit/116eae4fccd01bc72c1fd3cc4b7561c387afc496) -- [meta] add `auto-changelog` [`c24388b`](https://github.com/inspect-js/is-core-module/commit/c24388bee828d223040519d1f5b226ca35beee63) -- [actions] add "Automatic Rebase" and "require allow edits" actions [`34292db`](https://github.com/inspect-js/is-core-module/commit/34292dbcbadae0868aff03c22dbd8b7b8a11558a) -- [Tests] add `npm run lint` [`4f9eeee`](https://github.com/inspect-js/is-core-module/commit/4f9eeee7ddff10698bbf528620f4dc8d4fa3e697) -- [readme] fix travis badges, https all URLs [`e516a73`](https://github.com/inspect-js/is-core-module/commit/e516a73b0dccce20938c432b1ba512eae8eff9e9) -- [meta] create FUNDING.yml [`1aabebc`](https://github.com/inspect-js/is-core-module/commit/1aabebca98d01f8a04e46bc2e2520fa93cf21ac6) -- [Fix] `domain`: domain landed sometime > v0.7.7 and <= v0.7.12 [`2df7d37`](https://github.com/inspect-js/is-core-module/commit/2df7d37595d41b15eeada732b706b926c2771655) -- [Fix] `sys`: worked in 0.6, not 0.7, and 0.8+ [`a75c134`](https://github.com/inspect-js/is-core-module/commit/a75c134229e1e9441801f6b73f6a52489346eb65) - -## [v1.0.2](https://github.com/inspect-js/is-core-module/compare/v1.0.1...v1.0.2) - 2014-09-28 - -### Commits - -- simpler [`66fe90f`](https://github.com/inspect-js/is-core-module/commit/66fe90f9771581b9adc0c3900baa52c21b5baea2) - -## [v1.0.1](https://github.com/inspect-js/is-core-module/compare/v1.0.0...v1.0.1) - 2014-09-28 - -### Commits - -- remove stupid [`f21f906`](https://github.com/inspect-js/is-core-module/commit/f21f906f882c2bd656a5fc5ed6fbe48ddaffb2ac) -- update readme 
[`1eff0ec`](https://github.com/inspect-js/is-core-module/commit/1eff0ec69798d1ec65771552d1562911e90a8027) - -## v1.0.0 - 2014-09-28 - -### Commits - -- init [`48e5e76`](https://github.com/inspect-js/is-core-module/commit/48e5e76cac378fddb8c1f7d4055b8dfc943d6b96) diff --git a/node_modules/is-core-module/README.md b/node_modules/is-core-module/README.md deleted file mode 100644 index 479d6d24c0f04..0000000000000 --- a/node_modules/is-core-module/README.md +++ /dev/null @@ -1,37 +0,0 @@ -# is-core-module <sup>[![Version Badge][2]][1]</sup> - -[![Build Status][3]][4] -[![dependency status][5]][6] -[![dev dependency status][7]][8] -[![License][license-image]][license-url] -[![Downloads][downloads-image]][downloads-url] - -[![npm badge][11]][1] - -Is this specifier a node.js core module? Optionally provide a node version to check; defaults to the current node version. - -## Example - -```js -var isCore = require('is-core-module'); -var assert = require('assert'); -assert(isCore('fs')); -assert(!isCore('butts')); -``` - -## Tests -Clone the repo, `npm install`, and run `npm test` - -[1]: https://npmjs.org/package/is-core-module -[2]: https://versionbadg.es/inspect-js/is-core-module.svg -[3]: https://travis-ci.com/inspect-js/is-core-module.svg -[4]: https://travis-ci.com/inspect-js/is-core-module -[5]: https://david-dm.org/inspect-js/is-core-module.svg -[6]: https://david-dm.org/inspect-js/is-core-module -[7]: https://david-dm.org/inspect-js/is-core-module/dev-status.svg -[8]: https://david-dm.org/inspect-js/is-core-module#info=devDependencies -[11]: https://nodei.co/npm/is-core-module.png?downloads=true&stars=true -[license-image]: https://img.shields.io/npm/l/is-core-module.svg -[license-url]: LICENSE -[downloads-image]: https://img.shields.io/npm/dm/is-core-module.svg -[downloads-url]: https://npm-stat.com/charts.html?package=is-core-module diff --git a/node_modules/is-core-module/core.json b/node_modules/is-core-module/core.json index 0238b61a4c71e..773222f6ba3bc 100644 --- a/node_modules/is-core-module/core.json +++ b/node_modules/is-core-module/core.json @@ -1,83 +1,148 @@ { "assert": true, + "node:assert": ">= 16", "assert/strict": ">= 15", + "node:assert/strict": ">= 16", "async_hooks": ">= 8", + "node:async_hooks": ">= 16", "buffer_ieee754": "< 0.9.7", "buffer": true, + "node:buffer": ">= 16", "child_process": true, + "node:child_process": ">= 16", "cluster": true, + "node:cluster": ">= 16", "console": true, + "node:console": ">= 16", "constants": true, + "node:constants": ">= 16", "crypto": true, + "node:crypto": ">= 16", "_debug_agent": ">= 1 && < 8", "_debugger": "< 8", "dgram": true, - "diagnostics_channel": ">= 15.1", + "node:dgram": ">= 16", + "diagnostics_channel": [">= 14.17 && < 15", ">= 15.1"], + "node:diagnostics_channel": ">= 16", "dns": true, + "node:dns": ">= 16", "dns/promises": ">= 15", + "node:dns/promises": ">= 16", "domain": ">= 0.7.12", + "node:domain": ">= 16", "events": true, + "node:events": ">= 16", "freelist": "< 6", "fs": true, + "node:fs": ">= 16", "fs/promises": [">= 10 && < 10.1", ">= 14"], + "node:fs/promises": ">= 16", "_http_agent": ">= 0.11.1", + "node:_http_agent": ">= 16", "_http_client": ">= 0.11.1", + "node:_http_client": ">= 16", "_http_common": ">= 0.11.1", + "node:_http_common": ">= 16", "_http_incoming": ">= 0.11.1", + "node:_http_incoming": ">= 16", "_http_outgoing": ">= 0.11.1", + "node:_http_outgoing": ">= 16", "_http_server": ">= 0.11.1", + "node:_http_server": ">= 16", "http": true, + "node:http": ">= 16", "http2": ">= 8.8", + "node:http2": 
">= 16", "https": true, - "inspector": ">= 8.0.0", + "node:https": ">= 16", + "inspector": ">= 8", + "node:inspector": ">= 16", "_linklist": "< 8", "module": true, + "node:module": ">= 16", "net": true, - "node-inspect/lib/_inspect": ">= 7.6.0 && < 12", - "node-inspect/lib/internal/inspect_client": ">= 7.6.0 && < 12", - "node-inspect/lib/internal/inspect_repl": ">= 7.6.0 && < 12", + "node:net": ">= 16", + "node-inspect/lib/_inspect": ">= 7.6 && < 12", + "node-inspect/lib/internal/inspect_client": ">= 7.6 && < 12", + "node-inspect/lib/internal/inspect_repl": ">= 7.6 && < 12", "os": true, + "node:os": ">= 16", "path": true, + "node:path": ">= 16", "path/posix": ">= 15.3", + "node:path/posix": ">= 16", "path/win32": ">= 15.3", + "node:path/win32": ">= 16", "perf_hooks": ">= 8.5", + "node:perf_hooks": ">= 16", "process": ">= 1", + "node:process": ">= 16", "punycode": true, + "node:punycode": ">= 16", "querystring": true, + "node:querystring": ">= 16", "readline": true, + "node:readline": ">= 16", "repl": true, + "node:repl": ">= 16", "smalloc": ">= 0.11.5 && < 3", "_stream_duplex": ">= 0.9.4", + "node:_stream_duplex": ">= 16", "_stream_transform": ">= 0.9.4", + "node:_stream_transform": ">= 16", "_stream_wrap": ">= 1.4.1", + "node:_stream_wrap": ">= 16", "_stream_passthrough": ">= 0.9.4", + "node:_stream_passthrough": ">= 16", "_stream_readable": ">= 0.9.4", + "node:_stream_readable": ">= 16", "_stream_writable": ">= 0.9.4", + "node:_stream_writable": ">= 16", "stream": true, + "node:stream": ">= 16", "stream/promises": ">= 15", + "node:stream/promises": ">= 16", + "stream/web": ">= 16.5", + "node:stream/web": ">= 16.5", "string_decoder": true, + "node:string_decoder": ">= 16", "sys": [">= 0.6 && < 0.7", ">= 0.8"], + "node:sys": ">= 16", "timers": true, + "node:timers": ">= 16", "timers/promises": ">= 15", + "node:timers/promises": ">= 16", "_tls_common": ">= 0.11.13", + "node:_tls_common": ">= 16", "_tls_legacy": ">= 0.11.3 && < 10", "_tls_wrap": ">= 0.11.3", + "node:_tls_wrap": ">= 16", "tls": true, + "node:tls": ">= 16", "trace_events": ">= 10", + "node:trace_events": ">= 16", "tty": true, + "node:tty": ">= 16", "url": true, + "node:url": ">= 16", "util": true, + "node:util": ">= 16", "util/types": ">= 15.3", + "node:util/types": ">= 16", "v8/tools/arguments": ">= 10 && < 12", - "v8/tools/codemap": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/consarray": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/csvparser": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/logreader": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/profile_view": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/splaytree": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], + "v8/tools/codemap": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/consarray": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/csvparser": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/logreader": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/profile_view": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/splaytree": [">= 4.4 && < 5", ">= 5.2 && < 12"], "v8": ">= 1", + "node:v8": ">= 16", "vm": true, + "node:vm": ">= 16", "wasi": ">= 13.4 && < 13.5", "worker_threads": ">= 11.7", - "zlib": true + "node:worker_threads": ">= 16", + "zlib": true, + "node:zlib": ">= 16" } diff --git a/node_modules/is-core-module/package.json b/node_modules/is-core-module/package.json index 21341cc431a50..464294eae96be 100644 --- a/node_modules/is-core-module/package.json +++ b/node_modules/is-core-module/package.json @@ -1,6 +1,6 @@ { "name": 
"is-core-module", - "version": "2.2.0", + "version": "2.5.0", "description": "Is this specifier a node.js core module?", "main": "index.js", "exports": { @@ -13,7 +13,8 @@ "./package.json": "./package.json" }, "scripts": { - "prepublish": "safe-publish-latest", + "prepublish": "not-in-publish || npm run prepublishOnly", + "prepublishOnly": "safe-publish-latest", "lint": "eslint .", "pretest": "npm run lint", "tests-only": "tape 'test/**/*.js'", @@ -47,13 +48,14 @@ "has": "^1.0.3" }, "devDependencies": { - "@ljharb/eslint-config": "^17.3.0", - "aud": "^1.1.3", - "auto-changelog": "^2.2.1", - "eslint": "^7.14.0", + "@ljharb/eslint-config": "^17.6.0", + "aud": "^1.1.5", + "auto-changelog": "^2.3.0", + "eslint": "^7.30.0", "nyc": "^10.3.2", "safe-publish-latest": "^1.1.4", - "tape": "^5.0.1" + "semver": "^6.3.0", + "tape": "^5.2.2" }, "auto-changelog": { "output": "CHANGELOG.md", diff --git a/node_modules/is-core-module/test/index.js b/node_modules/is-core-module/test/index.js index 99659bcf113f7..281c7e9a4b134 100644 --- a/node_modules/is-core-module/test/index.js +++ b/node_modules/is-core-module/test/index.js @@ -2,9 +2,12 @@ var test = require('tape'); var keys = require('object-keys'); +var semver = require('semver'); var isCore = require('../'); var data = require('../core.json'); +var supportsNodePrefix = semver.satisfies(process.versions.node, '>= 16'); + test('core modules', function (t) { t.test('isCore()', function (st) { st.ok(isCore('fs')); @@ -48,6 +51,17 @@ test('core modules', function (t) { function () { require(mod); }, // eslint-disable-line no-loop-func 'requiring ' + mod + ' does not throw' ); + if (supportsNodePrefix) { + st.doesNotThrow( + function () { require('node:' + mod); }, // eslint-disable-line no-loop-func + 'requiring node:' + mod + ' does not throw' + ); + } else { + st['throws']( + function () { require('node:' + mod); }, // eslint-disable-line no-loop-func + 'requiring node:' + mod + ' throws' + ); + } } } st.end(); @@ -73,6 +87,17 @@ test('core modules', function (t) { function () { require(mod); }, // eslint-disable-line no-loop-func 'requiring ' + mod + ' does not throw' ); + if (supportsNodePrefix) { + st.doesNotThrow( + function () { require('node:' + mod); }, // eslint-disable-line no-loop-func + 'requiring node:' + mod + ' does not throw' + ); + } else { + st['throws']( + function () { require('node:' + mod); }, // eslint-disable-line no-loop-func + 'requiring node:' + mod + ' throws' + ); + } } } } diff --git a/node_modules/is-lambda/.npmignore b/node_modules/is-lambda/.npmignore deleted file mode 100644 index 3c3629e647f5d..0000000000000 --- a/node_modules/is-lambda/.npmignore +++ /dev/null @@ -1 +0,0 @@ -node_modules diff --git a/node_modules/is-lambda/.travis.yml b/node_modules/is-lambda/.travis.yml deleted file mode 100644 index 03dcca57bcc80..0000000000000 --- a/node_modules/is-lambda/.travis.yml +++ /dev/null @@ -1,8 +0,0 @@ -language: node_js -node_js: -- '7' -- '6' -- '5' -- '4' -- '0.12' -- '0.10' diff --git a/node_modules/is-lambda/README.md b/node_modules/is-lambda/README.md deleted file mode 100644 index 31a8f566ca002..0000000000000 --- a/node_modules/is-lambda/README.md +++ /dev/null @@ -1,27 +0,0 @@ -# is-lambda - -Returns `true` if the current environment is an [AWS -Lambda](https://aws.amazon.com/lambda/) server. 
- -[![Build status](https://travis-ci.org/watson/is-lambda.svg?branch=master)](https://travis-ci.org/watson/is-lambda) -[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://github.com/feross/standard) - -## Installation - -``` -npm install is-lambda -``` - -## Usage - -```js -var isLambda = require('is-lambda') - -if (isLambda) { - console.log('The code is running on a AWS Lambda') -} -``` - -## License - -MIT diff --git a/node_modules/is-typedarray/README.md b/node_modules/is-typedarray/README.md deleted file mode 100644 index 2752863919358..0000000000000 --- a/node_modules/is-typedarray/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# is-typedarray [![locked](http://badges.github.io/stability-badges/dist/locked.svg)](http://github.com/badges/stability-badges) - -Detect whether or not an object is a -[Typed Array](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Typed_arrays). - -## Usage - -[![NPM](https://nodei.co/npm/is-typedarray.png)](https://nodei.co/npm/is-typedarray/) - -### isTypedArray(array) - -Returns `true` when array is a Typed Array, and `false` when it is not. - -## License - -MIT. See [LICENSE.md](http://github.com/hughsk/is-typedarray/blob/master/LICENSE.md) for details. diff --git a/node_modules/isarray/.npmignore b/node_modules/isarray/.npmignore deleted file mode 100644 index 3c3629e647f5d..0000000000000 --- a/node_modules/isarray/.npmignore +++ /dev/null @@ -1 +0,0 @@ -node_modules diff --git a/node_modules/isarray/.travis.yml b/node_modules/isarray/.travis.yml deleted file mode 100644 index cc4dba29d959a..0000000000000 --- a/node_modules/isarray/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -language: node_js -node_js: - - "0.8" - - "0.10" diff --git a/node_modules/isarray/README.md b/node_modules/isarray/README.md deleted file mode 100644 index 16d2c59c6195f..0000000000000 --- a/node_modules/isarray/README.md +++ /dev/null @@ -1,60 +0,0 @@ - -# isarray - -`Array#isArray` for older browsers. - -[![build status](https://secure.travis-ci.org/juliangruber/isarray.svg)](http://travis-ci.org/juliangruber/isarray) -[![downloads](https://img.shields.io/npm/dm/isarray.svg)](https://www.npmjs.org/package/isarray) - -[![browser support](https://ci.testling.com/juliangruber/isarray.png) -](https://ci.testling.com/juliangruber/isarray) - -## Usage - -```js -var isArray = require('isarray'); - -console.log(isArray([])); // => true -console.log(isArray({})); // => false -``` - -## Installation - -With [npm](http://npmjs.org) do - -```bash -$ npm install isarray -``` - -Then bundle for the browser with -[browserify](https://github.com/substack/browserify). - -With [component](http://component.io) do - -```bash -$ component install juliangruber/isarray -``` - -## License - -(MIT) - -Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/isexe/.npmignore b/node_modules/isexe/.npmignore deleted file mode 100644 index c1cb757acf58a..0000000000000 --- a/node_modules/isexe/.npmignore +++ /dev/null @@ -1,2 +0,0 @@ -.nyc_output/ -coverage/ diff --git a/node_modules/isexe/README.md b/node_modules/isexe/README.md deleted file mode 100644 index 35769e84408ce..0000000000000 --- a/node_modules/isexe/README.md +++ /dev/null @@ -1,51 +0,0 @@ -# isexe - -Minimal module to check if a file is executable, and a normal file. - -Uses `fs.stat` and tests against the `PATHEXT` environment variable on -Windows. - -## USAGE - -```javascript -var isexe = require('isexe') -isexe('some-file-name', function (err, isExe) { - if (err) { - console.error('probably file does not exist or something', err) - } else if (isExe) { - console.error('this thing can be run') - } else { - console.error('cannot be run') - } -}) - -// same thing but synchronous, throws errors -var isExe = isexe.sync('some-file-name') - -// treat errors as just "not executable" -isexe('maybe-missing-file', { ignoreErrors: true }, callback) -var isExe = isexe.sync('maybe-missing-file', { ignoreErrors: true }) -``` - -## API - -### `isexe(path, [options], [callback])` - -Check if the path is executable. If no callback provided, and a -global `Promise` object is available, then a Promise will be returned. - -Will raise whatever errors may be raised by `fs.stat`, unless -`options.ignoreErrors` is set to true. - -### `isexe.sync(path, [options])` - -Same as `isexe` but returns the value and throws any errors raised. - -### Options - -* `ignoreErrors` Treat all errors as "no, this is not executable", but - don't raise them. -* `uid` Number to use as the user id -* `gid` Number to use as the group id -* `pathExt` List of path extensions to use instead of `PATHEXT` - environment variable on Windows. diff --git a/node_modules/isstream/.npmignore b/node_modules/isstream/.npmignore deleted file mode 100644 index aa1ec1ea06181..0000000000000 --- a/node_modules/isstream/.npmignore +++ /dev/null @@ -1 +0,0 @@ -*.tgz diff --git a/node_modules/isstream/.travis.yml b/node_modules/isstream/.travis.yml deleted file mode 100644 index 1fec2ab9afd64..0000000000000 --- a/node_modules/isstream/.travis.yml +++ /dev/null @@ -1,12 +0,0 @@ -language: node_js -node_js: - - "0.8" - - "0.10" - - "0.11" -branches: - only: - - master -notifications: - email: - - rod@vagg.org -script: npm test diff --git a/node_modules/isstream/README.md b/node_modules/isstream/README.md deleted file mode 100644 index 06770e82f2f27..0000000000000 --- a/node_modules/isstream/README.md +++ /dev/null @@ -1,66 +0,0 @@ -# isStream - -[![Build Status](https://secure.travis-ci.org/rvagg/isstream.png)](http://travis-ci.org/rvagg/isstream) - -**Test if an object is a `Stream`** - -[![NPM](https://nodei.co/npm/isstream.svg)](https://nodei.co/npm/isstream/) - -The missing `Stream.isStream(obj)`: determine if an object is standard Node.js `Stream`. 
Works for Node-core `Stream` objects (for 0.8, 0.10, 0.11, and in theory, older and newer versions) and all versions of **[readable-stream](https://github.com/isaacs/readable-stream)**. - -## Usage: - -```js -var isStream = require('isstream') -var Stream = require('stream') - -isStream(new Stream()) // true - -isStream({}) // false - -isStream(new Stream.Readable()) // true -isStream(new Stream.Writable()) // true -isStream(new Stream.Duplex()) // true -isStream(new Stream.Transform()) // true -isStream(new Stream.PassThrough()) // true -``` - -## But wait! There's more! - -You can also test for `isReadable(obj)`, `isWritable(obj)` and `isDuplex(obj)` to test for implementations of Streams2 (and Streams3) base classes. - -```js -var isReadable = require('isstream').isReadable -var isWritable = require('isstream').isWritable -var isDuplex = require('isstream').isDuplex -var Stream = require('stream') - -isReadable(new Stream()) // false -isWritable(new Stream()) // false -isDuplex(new Stream()) // false - -isReadable(new Stream.Readable()) // true -isReadable(new Stream.Writable()) // false -isReadable(new Stream.Duplex()) // true -isReadable(new Stream.Transform()) // true -isReadable(new Stream.PassThrough()) // true - -isWritable(new Stream.Readable()) // false -isWritable(new Stream.Writable()) // true -isWritable(new Stream.Duplex()) // true -isWritable(new Stream.Transform()) // true -isWritable(new Stream.PassThrough()) // true - -isDuplex(new Stream.Readable()) // false -isDuplex(new Stream.Writable()) // false -isDuplex(new Stream.Duplex()) // true -isDuplex(new Stream.Transform()) // true -isDuplex(new Stream.PassThrough()) // true -``` - -*Reminder: when implementing your own streams, please [use **readable-stream** rather than core streams](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).* - - -## License - -**isStream** is Copyright (c) 2015 Rod Vagg [@rvagg](https://twitter.com/rvagg) and licenced under the MIT licence. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details. diff --git a/node_modules/jsbn/.npmignore b/node_modules/jsbn/.npmignore deleted file mode 100644 index 28f1ba7565f46..0000000000000 --- a/node_modules/jsbn/.npmignore +++ /dev/null @@ -1,2 +0,0 @@ -node_modules -.DS_Store \ No newline at end of file diff --git a/node_modules/jsbn/README.md b/node_modules/jsbn/README.md deleted file mode 100644 index 7aac67f53ff0e..0000000000000 --- a/node_modules/jsbn/README.md +++ /dev/null @@ -1,175 +0,0 @@ -# jsbn: javascript big number - -[Tom Wu's Original Website](http://www-cs-students.stanford.edu/~tjw/jsbn/) - -I felt compelled to put this on github and publish to npm. I haven't tested every other big integer library out there, but the few that I have tested in comparison to this one have not even come close in performance. I am aware of the `bi` module on npm, however it has been modified and I wanted to publish the original without modifications. This is jsbn and jsbn2 from Tom Wu's original website above, with the modular pattern applied to prevent global leaks and to allow for use with node.js on the server side. 
- -## usage - - var BigInteger = require('jsbn'); - - var a = new BigInteger('91823918239182398123'); - alert(a.bitLength()); // 67 - - -## API - -### bi.toString() - -returns the base-10 number as a string - -### bi.negate() - -returns a new BigInteger equal to the negation of `bi` - -### bi.abs - -returns new BI of absolute value - -### bi.compareTo - - - -### bi.bitLength - - - -### bi.mod - - - -### bi.modPowInt - - - -### bi.clone - - - -### bi.intValue - - - -### bi.byteValue - - - -### bi.shortValue - - - -### bi.signum - - - -### bi.toByteArray - - - -### bi.equals - - - -### bi.min - - - -### bi.max - - - -### bi.and - - - -### bi.or - - - -### bi.xor - - - -### bi.andNot - - - -### bi.not - - - -### bi.shiftLeft - - - -### bi.shiftRight - - - -### bi.getLowestSetBit - - - -### bi.bitCount - - - -### bi.testBit - - - -### bi.setBit - - - -### bi.clearBit - - - -### bi.flipBit - - - -### bi.add - - - -### bi.subtract - - - -### bi.multiply - - - -### bi.divide - - - -### bi.remainder - - - -### bi.divideAndRemainder - - - -### bi.modPow - - - -### bi.modInverse - - - -### bi.pow - - - -### bi.gcd - - - -### bi.isProbablePrime - - diff --git a/node_modules/json-parse-even-better-errors/CHANGELOG.md b/node_modules/json-parse-even-better-errors/CHANGELOG.md deleted file mode 100644 index dfd67330a6aba..0000000000000 --- a/node_modules/json-parse-even-better-errors/CHANGELOG.md +++ /dev/null @@ -1,50 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -## 2.0.0 - -* Add custom error classes - -<a name="1.0.2"></a> -## [1.0.2](https://github.com/npm/json-parse-even-better-errors/compare/v1.0.1...v1.0.2) (2018-03-30) - - -### Bug Fixes - -* **messages:** More friendly messages for non-string ([#1](https://github.com/npm/json-parse-even-better-errors/issues/1)) ([a476d42](https://github.com/npm/json-parse-even-better-errors/commit/a476d42)) - - - -<a name="1.0.1"></a> -## [1.0.1](https://github.com/npm/json-parse-even-better-errors/compare/v1.0.0...v1.0.1) (2017-08-16) - - -### Bug Fixes - -* **license:** oops. Forgot to update license.md ([efe2958](https://github.com/npm/json-parse-even-better-errors/commit/efe2958)) - - - -<a name="1.0.0"></a> -# 1.0.0 (2017-08-15) - - -### Features - -* **init:** Initial Commit ([562c977](https://github.com/npm/json-parse-even-better-errors/commit/562c977)) - - -### BREAKING CHANGES - -* **init:** This is the first commit! - - - -<a name="0.1.0"></a> -# 0.1.0 (2017-08-15) - - -### Features - -* **init:** Initial Commit ([9dd1a19](https://github.com/npm/json-parse-even-better-errors/commit/9dd1a19)) diff --git a/node_modules/json-parse-even-better-errors/README.md b/node_modules/json-parse-even-better-errors/README.md deleted file mode 100644 index 2799efe69ec84..0000000000000 --- a/node_modules/json-parse-even-better-errors/README.md +++ /dev/null @@ -1,96 +0,0 @@ -# json-parse-even-better-errors - -[`json-parse-even-better-errors`](https://github.com/npm/json-parse-even-better-errors) -is a Node.js library for getting nicer errors out of `JSON.parse()`, -including context and position of the parse errors. - -It also preserves the newline and indentation styles of the JSON data, by -putting them in the object or array in the `Symbol.for('indent')` and -`Symbol.for('newline')` properties. 
- -## Install - -`$ npm install --save json-parse-even-better-errors` - -## Table of Contents - -* [Example](#example) -* [Features](#features) -* [Contributing](#contributing) -* [API](#api) - * [`parse`](#parse) - -### Example - -```javascript -const parseJson = require('json-parse-even-better-errors') - -parseJson('"foo"') // returns the string 'foo' -parseJson('garbage') // more useful error message -parseJson.noExceptions('garbage') // returns undefined -``` - -### Features - -* Like JSON.parse, but the errors are better. -* Strips a leading byte-order-mark that you sometimes get reading files. -* Has a `noExceptions` method that returns undefined rather than throwing. -* Attaches the newline character(s) used to the `Symbol.for('newline')` - property on objects and arrays. -* Attaches the indentation character(s) used to the `Symbol.for('indent')` - property on objects and arrays. - -## Indentation - -To preserve indentation when the file is saved back to disk, use -`data[Symbol.for('indent')]` as the third argument to `JSON.stringify`, and -if you want to preserve windows `\r\n` newlines, replace the `\n` chars in -the string with `data[Symbol.for('newline')]`. - -For example: - -```js -const txt = await readFile('./package.json', 'utf8') -const data = parseJsonEvenBetterErrors(txt) -const indent = Symbol.for('indent') -const newline = Symbol.for('newline') -// .. do some stuff to the data .. -const string = JSON.stringify(data, null, data[indent]) + '\n' -const eolFixed = data[newline] === '\n' ? string - : string.replace(/\n/g, data[newline]) -await writeFile('./package.json', eolFixed) -``` - -Indentation is determined by looking at the whitespace between the initial -`{` and `[` and the character that follows it. If you have lots of weird -inconsistent indentation, then it won't track that or give you any way to -preserve it. Whether this is a bug or a feature is debatable ;) - -### API - -#### <a name="parse"></a> `parse(txt, reviver = null, context = 20)` - -Works just like `JSON.parse`, but will include a bit more information when -an error happens, and attaches a `Symbol.for('indent')` and -`Symbol.for('newline')` on objects and arrays. This throws a -`JSONParseError`. - -#### <a name="parse"></a> `parse.noExceptions(txt, reviver = null)` - -Works just like `JSON.parse`, but will return `undefined` rather than -throwing an error. - -#### <a name="jsonparseerror"></a> `class JSONParseError(er, text, context = 20, caller = null)` - -Extends the JavaScript `SyntaxError` class to parse the message and provide -better metadata. - -Pass in the error thrown by the built-in `JSON.parse`, and the text being -parsed, and it'll parse out the bits needed to be helpful. - -`context` defaults to 20. - -Set a `caller` function to trim internal implementation details out of the -stack trace. When calling `parseJson`, this is set to the `parseJson` -function. If not set, then the constructor defaults to itself, so the -stack trace will point to the spot where you call `new JSONParseError`. 
diff --git a/node_modules/json-schema-traverse/.eslintrc.yml b/node_modules/json-schema-traverse/.eslintrc.yml deleted file mode 100644 index ab1762da9c119..0000000000000 --- a/node_modules/json-schema-traverse/.eslintrc.yml +++ /dev/null @@ -1,27 +0,0 @@ -extends: eslint:recommended -env: - node: true - browser: true -rules: - block-scoped-var: 2 - complexity: [2, 13] - curly: [2, multi-or-nest, consistent] - dot-location: [2, property] - dot-notation: 2 - indent: [2, 2, SwitchCase: 1] - linebreak-style: [2, unix] - new-cap: 2 - no-console: [2, allow: [warn, error]] - no-else-return: 2 - no-eq-null: 2 - no-fallthrough: 2 - no-invalid-this: 2 - no-return-assign: 2 - no-shadow: 1 - no-trailing-spaces: 2 - no-use-before-define: [2, nofunc] - quotes: [2, single, avoid-escape] - semi: [2, always] - strict: [2, global] - valid-jsdoc: [2, requireReturn: false] - no-control-regex: 0 diff --git a/node_modules/json-schema-traverse/.travis.yml b/node_modules/json-schema-traverse/.travis.yml deleted file mode 100644 index 7ddce74b84199..0000000000000 --- a/node_modules/json-schema-traverse/.travis.yml +++ /dev/null @@ -1,8 +0,0 @@ -language: node_js -node_js: - - "4" - - "6" - - "7" - - "8" -after_script: - - coveralls < coverage/lcov.info diff --git a/node_modules/json-schema-traverse/README.md b/node_modules/json-schema-traverse/README.md deleted file mode 100644 index d5ccaf450a2a2..0000000000000 --- a/node_modules/json-schema-traverse/README.md +++ /dev/null @@ -1,83 +0,0 @@ -# json-schema-traverse -Traverse JSON Schema passing each schema object to callback - -[![Build Status](https://travis-ci.org/epoberezkin/json-schema-traverse.svg?branch=master)](https://travis-ci.org/epoberezkin/json-schema-traverse) -[![npm version](https://badge.fury.io/js/json-schema-traverse.svg)](https://www.npmjs.com/package/json-schema-traverse) -[![Coverage Status](https://coveralls.io/repos/github/epoberezkin/json-schema-traverse/badge.svg?branch=master)](https://coveralls.io/github/epoberezkin/json-schema-traverse?branch=master) - - -## Install - -``` -npm install json-schema-traverse -``` - - -## Usage - -```javascript -const traverse = require('json-schema-traverse'); -const schema = { - properties: { - foo: {type: 'string'}, - bar: {type: 'integer'} - } -}; - -traverse(schema, {cb}); -// cb is called 3 times with: -// 1. root schema -// 2. {type: 'string'} -// 3. {type: 'integer'} - -// Or: - -traverse(schema, {cb: {pre, post}}); -// pre is called 3 times with: -// 1. root schema -// 2. {type: 'string'} -// 3. {type: 'integer'} -// -// post is called 3 times with: -// 1. {type: 'string'} -// 2. {type: 'integer'} -// 3. root schema - -``` - -Callback function `cb` is called for each schema object (not including draft-06 boolean schemas), including the root schema, in pre-order traversal. Schema references ($ref) are not resolved, they are passed as is. Alternatively, you can pass a `{pre, post}` object as `cb`, and then `pre` will be called before traversing child elements, and `post` will be called after all child elements have been traversed. - -Callback is passed these parameters: - -- _schema_: the current schema object -- _JSON pointer_: from the root schema to the current schema object -- _root schema_: the schema passed to `traverse` object -- _parent JSON pointer_: from the root schema to the parent schema object (see below) -- _parent keyword_: the keyword inside which this schema appears (e.g. `properties`, `anyOf`, etc.) 
-- _parent schema_: not necessarily parent object/array; in the example above the parent schema for `{type: 'string'}` is the root schema -- _index/property_: index or property name in the array/object containing multiple schemas; in the example above for `{type: 'string'}` the property name is `'foo'` - - -## Traverse objects in all unknown keywords - -```javascript -const traverse = require('json-schema-traverse'); -const schema = { - mySchema: { - minimum: 1, - maximum: 2 - } -}; - -traverse(schema, {allKeys: true, cb}); -// cb is called 2 times with: -// 1. root schema -// 2. mySchema -``` - -Without option `allKeys: true` callback will be called only with root schema. - - -## License - -[MIT](https://github.com/epoberezkin/json-schema-traverse/blob/master/LICENSE) diff --git a/node_modules/json-schema-traverse/spec/.eslintrc.yml b/node_modules/json-schema-traverse/spec/.eslintrc.yml deleted file mode 100644 index 3344da7eb323b..0000000000000 --- a/node_modules/json-schema-traverse/spec/.eslintrc.yml +++ /dev/null @@ -1,6 +0,0 @@ -parserOptions: - ecmaVersion: 6 -globals: - beforeEach: false - describe: false - it: false diff --git a/node_modules/json-schema/README.md b/node_modules/json-schema/README.md deleted file mode 100644 index ccc591b68fc58..0000000000000 --- a/node_modules/json-schema/README.md +++ /dev/null @@ -1,5 +0,0 @@ -JSON Schema is a repository for the JSON Schema specification, reference schemas and a CommonJS implementation of JSON Schema (not the only JavaScript implementation of JSON Schema, JSV is another excellent JavaScript validator). - -Code is licensed under the AFL or BSD license as part of the Persevere -project which is administered under the Dojo foundation, -and all contributions require a Dojo CLA. \ No newline at end of file diff --git a/node_modules/json-stringify-nice/.github/FUNDING.yml b/node_modules/json-stringify-nice/.github/FUNDING.yml deleted file mode 100644 index 20d8c03a4dca6..0000000000000 --- a/node_modules/json-stringify-nice/.github/FUNDING.yml +++ /dev/null @@ -1,3 +0,0 @@ -# These are supported funding model platforms - -github: [isaacs] diff --git a/node_modules/json-stringify-nice/.npmignore b/node_modules/json-stringify-nice/.npmignore deleted file mode 100644 index e69acb4cf452b..0000000000000 --- a/node_modules/json-stringify-nice/.npmignore +++ /dev/null @@ -1,23 +0,0 @@ -# ignore most things, include some others -/* -/.* - -!.github -!bin/ -!lib/ -!docs/ -!package.json -!package-lock.json -!README.md -!CONTRIBUTING.md -!LICENSE -!CHANGELOG.md -!example/ -!scripts/ -!tap-snapshots/ -!test/ -!.travis.yml -!.gitignore -!.gitattributes -!coverage-map.js -!index.js diff --git a/node_modules/json-stringify-nice/README.md b/node_modules/json-stringify-nice/README.md deleted file mode 100644 index 66cb1a7c53b8c..0000000000000 --- a/node_modules/json-stringify-nice/README.md +++ /dev/null @@ -1,105 +0,0 @@ -# json-stringify-nice - -Stringify an object sorting scalars before objects, and defaulting to -2-space indent. - -Sometimes you want to stringify an object in a consistent way, and for -human legibility reasons, you may want to put any non-object properties -ahead of any object properties, so that it's easier to track the nesting -level as you read through the object, but you don't want to have to be -meticulous about maintaining object property order as you're building up -the object, since it doesn't matter in code, it only matters in the output -file. 
Also, it'd be nice to have it default to reasonable spacing without -having to remember to add `, null, 2)` to all your `JSON.stringify()` -calls. - -If that is what you want, then this module is for you, because it does -all of that. - -## USAGE - -```js -const stringify = require('json-stringify-nice') -const obj = { - z: 1, - y: 'z', - obj: { a: {}, b: 'x' }, - a: { b: 1, a: { nested: true} }, - yy: 'a', -} - -console.log(stringify(obj)) -/* output: -{ - "y": "z", <-- alphabetical sorting like whoa! - "yy": "a", - "z": 1, - "a": { <-- a sorted before obj, because alphabetical, and both objects - "b": 1, - "a": { <-- note that a comes after b, because it's an object - "nested": true - } - }, - "obj": { - "b": "x", - "a": {} - } -} -*/ - -// specify an array of keys if you have some that you prefer -// to be sorted in a specific order. preferred keys come before -// any other keys, and in the order specified, but objects are -// still sorted AFTER scalars, so the preferences only apply -// when both values are objects or both are non-objects. -console.log(stringify(obj, ['z', 'yy', 'obj'])) -/* output -{ - "z": 1, <-- z comes before other scalars - "yy": "a", <-- yy comes after z, but before other scalars - "y": "z", <-- then all the other scalar values - "obj": { <-- obj comes before other objects, but after scalars - "b": "x", - "a": {} - }, - "a": { - "b": 1, - "a": { - "nested": true - } - } -} -*/ - -// can also specify a replacer or indent value like with JSON.stringify -// this turns all values with an 'a' key into a doggo meme from 2011 -const replacer = (key, val) => - key === 'a' ? { hello: '📞 yes', 'this is': '🐕', ...val } : val - -console.log(stringify(obj, replacer, '📞🐶')) - -/* output: -{ -📞🐶"y": "z", -📞🐶"yy": "a", -📞🐶"z": 1, -📞🐶"a": { -📞🐶📞🐶"b": 1, -📞🐶📞🐶"hello": "📞 yes", -📞🐶📞🐶"this is": "🐕", -📞🐶📞🐶"a": { -📞🐶📞🐶📞🐶"hello": "📞 yes", -📞🐶📞🐶📞🐶"nested": true, -📞🐶📞🐶📞🐶"this is": "🐕" -📞🐶📞🐶} -📞🐶}, -📞🐶"obj": { -📞🐶📞🐶"b": "x", -📞🐶📞🐶"a": { -📞🐶📞🐶📞🐶"hello": "📞 yes", -📞🐶📞🐶📞🐶"this is": "🐕" -📞🐶📞🐶} -📞🐶} -} -*/ -``` diff --git a/node_modules/json-stringify-nice/index.js b/node_modules/json-stringify-nice/index.js index 1ca7e14fa0c66..36557bb055f01 100644 --- a/node_modules/json-stringify-nice/index.js +++ b/node_modules/json-stringify-nice/index.js @@ -1,11 +1,11 @@ -const isObj = val => val && !Array.isArray(val) && typeof val === 'object' +const isObj = val => !!val && !Array.isArray(val) && typeof val === 'object' const compare = (ak, bk, prefKeys) => prefKeys.includes(ak) && !prefKeys.includes(bk) ? -1 : prefKeys.includes(bk) && !prefKeys.includes(ak) ? 1 : prefKeys.includes(ak) && prefKeys.includes(bk) ? prefKeys.indexOf(ak) - prefKeys.indexOf(bk) - : ak.localeCompare(bk) + : ak.localeCompare(bk, 'en') const sort = (replacer, seen) => (key, val) => { const prefKeys = Array.isArray(replacer) ? 
replacer : [] diff --git a/node_modules/json-stringify-nice/package-lock.json b/node_modules/json-stringify-nice/package-lock.json deleted file mode 100644 index b20f9a8977db6..0000000000000 --- a/node_modules/json-stringify-nice/package-lock.json +++ /dev/null @@ -1,3447 +0,0 @@ -{ - "name": "json-stringify-nice", - "version": "1.1.1", - "lockfileVersion": 1, - "requires": true, - "dependencies": { - "@babel/code-frame": { - "version": "7.5.5", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.5.5.tgz", - "integrity": "sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw==", - "dev": true, - "requires": { - "@babel/highlight": "^7.0.0" - } - }, - "@babel/generator": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.7.2.tgz", - "integrity": "sha512-WthSArvAjYLz4TcbKOi88me+KmDJdKSlfwwN8CnUYn9jBkzhq0ZEPuBfkAWIvjJ3AdEV1Cf/+eSQTnp3IDJKlQ==", - "dev": true, - "requires": { - "@babel/types": "^7.7.2", - "jsesc": "^2.5.1", - "lodash": "^4.17.13", - "source-map": "^0.5.0" - }, - "dependencies": { - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true - } - } - }, - "@babel/helper-function-name": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.7.0.tgz", - "integrity": "sha512-tDsJgMUAP00Ugv8O2aGEua5I2apkaQO7lBGUq1ocwN3G23JE5Dcq0uh3GvFTChPa4b40AWiAsLvCZOA2rdnQ7Q==", - "dev": true, - "requires": { - "@babel/helper-get-function-arity": "^7.7.0", - "@babel/template": "^7.7.0", - "@babel/types": "^7.7.0" - } - }, - "@babel/helper-get-function-arity": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.0.tgz", - "integrity": "sha512-tLdojOTz4vWcEnHWHCuPN5P85JLZWbm5Fx5ZsMEMPhF3Uoe3O7awrbM2nQ04bDOUToH/2tH/ezKEOR8zEYzqyw==", - "dev": true, - "requires": { - "@babel/types": "^7.7.0" - } - }, - "@babel/helper-split-export-declaration": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.0.tgz", - "integrity": "sha512-HgYSI8rH08neWlAH3CcdkFg9qX9YsZysZI5GD8LjhQib/mM0jGOZOVkoUiiV2Hu978fRtjtsGsW6w0pKHUWtqA==", - "dev": true, - "requires": { - "@babel/types": "^7.7.0" - } - }, - "@babel/highlight": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.5.0.tgz", - "integrity": "sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ==", - "dev": true, - "requires": { - "chalk": "^2.0.0", - "esutils": "^2.0.2", - "js-tokens": "^4.0.0" - } - }, - "@babel/parser": { - "version": "7.7.3", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.7.3.tgz", - "integrity": "sha512-bqv+iCo9i+uLVbI0ILzKkvMorqxouI+GbV13ivcARXn9NNEabi2IEz912IgNpT/60BNXac5dgcfjb94NjsF33A==", - "dev": true - }, - "@babel/runtime": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.7.2.tgz", - "integrity": "sha512-JONRbXbTXc9WQE2mAZd1p0Z3DZ/6vaQIkgYMSTP3KjRCyd7rCZCcfhCyX+YjwcKxcZ82UrxbRD358bpExNgrjw==", - "dev": true, - "requires": { - "regenerator-runtime": "^0.13.2" - } - }, - "@babel/template": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.7.0.tgz", - "integrity": 
"sha512-OKcwSYOW1mhWbnTBgQY5lvg1Fxg+VyfQGjcBduZFljfc044J5iDlnDSfhQ867O17XHiSCxYHUxHg2b7ryitbUQ==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.0.0", - "@babel/parser": "^7.7.0", - "@babel/types": "^7.7.0" - } - }, - "@babel/traverse": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.7.2.tgz", - "integrity": "sha512-TM01cXib2+rgIZrGJOLaHV/iZUAxf4A0dt5auY6KNZ+cm6aschuJGqKJM3ROTt3raPUdIDk9siAufIFEleRwtw==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.5.5", - "@babel/generator": "^7.7.2", - "@babel/helper-function-name": "^7.7.0", - "@babel/helper-split-export-declaration": "^7.7.0", - "@babel/parser": "^7.7.2", - "@babel/types": "^7.7.2", - "debug": "^4.1.0", - "globals": "^11.1.0", - "lodash": "^4.17.13" - } - }, - "@babel/types": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.7.2.tgz", - "integrity": "sha512-YTf6PXoh3+eZgRCBzzP25Bugd2ngmpQVrk7kXX0i5N9BO7TFBtIgZYs7WtxtOGs8e6A4ZI7ECkbBCEHeXocvOA==", - "dev": true, - "requires": { - "esutils": "^2.0.2", - "lodash": "^4.17.13", - "to-fast-properties": "^2.0.0" - } - }, - "ajv": { - "version": "6.10.2", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz", - "integrity": "sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw==", - "dev": true, - "requires": { - "fast-deep-equal": "^2.0.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - } - }, - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true - }, - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "requires": { - "color-convert": "^1.9.0" - } - }, - "anymatch": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", - "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", - "dev": true, - "requires": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - } - }, - "append-transform": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-1.0.0.tgz", - "integrity": "sha512-P009oYkeHyU742iSZJzZZywj4QRJdnTWffaKuJQLablCZ1uz6/cW4yaRgcDaoQ+uwOxxnt0gRUcwfsNP2ri0gw==", - "dev": true, - "requires": { - "default-require-extensions": "^2.0.0" - } - }, - "archy": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz", - "integrity": "sha1-+cjBN1fMHde8N5rHeyxipcKGjEA=", - "dev": true - }, - "arg": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.1.tgz", - "integrity": "sha512-SlmP3fEA88MBv0PypnXZ8ZfJhwmDeIE3SP71j37AiXQBXYosPV0x6uISAaHYSlSVhmHOVkomen0tbGk6Anlebw==", - "dev": true - }, - "argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dev": true, - "requires": { - "sprintf-js": "~1.0.2" - } - }, - "asn1": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", - "integrity": 
"sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", - "dev": true, - "requires": { - "safer-buffer": "~2.1.0" - } - }, - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", - "dev": true - }, - "async-hook-domain": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/async-hook-domain/-/async-hook-domain-1.1.3.tgz", - "integrity": "sha512-ZovMxSbADV3+biB7oR1GL5lGyptI24alp0LWHlmz1OFc5oL47pz3EiIF6nXOkDW7yLqih4NtsiYduzdDW0i+Wg==", - "dev": true, - "requires": { - "source-map-support": "^0.5.11" - } - }, - "asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", - "dev": true - }, - "aws-sign2": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", - "dev": true - }, - "aws4": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", - "integrity": "sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==", - "dev": true - }, - "balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true - }, - "bcrypt-pbkdf": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", - "dev": true, - "requires": { - "tweetnacl": "^0.14.3" - } - }, - "binary-extensions": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.0.0.tgz", - "integrity": "sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==", - "dev": true - }, - "bind-obj-methods": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/bind-obj-methods/-/bind-obj-methods-2.0.0.tgz", - "integrity": "sha512-3/qRXczDi2Cdbz6jE+W3IflJOutRVica8frpBn14de1mBOkzDo+6tY33kNhvkw54Kn3PzRRD2VnGbGPcTAk4sw==", - "dev": true - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, - "browser-process-hrtime": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", - "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", - "dev": true - }, - "buffer-from": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", - "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==", - "dev": true - }, - "caching-transform": { - "version": "3.0.2", - "resolved": 
"https://registry.npmjs.org/caching-transform/-/caching-transform-3.0.2.tgz", - "integrity": "sha512-Mtgcv3lh3U0zRii/6qVgQODdPA4G3zhG+jtbCWj39RXuUFTMzH0vcdMtaJS1jPowd+It2Pqr6y3NJMQqOqCE2w==", - "dev": true, - "requires": { - "hasha": "^3.0.0", - "make-dir": "^2.0.0", - "package-hash": "^3.0.0", - "write-file-atomic": "^2.4.2" - }, - "dependencies": { - "write-file-atomic": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", - "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", - "dev": true, - "requires": { - "graceful-fs": "^4.1.11", - "imurmurhash": "^0.1.4", - "signal-exit": "^3.0.2" - } - } - } - }, - "camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "dev": true - }, - "caseless": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", - "dev": true - }, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "chokidar": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.3.0.tgz", - "integrity": "sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A==", - "dev": true, - "requires": { - "anymatch": "~3.1.1", - "braces": "~3.0.2", - "fsevents": "~2.1.1", - "glob-parent": "~5.1.0", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.2.0" - } - }, - "cliui": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-4.1.0.tgz", - "integrity": "sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ==", - "dev": true, - "requires": { - "string-width": "^2.1.1", - "strip-ansi": "^4.0.0", - "wrap-ansi": "^2.0.0" - } - }, - "code-point-at": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", - "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", - "dev": true - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", - "dev": true - }, - "color-support": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", - "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", - "dev": true - }, - "combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, - "requires": { - 
"delayed-stream": "~1.0.0" - } - }, - "commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", - "dev": true, - "optional": true - }, - "commondir": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", - "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=", - "dev": true - }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true - }, - "convert-source-map": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz", - "integrity": "sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.1" - }, - "dependencies": { - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - } - } - }, - "core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", - "dev": true - }, - "coveralls": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/coveralls/-/coveralls-3.0.7.tgz", - "integrity": "sha512-mUuH2MFOYB2oBaA4D4Ykqi9LaEYpMMlsiOMJOrv358yAjP6enPIk55fod2fNJ8AvwoYXStWQls37rA+s5e7boA==", - "dev": true, - "requires": { - "growl": "~> 1.10.0", - "js-yaml": "^3.13.1", - "lcov-parse": "^0.0.10", - "log-driver": "^1.2.7", - "minimist": "^1.2.0", - "request": "^2.86.0" - } - }, - "cp-file": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/cp-file/-/cp-file-6.2.0.tgz", - "integrity": "sha512-fmvV4caBnofhPe8kOcitBwSn2f39QLjnAnGq3gO9dfd75mUytzKNZB1hde6QHunW2Rt+OwuBOMc3i1tNElbszA==", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "make-dir": "^2.0.0", - "nested-error-stacks": "^2.0.0", - "pify": "^4.0.1", - "safe-buffer": "^5.0.1" - } - }, - "cross-spawn": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-4.0.2.tgz", - "integrity": "sha1-e5JHYhwjrf3ThWAEqCPL45dCTUE=", - "dev": true, - "requires": { - "lru-cache": "^4.0.1", - "which": "^1.2.9" - }, - "dependencies": { - "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - } - } - }, - "dashdash": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0" - } - }, - "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - }, - "decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", - "dev": true - }, - 
"default-require-extensions": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-2.0.0.tgz", - "integrity": "sha1-9fj7sYp9bVCyH2QfZJ67Uiz+JPc=", - "dev": true, - "requires": { - "strip-bom": "^3.0.0" - } - }, - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", - "dev": true - }, - "diff": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.1.tgz", - "integrity": "sha512-s2+XdvhPCOF01LRQBC8hf4vhbVmI2CGS5aZnxLJlT5FtdhPCDFq80q++zK2KlrVorVDdL5BOGZ/VfLrVtYNF+Q==", - "dev": true - }, - "ecc-jsbn": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", - "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", - "dev": true, - "requires": { - "jsbn": "~0.1.0", - "safer-buffer": "^2.1.0" - } - }, - "emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", - "dev": true - }, - "error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "dev": true, - "requires": { - "is-arrayish": "^0.2.1" - } - }, - "es6-error": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", - "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==", - "dev": true - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "dev": true - }, - "esm": { - "version": "3.2.25", - "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", - "integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==", - "dev": true - }, - "esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true - }, - "esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true - }, - "events-to-array": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/events-to-array/-/events-to-array-1.1.2.tgz", - "integrity": "sha1-LUH1Y+H+QA7Uli/hpNXGp1Od9/Y=", - "dev": true - }, - "extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", - "dev": true - }, - "extsprintf": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", - "dev": true - }, - "fast-deep-equal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", - "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=", - "dev": true - }, - 
"fast-json-stable-stringify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=", - "dev": true - }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, - "find-cache-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", - "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", - "dev": true, - "requires": { - "commondir": "^1.0.1", - "make-dir": "^2.0.0", - "pkg-dir": "^3.0.0" - } - }, - "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", - "dev": true, - "requires": { - "locate-path": "^3.0.0" - } - }, - "findit": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/findit/-/findit-2.0.0.tgz", - "integrity": "sha1-ZQnwEmr0wXhVHPqZOU4DLhOk1W4=", - "dev": true - }, - "flow-parser": { - "version": "0.111.3", - "resolved": "https://registry.npmjs.org/flow-parser/-/flow-parser-0.111.3.tgz", - "integrity": "sha512-iEjGZ94OBMcESxnLorXNjJmtd/JtQYXUVrQpfwvtAKkuyawRmv+2LM6nqyOsOJkISEYbyY6ziudRE0u4VyPSVA==", - "dev": true - }, - "flow-remove-types": { - "version": "2.111.3", - "resolved": "https://registry.npmjs.org/flow-remove-types/-/flow-remove-types-2.111.3.tgz", - "integrity": "sha512-M9k0igaQDnPXzTsolDMwZL6ksYKPjLsp7NJqgyiELnkGBWlnfvWlN06RuyYdr9WrTSv9wxgmLoa+rMa/W4fffg==", - "dev": true, - "requires": { - "flow-parser": "^0.111.3", - "pirates": "^3.0.2", - "vlq": "^0.2.1" - } - }, - "foreground-child": { - "version": "1.5.6", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-1.5.6.tgz", - "integrity": "sha1-T9ca0t/elnibmApcCilZN8svXOk=", - "dev": true, - "requires": { - "cross-spawn": "^4", - "signal-exit": "^3.0.0" - } - }, - "forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", - "dev": true - }, - "form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "dev": true, - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - } - }, - "fs-exists-cached": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs-exists-cached/-/fs-exists-cached-1.0.0.tgz", - "integrity": "sha1-zyVVTKBQ3EmuZla0HeQiWJidy84=", - "dev": true - }, - "fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "dev": true - }, - "fsevents": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.2.tgz", - "integrity": "sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA==", - "dev": true, - "optional": true - }, - "function-loop": { - "version": "1.0.2", - "resolved": 
"https://registry.npmjs.org/function-loop/-/function-loop-1.0.2.tgz", - "integrity": "sha512-Iw4MzMfS3udk/rqxTiDDCllhGwlOrsr50zViTOO/W6lS/9y6B1J0BD2VZzrnWUYBJsl3aeqjgR5v7bWWhZSYbA==", - "dev": true - }, - "get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "dev": true - }, - "getpass": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0" - } - }, - "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "glob-parent": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz", - "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==", - "dev": true, - "requires": { - "is-glob": "^4.0.1" - } - }, - "globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true - }, - "graceful-fs": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", - "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", - "dev": true - }, - "growl": { - "version": "1.10.5", - "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", - "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", - "dev": true - }, - "handlebars": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.5.1.tgz", - "integrity": "sha512-C29UoFzHe9yM61lOsIlCE5/mQVGrnIOrOq7maQl76L7tYPCgC1og0Ajt6uWnX4ZTxBPnjw+CUvawphwCfJgUnA==", - "dev": true, - "requires": { - "neo-async": "^2.6.0", - "optimist": "^0.6.1", - "source-map": "^0.6.1", - "uglify-js": "^3.1.4" - } - }, - "har-schema": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", - "dev": true - }, - "har-validator": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", - "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", - "dev": true, - "requires": { - "ajv": "^6.5.5", - "har-schema": "^2.0.0" - } - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "dev": true - }, - "hasha": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/hasha/-/hasha-3.0.0.tgz", - "integrity": "sha1-UqMvq4Vp1BymmmH/GiFPjrfIvTk=", - "dev": true, - "requires": { - "is-stream": "^1.0.1" - } - }, - "hosted-git-info": { - "version": "2.8.5", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.5.tgz", - 
"integrity": "sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg==", - "dev": true - }, - "http-signature": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0", - "jsprim": "^1.2.2", - "sshpk": "^1.7.0" - } - }, - "imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", - "dev": true - }, - "inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "dev": true, - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, - "is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", - "dev": true - }, - "is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "dev": true, - "requires": { - "binary-extensions": "^2.0.0" - } - }, - "is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": true - }, - "is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", - "dev": true, - "requires": { - "is-extglob": "^2.1.1" - } - }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, - "is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", - "dev": true - }, - "is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", - "dev": true - }, - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", - "dev": true, - "optional": true - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true - }, - "isstream": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", - "dev": true - }, - "istanbul-lib-coverage": { - "version": "2.0.5", - 
"resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.5.tgz", - "integrity": "sha512-8aXznuEPCJvGnMSRft4udDRDtb1V3pkQkMMI5LI+6HuQz5oQ4J2UFn1H82raA3qJtyOLkkwVqICBQkjnGtn5mA==", - "dev": true - }, - "istanbul-lib-hook": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-2.0.7.tgz", - "integrity": "sha512-vrRztU9VRRFDyC+aklfLoeXyNdTfga2EI3udDGn4cZ6fpSXpHLV9X6CHvfoMCPtggg8zvDDmC4b9xfu0z6/llA==", - "dev": true, - "requires": { - "append-transform": "^1.0.0" - } - }, - "istanbul-lib-instrument": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-3.3.0.tgz", - "integrity": "sha512-5nnIN4vo5xQZHdXno/YDXJ0G+I3dAm4XgzfSVTPLQpj/zAV2dV6Juy0yaf10/zrJOJeHoN3fraFe+XRq2bFVZA==", - "dev": true, - "requires": { - "@babel/generator": "^7.4.0", - "@babel/parser": "^7.4.3", - "@babel/template": "^7.4.0", - "@babel/traverse": "^7.4.3", - "@babel/types": "^7.4.0", - "istanbul-lib-coverage": "^2.0.5", - "semver": "^6.0.0" - }, - "dependencies": { - "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true - } - } - }, - "istanbul-lib-processinfo": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-1.0.0.tgz", - "integrity": "sha512-FY0cPmWa4WoQNlvB8VOcafiRoB5nB+l2Pz2xGuXHRSy1KM8QFOYfz/rN+bGMCAeejrY3mrpF5oJHcN0s/garCg==", - "dev": true, - "requires": { - "archy": "^1.0.0", - "cross-spawn": "^6.0.5", - "istanbul-lib-coverage": "^2.0.3", - "rimraf": "^2.6.3", - "uuid": "^3.3.2" - }, - "dependencies": { - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - } - } - }, - "istanbul-lib-report": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-2.0.8.tgz", - "integrity": "sha512-fHBeG573EIihhAblwgxrSenp0Dby6tJMFR/HvlerBsrCTD5bkUuoNtn3gVh29ZCS824cGGBPn7Sg7cNk+2xUsQ==", - "dev": true, - "requires": { - "istanbul-lib-coverage": "^2.0.5", - "make-dir": "^2.1.0", - "supports-color": "^6.1.0" - }, - "dependencies": { - "supports-color": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", - "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", - "dev": true, - "requires": { - "has-flag": "^3.0.0" - } - } - } - }, - "istanbul-lib-source-maps": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-3.0.6.tgz", - "integrity": "sha512-R47KzMtDJH6X4/YW9XTx+jrLnZnscW4VpNN+1PViSYTejLVPWv7oov+Duf8YQSPyVRUvueQqz1TcsC6mooZTXw==", - "dev": true, - "requires": { - "debug": "^4.1.1", - "istanbul-lib-coverage": 
"^2.0.5", - "make-dir": "^2.1.0", - "rimraf": "^2.6.3", - "source-map": "^0.6.1" - } - }, - "istanbul-reports": { - "version": "2.2.6", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-2.2.6.tgz", - "integrity": "sha512-SKi4rnMyLBKe0Jy2uUdx28h8oG7ph2PPuQPvIAh31d+Ci+lSiEu4C+h3oBPuJ9+mPKhOyW0M8gY4U5NM1WLeXA==", - "dev": true, - "requires": { - "handlebars": "^4.1.2" - } - }, - "jackspeak": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-1.4.0.tgz", - "integrity": "sha512-VDcSunT+wcccoG46FtzuBAyQKlzhHjli4q31e1fIHGOsRspqNUFjVzGb+7eIFDlTvqLygxapDHPHS0ouT2o/tw==", - "dev": true, - "requires": { - "cliui": "^4.1.0" - } - }, - "js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true - }, - "js-yaml": { - "version": "3.13.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", - "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", - "dev": true, - "requires": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - } - }, - "jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", - "dev": true - }, - "jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", - "dev": true - }, - "json-parse-better-errors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", - "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", - "dev": true - }, - "json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", - "dev": true - }, - "json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - "json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", - "dev": true - }, - "jsprim": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", - "dev": true, - "requires": { - "assert-plus": "1.0.0", - "extsprintf": "1.3.0", - "json-schema": "0.2.3", - "verror": "1.10.0" - } - }, - "lcov-parse": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/lcov-parse/-/lcov-parse-0.0.10.tgz", - "integrity": "sha1-GwuP+ayceIklBYK3C3ExXZ2m2aM=", - "dev": true - }, - "load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - }, - "dependencies": { - "pify": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "dev": true - } - } - }, - "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", - "dev": true, - "requires": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" - } - }, - "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", - "dev": true - }, - "lodash.flattendeep": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz", - "integrity": "sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI=", - "dev": true - }, - "log-driver": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz", - "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==", - "dev": true - }, - "lru-cache": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", - "dev": true, - "requires": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" - } - }, - "make-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", - "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", - "dev": true, - "requires": { - "pify": "^4.0.1", - "semver": "^5.6.0" - } - }, - "make-error": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.5.tgz", - "integrity": "sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g==", - "dev": true - }, - "merge-source-map": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/merge-source-map/-/merge-source-map-1.1.0.tgz", - "integrity": "sha512-Qkcp7P2ygktpMPh2mCQZaf3jhN6D3Z/qVZHSdWvQ+2Ef5HgRAPBO57A77+ENm0CPx2+1Ce/MYKi3ymqdfuqibw==", - "dev": true, - "requires": { - "source-map": "^0.6.1" - } - }, - "mime-db": { - "version": "1.40.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.40.0.tgz", - "integrity": "sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA==", - "dev": true - }, - "mime-types": { - "version": "2.1.24", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.24.tgz", - "integrity": "sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ==", - "dev": true, - "requires": { - "mime-db": "1.40.0" - } - }, - "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "dev": true, - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minimist": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", - "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", - "dev": true - }, - "minipass": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.1.tgz", - "integrity": 
"sha512-UFqVihv6PQgwj8/yTGvl9kPz7xIAY+R5z6XYjRInD3Gk3qx6QGSD6zEcpeG4Dy/lQnv1J6zv8ejV90hyYIKf3w==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - }, - "dependencies": { - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - } - } - }, - "mkdirp": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", - "dev": true, - "requires": { - "minimist": "0.0.8" - }, - "dependencies": { - "minimist": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", - "dev": true - } - } - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "neo-async": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.1.tgz", - "integrity": "sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw==", - "dev": true - }, - "nested-error-stacks": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/nested-error-stacks/-/nested-error-stacks-2.1.0.tgz", - "integrity": "sha512-AO81vsIO1k1sM4Zrd6Hu7regmJN1NSiAja10gc4bX3F0wd+9rQmcuHQaHVQCYIEC8iFXnE+mavh23GOt7wBgug==", - "dev": true - }, - "nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true - }, - "node-modules-regexp": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz", - "integrity": "sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA=", - "dev": true - }, - "normalize-package-data": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "dev": true, - "requires": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true - }, - "number-is-nan": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", - "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", - "dev": true - }, - "nyc": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/nyc/-/nyc-14.1.1.tgz", - "integrity": "sha512-OI0vm6ZGUnoGZv/tLdZ2esSVzDwUC88SNs+6JoSOMVxA+gKMB8Tk7jBwgemLx4O40lhhvZCVw1C+OYLOBOPXWw==", - "dev": true, - "requires": { - "archy": "^1.0.0", - "caching-transform": "^3.0.2", - "convert-source-map": "^1.6.0", - "cp-file": "^6.2.0", - "find-cache-dir": "^2.1.0", - "find-up": "^3.0.0", - "foreground-child": "^1.5.6", - "glob": "^7.1.3", - "istanbul-lib-coverage": "^2.0.5", - "istanbul-lib-hook": "^2.0.7", - "istanbul-lib-instrument": "^3.3.0", - 
"istanbul-lib-report": "^2.0.8", - "istanbul-lib-source-maps": "^3.0.6", - "istanbul-reports": "^2.2.4", - "js-yaml": "^3.13.1", - "make-dir": "^2.1.0", - "merge-source-map": "^1.1.0", - "resolve-from": "^4.0.0", - "rimraf": "^2.6.3", - "signal-exit": "^3.0.2", - "spawn-wrap": "^1.4.2", - "test-exclude": "^5.2.3", - "uuid": "^3.3.2", - "yargs": "^13.2.2", - "yargs-parser": "^13.0.0" - } - }, - "oauth-sign": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", - "dev": true - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "dev": true, - "requires": { - "wrappy": "1" - } - }, - "opener": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.1.tgz", - "integrity": "sha512-goYSy5c2UXE4Ra1xixabeVh1guIX/ZV/YokJksb6q2lubWu6UbvPQ20p542/sFIll1nl8JnCyK9oBaOcCWXwvA==", - "dev": true - }, - "optimist": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", - "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", - "dev": true, - "requires": { - "minimist": "~0.0.1", - "wordwrap": "~0.0.2" - }, - "dependencies": { - "minimist": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", - "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=", - "dev": true - } - } - }, - "os-homedir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", - "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", - "dev": true - }, - "own-or": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/own-or/-/own-or-1.0.0.tgz", - "integrity": "sha1-Tod/vtqaLsgAD7wLyuOWRe6L+Nw=", - "dev": true - }, - "own-or-env": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/own-or-env/-/own-or-env-1.0.1.tgz", - "integrity": "sha512-y8qULRbRAlL6x2+M0vIe7jJbJx/kmUTzYonRAa2ayesR2qWLswninkVyeJe4x3IEXhdgoNodzjQRKAoEs6Fmrw==", - "dev": true, - "requires": { - "own-or": "^1.0.0" - } - }, - "p-limit": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.1.tgz", - "integrity": "sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg==", - "dev": true, - "requires": { - "p-try": "^2.0.0" - } - }, - "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", - "dev": true, - "requires": { - "p-limit": "^2.0.0" - } - }, - "p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", - "dev": true - }, - "package-hash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-3.0.0.tgz", - "integrity": "sha512-lOtmukMDVvtkL84rJHI7dpTYq+0rli8N2wlnqUcBuDWCfVhRUfOmnR9SsoHFMLpACvEV60dX7rd0rFaYDZI+FA==", - "dev": true, - "requires": { - "graceful-fs": "^4.1.15", - "hasha": "^3.0.0", - "lodash.flattendeep": "^4.4.0", - "release-zalgo": "^1.0.0" - } - }, - "parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": 
"sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", - "dev": true, - "requires": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" - } - }, - "path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", - "dev": true - }, - "path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "dev": true - }, - "path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", - "dev": true - }, - "path-parse": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", - "dev": true - }, - "path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "dev": true, - "requires": { - "pify": "^3.0.0" - }, - "dependencies": { - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "dev": true - } - } - }, - "performance-now": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", - "dev": true - }, - "picomatch": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.1.1.tgz", - "integrity": "sha512-OYMyqkKzK7blWO/+XZYP6w8hH0LDvkBvdvKukti+7kqYFCiEAk+gI3DWnryapc0Dau05ugGTy0foQ6mqn4AHYA==", - "dev": true - }, - "pify": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", - "dev": true - }, - "pirates": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/pirates/-/pirates-3.0.2.tgz", - "integrity": "sha512-c5CgUJq6H2k6MJz72Ak1F5sN9n9wlSlJyEnwvpm9/y3WB4E3pHBDT2c6PEiS1vyJvq2bUxUAIu0EGf8Cx4Ic7Q==", - "dev": true, - "requires": { - "node-modules-regexp": "^1.0.0" - } - }, - "pkg-dir": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", - "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", - "dev": true, - "requires": { - "find-up": "^3.0.0" - } - }, - "process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "dev": true, - "optional": true - }, - "pseudomap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", - "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=", - "dev": true - }, - "psl": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.4.0.tgz", - "integrity": "sha512-HZzqCGPecFLyoRj5HLfuDSKYTJkAfB5thKBIkRHtGjWwY7p1dAyveIbXIq4tO0KYfDF2tHqPUgY9SDnGm00uFw==", - "dev": true - }, - "punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - 
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", - "dev": true - }, - "qs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", - "dev": true - }, - "read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", - "dev": true, - "requires": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - } - }, - "read-pkg-up": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-4.0.0.tgz", - "integrity": "sha512-6etQSH7nJGsK0RbG/2TeDzZFa8shjQ1um+SwQQ5cwKy0dhSXdOncEhb1CPpvQG4h7FyOV6EB6YlV0yJvZQNAkA==", - "dev": true, - "requires": { - "find-up": "^3.0.0", - "read-pkg": "^3.0.0" - } - }, - "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", - "dev": true, - "optional": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - }, - "dependencies": { - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true, - "optional": true - } - } - }, - "readdirp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.2.0.tgz", - "integrity": "sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ==", - "dev": true, - "requires": { - "picomatch": "^2.0.4" - } - }, - "regenerator-runtime": { - "version": "0.13.3", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz", - "integrity": "sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw==", - "dev": true - }, - "release-zalgo": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz", - "integrity": "sha1-CXALflB0Mpc5Mw5TXFqQ+2eFFzA=", - "dev": true, - "requires": { - "es6-error": "^4.0.1" - } - }, - "request": { - "version": "2.88.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", - "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", - "dev": true, - "requires": { - "aws-sign2": "~0.7.0", - "aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "har-validator": "~5.1.0", - "http-signature": "~1.2.0", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "oauth-sign": "~0.9.0", - "performance-now": "^2.1.0", - "qs": "~6.5.2", - "safe-buffer": "^5.1.2", - "tough-cookie": "~2.4.3", - "tunnel-agent": "^0.6.0", - "uuid": "^3.3.2" - } - }, - "require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": 
"sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", - "dev": true - }, - "require-main-filename": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", - "dev": true - }, - "resolve": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.12.0.tgz", - "integrity": "sha512-B/dOmuoAik5bKcD6s6nXDCjzUKnaDvdkRyAk6rsmsKLipWj4797iothd7jmmUhWTfinVMU+wc56rYKsit2Qy4w==", - "dev": true, - "requires": { - "path-parse": "^1.0.6" - } - }, - "resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "dev": true - }, - "rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", - "dev": true, - "requires": { - "glob": "^7.1.3" - } - }, - "safe-buffer": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", - "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==", - "dev": true - }, - "safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - }, - "set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", - "dev": true - }, - "shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "dev": true, - "requires": { - "shebang-regex": "^1.0.0" - } - }, - "shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true - }, - "signal-exit": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", - "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", - "dev": true - }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true - }, - "source-map-support": { - "version": "0.5.16", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.16.tgz", - "integrity": "sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ==", - "dev": true, - "requires": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "spawn-wrap": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-1.4.3.tgz", - "integrity": 
"sha512-IgB8md0QW/+tWqcavuFgKYR/qIRvJkRLPJDFaoXtLLUaVcCDK0+HeFTkmQHj3eprcYhc+gOl0aEA1w7qZlYezw==", - "dev": true, - "requires": { - "foreground-child": "^1.5.6", - "mkdirp": "^0.5.0", - "os-homedir": "^1.0.1", - "rimraf": "^2.6.2", - "signal-exit": "^3.0.2", - "which": "^1.3.0" - }, - "dependencies": { - "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - } - } - }, - "spdx-correct": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", - "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", - "dev": true, - "requires": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "spdx-exceptions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", - "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==", - "dev": true - }, - "spdx-expression-parse": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", - "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", - "dev": true, - "requires": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "spdx-license-ids": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", - "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", - "dev": true - }, - "sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", - "dev": true - }, - "sshpk": { - "version": "1.16.1", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", - "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", - "dev": true, - "requires": { - "asn1": "~0.2.3", - "assert-plus": "^1.0.0", - "bcrypt-pbkdf": "^1.0.0", - "dashdash": "^1.12.0", - "ecc-jsbn": "~0.1.1", - "getpass": "^0.1.1", - "jsbn": "~0.1.0", - "safer-buffer": "^2.0.2", - "tweetnacl": "~0.14.0" - } - }, - "stack-utils": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.2.tgz", - "integrity": "sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA==", - "dev": true - }, - "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "optional": true, - "requires": { - "safe-buffer": "~5.1.0" - }, - "dependencies": { - "safe-buffer": { - "version": "5.1.2", - "resolved": 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true, - "optional": true - } - } - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - }, - "strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", - "dev": true - }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "requires": { - "has-flag": "^3.0.0" - } - }, - "tap": { - "version": "14.9.2", - "resolved": "https://registry.npmjs.org/tap/-/tap-14.9.2.tgz", - "integrity": "sha512-Fyy/sjsw4eb+Hnphin4oMtDtKxmrob/vrnaIDv/F3thFFQjQFqMg8xf45zRFGHxUfezlrO6KsH8TpWNlTDINfA==", - "dev": true, - "requires": { - "async-hook-domain": "^1.1.2", - "bind-obj-methods": "^2.0.0", - "browser-process-hrtime": "^1.0.0", - "chokidar": "^3.0.2", - "color-support": "^1.1.0", - "coveralls": "^3.0.6", - "diff": "^4.0.1", - "esm": "^3.2.25", - "findit": "^2.0.0", - "flow-remove-types": "^2.107.0", - "foreground-child": "^1.3.3", - "fs-exists-cached": "^1.0.0", - "function-loop": "^1.0.2", - "glob": "^7.1.4", - "import-jsx": "^2.0.0", - "ink": "^2.3.0", - "isexe": "^2.0.0", - "istanbul-lib-processinfo": "^1.0.0", - "jackspeak": "^1.4.0", - "minipass": "^3.0.0", - "mkdirp": "^0.5.1", - "nyc": "^14.1.1", - "opener": "^1.5.1", - "own-or": "^1.0.0", - "own-or-env": "^1.0.1", - "react": "^16.9.0", - "rimraf": "^2.7.1", - "signal-exit": "^3.0.0", - "source-map-support": "^0.5.16", - "stack-utils": "^1.0.2", - "tap-mocha-reporter": "^5.0.0", - "tap-parser": "^10.0.1", - "tap-yaml": "^1.0.0", - "tcompare": "^2.3.0", - "treport": "^0.4.2", - "trivial-deferred": "^1.0.1", - "ts-node": "^8.3.0", - "typescript": "^3.6.3", - "which": "^2.0.1", - "write-file-atomic": "^3.0.0", - "yaml": "^1.6.0", - "yapool": "^1.0.0" - }, - "dependencies": { - "@babel/runtime": { - "version": "7.6.3", - "bundled": true, - "dev": true, - "requires": { - "regenerator-runtime": "^0.13.2" - }, - "dependencies": { - "regenerator-runtime": { - "version": "0.13.3", - "bundled": true, - "dev": true - } - } - }, - "@types/prop-types": { - "version": "15.7.3", - "bundled": true, - "dev": true - }, - "@types/react": { - "version": "16.9.5", - "bundled": true, - "dev": true, - "requires": { - "@types/prop-types": "*", - "csstype": "^2.2.0" - } - }, - "ansi-escapes": { - "version": "4.2.1", - "bundled": true, - "dev": true, - "requires": { - "type-fest": "^0.5.2" - } - }, - "ansi-regex": { - "version": "2.1.1", - "bundled": true, - "dev": true - }, - "ansi-styles": { - "version": "2.2.1", - "bundled": true, - "dev": true - }, - "ansicolors": { - "version": "0.3.2", - "bundled": true, - "dev": true - }, - "arrify": { - "version": "1.0.1", - "bundled": true, - "dev": true - }, - "astral-regex": { - "version": "1.0.0", - "bundled": true, - "dev": true - }, - "auto-bind": { - "version": "2.1.1", - "bundled": true, - "dev": true, - "requires": { - "@types/react": "^16.8.12" - } - }, - "babel-code-frame": { - "version": "6.26.0", - "bundled": true, - "dev": true, - "requires": { - 
"chalk": "^1.1.3", - "esutils": "^2.0.2", - "js-tokens": "^3.0.2" - } - }, - "babel-core": { - "version": "6.26.3", - "bundled": true, - "dev": true, - "requires": { - "babel-code-frame": "^6.26.0", - "babel-generator": "^6.26.0", - "babel-helpers": "^6.24.1", - "babel-messages": "^6.23.0", - "babel-register": "^6.26.0", - "babel-runtime": "^6.26.0", - "babel-template": "^6.26.0", - "babel-traverse": "^6.26.0", - "babel-types": "^6.26.0", - "babylon": "^6.18.0", - "convert-source-map": "^1.5.1", - "debug": "^2.6.9", - "json5": "^0.5.1", - "lodash": "^4.17.4", - "minimatch": "^3.0.4", - "path-is-absolute": "^1.0.1", - "private": "^0.1.8", - "slash": "^1.0.0", - "source-map": "^0.5.7" - }, - "dependencies": { - "source-map": { - "version": "0.5.7", - "bundled": true, - "dev": true - } - } - }, - "babel-generator": { - "version": "6.26.1", - "bundled": true, - "dev": true, - "requires": { - "babel-messages": "^6.23.0", - "babel-runtime": "^6.26.0", - "babel-types": "^6.26.0", - "detect-indent": "^4.0.0", - "jsesc": "^1.3.0", - "lodash": "^4.17.4", - "source-map": "^0.5.7", - "trim-right": "^1.0.1" - }, - "dependencies": { - "source-map": { - "version": "0.5.7", - "bundled": true, - "dev": true - } - } - }, - "babel-helper-builder-react-jsx": { - "version": "6.26.0", - "bundled": true, - "dev": true, - "requires": { - "babel-runtime": "^6.26.0", - "babel-types": "^6.26.0", - "esutils": "^2.0.2" - } - }, - "babel-helpers": { - "version": "6.24.1", - "bundled": true, - "dev": true, - "requires": { - "babel-runtime": "^6.22.0", - "babel-template": "^6.24.1" - } - }, - "babel-messages": { - "version": "6.23.0", - "bundled": true, - "dev": true, - "requires": { - "babel-runtime": "^6.22.0" - } - }, - "babel-plugin-syntax-jsx": { - "version": "6.18.0", - "bundled": true, - "dev": true - }, - "babel-plugin-syntax-object-rest-spread": { - "version": "6.13.0", - "bundled": true, - "dev": true - }, - "babel-plugin-transform-es2015-destructuring": { - "version": "6.23.0", - "bundled": true, - "dev": true, - "requires": { - "babel-runtime": "^6.22.0" - } - }, - "babel-plugin-transform-object-rest-spread": { - "version": "6.26.0", - "bundled": true, - "dev": true, - "requires": { - "babel-plugin-syntax-object-rest-spread": "^6.8.0", - "babel-runtime": "^6.26.0" - } - }, - "babel-plugin-transform-react-jsx": { - "version": "6.24.1", - "bundled": true, - "dev": true, - "requires": { - "babel-helper-builder-react-jsx": "^6.24.1", - "babel-plugin-syntax-jsx": "^6.8.0", - "babel-runtime": "^6.22.0" - } - }, - "babel-register": { - "version": "6.26.0", - "bundled": true, - "dev": true, - "requires": { - "babel-core": "^6.26.0", - "babel-runtime": "^6.26.0", - "core-js": "^2.5.0", - "home-or-tmp": "^2.0.0", - "lodash": "^4.17.4", - "mkdirp": "^0.5.1", - "source-map-support": "^0.4.15" - }, - "dependencies": { - "source-map": { - "version": "0.5.7", - "bundled": true, - "dev": true - }, - "source-map-support": { - "version": "0.4.18", - "bundled": true, - "dev": true, - "requires": { - "source-map": "^0.5.6" - } - } - } - }, - "babel-runtime": { - "version": "6.26.0", - "bundled": true, - "dev": true, - "requires": { - "core-js": "^2.4.0", - "regenerator-runtime": "^0.11.0" - } - }, - "babel-template": { - "version": "6.26.0", - "bundled": true, - "dev": true, - "requires": { - "babel-runtime": "^6.26.0", - "babel-traverse": "^6.26.0", - "babel-types": "^6.26.0", - "babylon": "^6.18.0", - "lodash": "^4.17.4" - } - }, - "babel-traverse": { - "version": "6.26.0", - "bundled": true, - "dev": true, - "requires": { - 
"babel-code-frame": "^6.26.0", - "babel-messages": "^6.23.0", - "babel-runtime": "^6.26.0", - "babel-types": "^6.26.0", - "babylon": "^6.18.0", - "debug": "^2.6.8", - "globals": "^9.18.0", - "invariant": "^2.2.2", - "lodash": "^4.17.4" - } - }, - "babel-types": { - "version": "6.26.0", - "bundled": true, - "dev": true, - "requires": { - "babel-runtime": "^6.26.0", - "esutils": "^2.0.2", - "lodash": "^4.17.4", - "to-fast-properties": "^1.0.3" - } - }, - "babylon": { - "version": "6.18.0", - "bundled": true, - "dev": true - }, - "balanced-match": { - "version": "1.0.0", - "bundled": true, - "dev": true - }, - "brace-expansion": { - "version": "1.1.11", - "bundled": true, - "dev": true, - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "caller-callsite": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "requires": { - "callsites": "^2.0.0" - } - }, - "caller-path": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "requires": { - "caller-callsite": "^2.0.0" - } - }, - "callsites": { - "version": "2.0.0", - "bundled": true, - "dev": true - }, - "cardinal": { - "version": "2.1.1", - "bundled": true, - "dev": true, - "requires": { - "ansicolors": "~0.3.2", - "redeyed": "~2.1.0" - } - }, - "chalk": { - "version": "1.1.3", - "bundled": true, - "dev": true, - "requires": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" - } - }, - "ci-info": { - "version": "2.0.0", - "bundled": true, - "dev": true - }, - "cli-cursor": { - "version": "2.1.0", - "bundled": true, - "dev": true, - "requires": { - "restore-cursor": "^2.0.0" - } - }, - "cli-truncate": { - "version": "1.1.0", - "bundled": true, - "dev": true, - "requires": { - "slice-ansi": "^1.0.0", - "string-width": "^2.0.0" - } - }, - "color-convert": { - "version": "1.9.3", - "bundled": true, - "dev": true, - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "bundled": true, - "dev": true - }, - "concat-map": { - "version": "0.0.1", - "bundled": true, - "dev": true - }, - "convert-source-map": { - "version": "1.6.0", - "bundled": true, - "dev": true, - "requires": { - "safe-buffer": "~5.1.1" - }, - "dependencies": { - "safe-buffer": { - "version": "5.1.2", - "bundled": true, - "dev": true - } - } - }, - "core-js": { - "version": "2.6.10", - "bundled": true, - "dev": true - }, - "csstype": { - "version": "2.6.7", - "bundled": true, - "dev": true - }, - "debug": { - "version": "2.6.9", - "bundled": true, - "dev": true, - "requires": { - "ms": "2.0.0" - } - }, - "detect-indent": { - "version": "4.0.0", - "bundled": true, - "dev": true, - "requires": { - "repeating": "^2.0.0" - } - }, - "emoji-regex": { - "version": "7.0.3", - "bundled": true, - "dev": true - }, - "escape-string-regexp": { - "version": "1.0.5", - "bundled": true, - "dev": true - }, - "esprima": { - "version": "4.0.1", - "bundled": true, - "dev": true - }, - "esutils": { - "version": "2.0.3", - "bundled": true, - "dev": true - }, - "events-to-array": { - "version": "1.1.2", - "bundled": true, - "dev": true - }, - "globals": { - "version": "9.18.0", - "bundled": true, - "dev": true - }, - "has-ansi": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "has-flag": { - "version": "3.0.0", - "bundled": true, - "dev": true - }, - "home-or-tmp": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "requires": { - "os-homedir": "^1.0.0", - 
"os-tmpdir": "^1.0.1" - } - }, - "import-jsx": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "requires": { - "babel-core": "^6.25.0", - "babel-plugin-transform-es2015-destructuring": "^6.23.0", - "babel-plugin-transform-object-rest-spread": "^6.23.0", - "babel-plugin-transform-react-jsx": "^6.24.1", - "caller-path": "^2.0.0", - "resolve-from": "^3.0.0" - } - }, - "ink": { - "version": "2.5.0", - "bundled": true, - "dev": true, - "requires": { - "@types/react": "^16.8.6", - "ansi-escapes": "^4.2.1", - "arrify": "^1.0.1", - "auto-bind": "^2.0.0", - "chalk": "^2.4.1", - "cli-cursor": "^2.1.0", - "cli-truncate": "^1.1.0", - "is-ci": "^2.0.0", - "lodash.throttle": "^4.1.1", - "log-update": "^3.0.0", - "prop-types": "^15.6.2", - "react-reconciler": "^0.21.0", - "scheduler": "^0.15.0", - "signal-exit": "^3.0.2", - "slice-ansi": "^1.0.0", - "string-length": "^2.0.0", - "widest-line": "^2.0.0", - "wrap-ansi": "^5.0.0", - "yoga-layout-prebuilt": "^1.9.3" - }, - "dependencies": { - "ansi-styles": { - "version": "3.2.1", - "bundled": true, - "dev": true, - "requires": { - "color-convert": "^1.9.0" - } - }, - "chalk": { - "version": "2.4.2", - "bundled": true, - "dev": true, - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "supports-color": { - "version": "5.5.0", - "bundled": true, - "dev": true, - "requires": { - "has-flag": "^3.0.0" - } - } - } - }, - "invariant": { - "version": "2.2.4", - "bundled": true, - "dev": true, - "requires": { - "loose-envify": "^1.0.0" - } - }, - "is-ci": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "requires": { - "ci-info": "^2.0.0" - } - }, - "is-finite": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "bundled": true, - "dev": true - }, - "js-tokens": { - "version": "3.0.2", - "bundled": true, - "dev": true - }, - "jsesc": { - "version": "1.3.0", - "bundled": true, - "dev": true - }, - "json5": { - "version": "0.5.1", - "bundled": true, - "dev": true - }, - "lodash": { - "version": "4.17.15", - "bundled": true, - "dev": true - }, - "lodash.throttle": { - "version": "4.1.1", - "bundled": true, - "dev": true - }, - "log-update": { - "version": "3.3.0", - "bundled": true, - "dev": true, - "requires": { - "ansi-escapes": "^3.2.0", - "cli-cursor": "^2.1.0", - "wrap-ansi": "^5.0.0" - }, - "dependencies": { - "ansi-escapes": { - "version": "3.2.0", - "bundled": true, - "dev": true - } - } - }, - "loose-envify": { - "version": "1.4.0", - "bundled": true, - "dev": true, - "requires": { - "js-tokens": "^3.0.0 || ^4.0.0" - } - }, - "mimic-fn": { - "version": "1.2.0", - "bundled": true, - "dev": true - }, - "minimatch": { - "version": "3.0.4", - "bundled": true, - "dev": true, - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minipass": { - "version": "3.0.1", - "bundled": true, - "dev": true, - "requires": { - "yallist": "^4.0.0" - }, - "dependencies": { - "yallist": { - "version": "4.0.0", - "bundled": true, - "dev": true - } - } - }, - "mkdirp": { - "version": "0.5.1", - "bundled": true, - "dev": true, - "requires": { - "minimist": "0.0.8" - }, - "dependencies": { - "minimist": { - "version": "0.0.8", - "bundled": true, - "dev": true - } - } - }, - "ms": { - "version": "2.0.0", - "bundled": true, - "dev": true - }, - "number-is-nan": { - "version": "1.0.1", - "bundled": true, - "dev": true - }, - "object-assign": { - "version": "4.1.1", - "bundled": true, 
- "dev": true - }, - "onetime": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "mimic-fn": "^1.0.0" - } - }, - "os-homedir": { - "version": "1.0.2", - "bundled": true, - "dev": true - }, - "os-tmpdir": { - "version": "1.0.2", - "bundled": true, - "dev": true - }, - "path-is-absolute": { - "version": "1.0.1", - "bundled": true, - "dev": true - }, - "private": { - "version": "0.1.8", - "bundled": true, - "dev": true - }, - "prop-types": { - "version": "15.7.2", - "bundled": true, - "dev": true, - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, - "punycode": { - "version": "2.1.1", - "bundled": true, - "dev": true - }, - "react": { - "version": "16.10.2", - "bundled": true, - "dev": true, - "requires": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1", - "prop-types": "^15.6.2" - } - }, - "react-is": { - "version": "16.10.2", - "bundled": true, - "dev": true - }, - "react-reconciler": { - "version": "0.21.0", - "bundled": true, - "dev": true, - "requires": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1", - "prop-types": "^15.6.2", - "scheduler": "^0.15.0" - } - }, - "redeyed": { - "version": "2.1.1", - "bundled": true, - "dev": true, - "requires": { - "esprima": "~4.0.0" - } - }, - "regenerator-runtime": { - "version": "0.11.1", - "bundled": true, - "dev": true - }, - "repeating": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-finite": "^1.0.0" - } - }, - "resolve-from": { - "version": "3.0.0", - "bundled": true, - "dev": true - }, - "restore-cursor": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "requires": { - "onetime": "^2.0.0", - "signal-exit": "^3.0.2" - } - }, - "scheduler": { - "version": "0.15.0", - "bundled": true, - "dev": true, - "requires": { - "loose-envify": "^1.1.0", - "object-assign": "^4.1.1" - } - }, - "signal-exit": { - "version": "3.0.2", - "bundled": true, - "dev": true - }, - "slash": { - "version": "1.0.0", - "bundled": true, - "dev": true - }, - "slice-ansi": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0" - } - }, - "string-length": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "requires": { - "astral-regex": "^1.0.0", - "strip-ansi": "^4.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "bundled": true, - "dev": true - }, - "strip-ansi": { - "version": "4.0.0", - "bundled": true, - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - } - } - }, - "string-width": { - "version": "2.1.1", - "bundled": true, - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "bundled": true, - "dev": true - }, - "strip-ansi": { - "version": "4.0.0", - "bundled": true, - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - } - } - }, - "strip-ansi": { - "version": "3.0.1", - "bundled": true, - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "supports-color": { - "version": "2.0.0", - "bundled": true, - "dev": true - }, - "tap-parser": { - "version": "10.0.1", - "bundled": true, - "dev": true, - "requires": { - "events-to-array": "^1.0.1", - "minipass": "^3.0.0", - "tap-yaml": "^1.0.0" - } - }, - "tap-yaml": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "yaml": "^1.5.0" - } - }, - "to-fast-properties": { - "version": "1.0.3", - "bundled": true, - "dev": true - }, - 
"treport": { - "version": "0.4.2", - "bundled": true, - "dev": true, - "requires": { - "cardinal": "^2.1.1", - "chalk": "^2.4.2", - "import-jsx": "^2.0.0", - "ink": "^2.1.1", - "ms": "^2.1.1", - "react": "^16.8.6", - "string-length": "^2.0.0", - "tap-parser": "^10.0.1", - "unicode-length": "^2.0.1" - }, - "dependencies": { - "ansi-styles": { - "version": "3.2.1", - "bundled": true, - "dev": true, - "requires": { - "color-convert": "^1.9.0" - } - }, - "chalk": { - "version": "2.4.2", - "bundled": true, - "dev": true, - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "ms": { - "version": "2.1.2", - "bundled": true, - "dev": true - }, - "supports-color": { - "version": "5.5.0", - "bundled": true, - "dev": true, - "requires": { - "has-flag": "^3.0.0" - } - }, - "unicode-length": { - "version": "2.0.2", - "bundled": true, - "dev": true, - "requires": { - "punycode": "^2.0.0", - "strip-ansi": "^3.0.1" - } - } - } - }, - "trim-right": { - "version": "1.0.1", - "bundled": true, - "dev": true - }, - "type-fest": { - "version": "0.5.2", - "bundled": true, - "dev": true - }, - "widest-line": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "string-width": "^2.1.1" - } - }, - "wrap-ansi": { - "version": "5.1.0", - "bundled": true, - "dev": true, - "requires": { - "ansi-styles": "^3.2.0", - "string-width": "^3.0.0", - "strip-ansi": "^5.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "4.1.0", - "bundled": true, - "dev": true - }, - "ansi-styles": { - "version": "3.2.1", - "bundled": true, - "dev": true, - "requires": { - "color-convert": "^1.9.0" - } - }, - "string-width": { - "version": "3.1.0", - "bundled": true, - "dev": true, - "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - } - }, - "strip-ansi": { - "version": "5.2.0", - "bundled": true, - "dev": true, - "requires": { - "ansi-regex": "^4.1.0" - } - } - } - }, - "yaml": { - "version": "1.7.1", - "bundled": true, - "dev": true, - "requires": { - "@babel/runtime": "^7.5.5" - } - }, - "yoga-layout-prebuilt": { - "version": "1.9.3", - "bundled": true, - "dev": true - } - } - }, - "tap-mocha-reporter": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/tap-mocha-reporter/-/tap-mocha-reporter-5.0.0.tgz", - "integrity": "sha512-8HlAtdmYGlDZuW83QbF/dc46L7cN+AGhLZcanX3I9ILvxUAl+G2/mtucNPSXecTlG/4iP1hv6oMo0tMhkn3Tsw==", - "dev": true, - "requires": { - "color-support": "^1.1.0", - "debug": "^2.1.3", - "diff": "^1.3.2", - "escape-string-regexp": "^1.0.3", - "glob": "^7.0.5", - "readable-stream": "^2.1.5", - "tap-parser": "^10.0.0", - "tap-yaml": "^1.0.0", - "unicode-length": "^1.0.0" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "requires": { - "ms": "2.0.0" - } - }, - "diff": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-1.4.0.tgz", - "integrity": "sha1-fyjS657nsVqX79ic5j3P2qPMur8=", - "dev": true - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - } - } - }, - "tap-parser": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/tap-parser/-/tap-parser-10.0.1.tgz", - "integrity": 
"sha512-qdT15H0DoJIi7zOqVXDn9X0gSM68JjNy1w3VemwTJlDnETjbi6SutnqmBfjDJAwkFS79NJ97gZKqie00ZCGmzg==", - "dev": true, - "requires": { - "events-to-array": "^1.0.1", - "minipass": "^3.0.0", - "tap-yaml": "^1.0.0" - } - }, - "tap-yaml": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/tap-yaml/-/tap-yaml-1.0.0.tgz", - "integrity": "sha512-Rxbx4EnrWkYk0/ztcm5u3/VznbyFJpyXO12dDBHKWiDVxy7O2Qw6MRrwO5H6Ww0U5YhRY/4C/VzWmFPhBQc4qQ==", - "dev": true, - "requires": { - "yaml": "^1.5.0" - } - }, - "tcompare": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/tcompare/-/tcompare-2.3.0.tgz", - "integrity": "sha512-fAfA73uFtFGybWGt4+IYT6UPLYVZQ4NfsP+IXEZGY0vh8e2IF7LVKafcQNMRBLqP0wzEA65LM9Tqj+FSmO8GLw==", - "dev": true - }, - "test-exclude": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-5.2.3.tgz", - "integrity": "sha512-M+oxtseCFO3EDtAaGH7iiej3CBkzXqFMbzqYAACdzKui4eZA+pq3tZEwChvOdNfa7xxy8BfbmgJSIr43cC/+2g==", - "dev": true, - "requires": { - "glob": "^7.1.3", - "minimatch": "^3.0.4", - "read-pkg-up": "^4.0.0", - "require-main-filename": "^2.0.0" - } - }, - "to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", - "dev": true - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "requires": { - "is-number": "^7.0.0" - } - }, - "tough-cookie": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", - "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", - "dev": true, - "requires": { - "psl": "^1.1.24", - "punycode": "^1.4.1" - }, - "dependencies": { - "punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", - "dev": true - } - } - }, - "trivial-deferred": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/trivial-deferred/-/trivial-deferred-1.0.1.tgz", - "integrity": "sha1-N21NKdlR1jaKb3oK6FwvTV4GWPM=", - "dev": true - }, - "ts-node": { - "version": "8.4.1", - "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-8.4.1.tgz", - "integrity": "sha512-5LpRN+mTiCs7lI5EtbXmF/HfMeCjzt7DH9CZwtkr6SywStrNQC723wG+aOWFiLNn7zT3kD/RnFqi3ZUfr4l5Qw==", - "dev": true, - "requires": { - "arg": "^4.1.0", - "diff": "^4.0.1", - "make-error": "^1.1.1", - "source-map-support": "^0.5.6", - "yn": "^3.0.0" - } - }, - "tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", - "dev": true, - "requires": { - "safe-buffer": "^5.0.1" - } - }, - "tweetnacl": { - "version": "0.14.5", - "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", - "dev": true - }, - "typedarray-to-buffer": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", - "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", - "dev": true, - "requires": { - "is-typedarray": "^1.0.0" - } - }, - "typescript": { - 
"version": "3.7.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.2.tgz", - "integrity": "sha512-ml7V7JfiN2Xwvcer+XAf2csGO1bPBdRbFCkYBczNZggrBZ9c7G3riSUeJmqEU5uOtXNPMhE3n+R4FA/3YOAWOQ==", - "dev": true - }, - "uglify-js": { - "version": "3.6.8", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.6.8.tgz", - "integrity": "sha512-XhHJ3S3ZyMwP8kY1Gkugqx3CJh2C3O0y8NPiSxtm1tyD/pktLAkFZsFGpuNfTZddKDQ/bbDBLAd2YyA1pbi8HQ==", - "dev": true, - "optional": true, - "requires": { - "commander": "~2.20.3", - "source-map": "~0.6.1" - } - }, - "unicode-length": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/unicode-length/-/unicode-length-1.0.3.tgz", - "integrity": "sha1-Wtp6f+1RhBpBijKM8UlHisg1irs=", - "dev": true, - "requires": { - "punycode": "^1.3.2", - "strip-ansi": "^3.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true - }, - "punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", - "dev": true - }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - } - } - }, - "uri-js": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", - "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", - "dev": true, - "requires": { - "punycode": "^2.1.0" - } - }, - "util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", - "dev": true, - "optional": true - }, - "uuid": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz", - "integrity": "sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ==", - "dev": true - }, - "validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "dev": true, - "requires": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "verror": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0", - "core-util-is": "1.0.2", - "extsprintf": "^1.2.0" - } - }, - "vlq": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/vlq/-/vlq-0.2.3.tgz", - "integrity": "sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==", - "dev": true - }, - "which": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.1.tgz", - "integrity": "sha512-N7GBZOTswtB9lkQBZA4+zAXrjEIWAUOB93AvzUiudRzRxhUdLURQ7D/gAIMY1gatT/LTbmbcv8SiYazy3eYB7w==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - }, - "which-module": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", - "integrity": 
"sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", - "dev": true - }, - "wordwrap": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", - "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", - "dev": true - }, - "wrap-ansi": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", - "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", - "dev": true, - "requires": { - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "dev": true, - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", - "dev": true, - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - } - } - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true - }, - "write-file-atomic": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.1.tgz", - "integrity": "sha512-JPStrIyyVJ6oCSz/691fAjFtefZ6q+fP6tm+OS4Qw6o+TGQxNp1ziY2PgS+X/m0V8OWhZiO/m4xSj+Pr4RrZvw==", - "dev": true, - "requires": { - "imurmurhash": "^0.1.4", - "is-typedarray": "^1.0.0", - "signal-exit": "^3.0.2", - "typedarray-to-buffer": "^3.1.5" - } - }, - "y18n": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", - "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", - "dev": true - }, - "yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=", - "dev": true - }, - "yaml": { - "version": "1.7.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.7.2.tgz", - "integrity": "sha512-qXROVp90sb83XtAoqE8bP9RwAkTTZbugRUTm5YeFCBfNRPEp2YzTeqWiz7m5OORHzEvrA/qcGS8hp/E+MMROYw==", - "dev": true, - "requires": { - "@babel/runtime": "^7.6.3" - } - }, - "yapool": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/yapool/-/yapool-1.0.0.tgz", - "integrity": "sha1-9pPymjFbUNmp2iZGp6ZkXJaYW2o=", - "dev": true - }, - "yargs": { - "version": "13.3.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.0.tgz", - "integrity": "sha512-2eehun/8ALW8TLoIl7MVaRUrg+yCnenu8B4kBlRxj3GJGDKU1Og7sMXPNm1BYyM1DOJmTZ4YeN/Nwxv+8XJsUA==", - "dev": true, - "requires": { - "cliui": "^5.0.0", - "find-up": "^3.0.0", - "get-caller-file": "^2.0.1", - "require-directory": "^2.1.1", - "require-main-filename": "^2.0.0", - "set-blocking": "^2.0.0", - "string-width": "^3.0.0", - "which-module": "^2.0.0", - "y18n": "^4.0.0", - "yargs-parser": "^13.1.1" - }, - "dependencies": { - 
"ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true - }, - "cliui": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", - "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", - "dev": true, - "requires": { - "string-width": "^3.1.0", - "strip-ansi": "^5.2.0", - "wrap-ansi": "^5.1.0" - } - }, - "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - } - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "requires": { - "ansi-regex": "^4.1.0" - } - }, - "wrap-ansi": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", - "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.0", - "string-width": "^3.0.0", - "strip-ansi": "^5.0.0" - } - } - } - }, - "yargs-parser": { - "version": "13.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.1.tgz", - "integrity": "sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ==", - "dev": true, - "requires": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - } - }, - "yn": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", - "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", - "dev": true - } - } -} diff --git a/node_modules/json-stringify-nice/package.json b/node_modules/json-stringify-nice/package.json index e060b2ed70e6d..c9663290ee4bc 100644 --- a/node_modules/json-stringify-nice/package.json +++ b/node_modules/json-stringify-nice/package.json @@ -1,24 +1,40 @@ { "name": "json-stringify-nice", - "version": "1.1.1", + "version": "1.1.4", "description": "Stringify an object sorting scalars before objects, and defaulting to 2-space indent", "author": "Isaac Z. 
Schlueter <i@izs.me> (https://izs.me)", "license": "ISC", "scripts": { "test": "tap", + "posttest": "npm run lint", "snap": "tap", + "postsnap": "npm run lintfix", + "eslint": "eslint", + "lint": "npm run eslint -- index.js test/**/*.js", + "lintfix": "npm run lint -- --fix", "preversion": "npm test", "postversion": "npm publish", "postpublish": "git push origin --follow-tags" }, "tap": { + "test-env": [ + "LC_ALL=sk" + ], "check-coverage": true }, "devDependencies": { - "tap": "^14.9.2" + "eslint": "^7.25.0", + "eslint-plugin-import": "^2.22.1", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^5.1.0", + "eslint-plugin-standard": "^5.0.0", + "tap": "^15.0.6" }, "funding": { "url": "https://github.com/sponsors/isaacs" }, - "repository": "https://github.com/isaacs/json-stringify-nice" + "repository": "https://github.com/isaacs/json-stringify-nice", + "files": [ + "index.js" + ] } diff --git a/node_modules/json-stringify-nice/tap-snapshots/test-basic.js-TAP.test.js b/node_modules/json-stringify-nice/tap-snapshots/test-basic.js-TAP.test.js deleted file mode 100644 index 53d5fda6c3a14..0000000000000 --- a/node_modules/json-stringify-nice/tap-snapshots/test-basic.js-TAP.test.js +++ /dev/null @@ -1,127 +0,0 @@ -/* IMPORTANT - * This snapshot file is auto-generated, but designed for humans. - * It should be checked into source control and tracked carefully. - * Re-generate by setting TAP_SNAPSHOT=1 and running tests. - * Make sure to inspect the output below. Do not ignore changes! - */ -'use strict' -exports[`test/basic.js TAP basic sorting operation with default 2-space indent > mix of objects and out of order keys 1`] = ` -{ - "y": "z", - "yy": "a", - "z": 1, - "a": { - "a": 2, - "b": 1 - }, - "obj": { - "b": "x", - "a": {} - } -} - -` - -exports[`test/basic.js TAP replacer function is used > replace a val with phone doggo 1`] = ` -{ - "y": "z", - "yy": "a", - "z": 1, - "a": { - "b": 1, - "hello": "📞 yes", - "this is": "🐕", - "a": { - "hello": "📞 yes", - "nested": true, - "this is": "🐕" - } - }, - "obj": { - "b": "x", - "a": { - "hello": "📞 yes", - "this is": "🐕" - } - } -} - -` - -exports[`test/basic.js TAP sort keys explicitly with a preference list > replace a val with preferences 1`] = ` -{ - "z": 1, - "yy": "a", - "y": "z", - "obj": { - "b": "x", - "a": {} - }, - "a": { - "b": 1, - "a": { - "nested": true - } - } -} - -` - -exports[`test/basic.js TAP spaces can be set > boolean false 1`] = ` -{"y":"z","yy":"a","z":1,"a":{"a":2,"b":1},"obj":{"b":"x","a":{}}} -` - -exports[`test/basic.js TAP spaces can be set > empty string 1`] = ` -{"y":"z","yy":"a","z":1,"a":{"a":2,"b":1},"obj":{"b":"x","a":{}}} -` - -exports[`test/basic.js TAP spaces can be set > space face 1`] = ` -{ - ^_^ "y": "z", - ^_^ "yy": "a", - ^_^ "z": 1, - ^_^ "a": { - ^_^ ^_^ "a": 2, - ^_^ ^_^ "b": 1 - ^_^ }, - ^_^ "obj": { - ^_^ ^_^ "b": "x", - ^_^ ^_^ "a": {} - ^_^ } -} - -` - -exports[`test/basic.js TAP spaces can be set > tab 1`] = ` -{ - "y": "z", - "yy": "a", - "z": 1, - "a": { - "a": 2, - "b": 1 - }, - "obj": { - "b": "x", - "a": {} - } -} - -` - -exports[`test/basic.js TAP spaces can be set > the number 3 1`] = ` -{ - "y": "z", - "yy": "a", - "z": 1, - "a": { - "a": 2, - "b": 1 - }, - "obj": { - "b": "x", - "a": {} - } -} - -` diff --git a/node_modules/json-stringify-nice/test/basic.js b/node_modules/json-stringify-nice/test/basic.js deleted file mode 100644 index 23c3ceb384520..0000000000000 --- a/node_modules/json-stringify-nice/test/basic.js +++ /dev/null @@ -1,68 +0,0 @@ -const t = require('tap') 
-const stringify = require('../') - -t.test('basic sorting operation with default 2-space indent', t => { - t.plan(1) - t.matchSnapshot(stringify({ - z: 1, - y: 'z', - obj: { a: {}, b: 'x' }, - a: { b: 1, a: 2}, - yy: 'a', - }), 'mix of objects and out of order keys') -}) - -t.test('throws same error on cycles as JSON.stringify', t => { - t.plan(1) - const cycle = { a: { b: { c: {} } } } - cycle.a.b.c = cycle.a - try { - JSON.stringify(cycle) - } catch (builtinEr) { - t.throws(() => stringify(cycle), builtinEr, 'same error as builtin') - } -}) - -t.test('spaces can be set', t => { - t.plan(5) - const obj = { - z: 1, - y: 'z', - obj: { a: {}, b: 'x' }, - a: { b: 1, a: 2}, - yy: 'a', - } - t.matchSnapshot(stringify(obj, 0, '\t'), 'tab') - t.matchSnapshot(stringify(obj, null, ' ^_^ '), 'space face') - t.matchSnapshot(stringify(obj, false, 3), 'the number 3') - t.matchSnapshot(stringify(obj, false, ''), 'empty string') - t.matchSnapshot(stringify(obj, false, false), 'boolean false') -}) - -t.test('replacer function is used', t => { - t.plan(1) - const obj = { - z: 1, - y: 'z', - obj: { a: {}, b: 'x' }, - a: { b: 1, a: { nested: true} }, - yy: 'a', - } - const replacer = (key, val) => - key === 'a' ? { hello: '📞 yes', 'this is': '🐕', ...val } - : val - t.matchSnapshot(stringify(obj, replacer), 'replace a val with phone doggo') -}) - -t.test('sort keys explicitly with a preference list', t => { - t.plan(1) - const obj = { - z: 1, - y: 'z', - obj: { a: {}, b: 'x' }, - a: { b: 1, a: { nested: true} }, - yy: 'a', - } - const preference = ['obj', 'z', 'yy'] - t.matchSnapshot(stringify(obj, preference), 'replace a val with preferences') -}) diff --git a/node_modules/json-stringify-safe/.npmignore b/node_modules/json-stringify-safe/.npmignore deleted file mode 100644 index 17d6b3677f037..0000000000000 --- a/node_modules/json-stringify-safe/.npmignore +++ /dev/null @@ -1 +0,0 @@ -/*.tgz diff --git a/node_modules/json-stringify-safe/CHANGELOG.md b/node_modules/json-stringify-safe/CHANGELOG.md deleted file mode 100644 index 42bcb60af47a5..0000000000000 --- a/node_modules/json-stringify-safe/CHANGELOG.md +++ /dev/null @@ -1,14 +0,0 @@ -## Unreleased -- Fixes stringify to only take ancestors into account when checking - circularity. - It previously assumed every visited object was circular which led to [false - positives][issue9]. - Uses the tiny serializer I wrote for [Must.js][must] a year and a half ago. -- Fixes calling the `replacer` function in the proper context (`thisArg`). -- Fixes calling the `cycleReplacer` function in the proper context (`thisArg`). -- Speeds serializing by a factor of - Big-O(h-my-god-it-linearly-searched-every-object) it had ever seen. Searching - only the ancestors for a circular references speeds up things considerably. - -[must]: https://github.com/moll/js-must -[issue9]: https://github.com/isaacs/json-stringify-safe/issues/9 diff --git a/node_modules/json-stringify-safe/README.md b/node_modules/json-stringify-safe/README.md deleted file mode 100644 index a11f302a33070..0000000000000 --- a/node_modules/json-stringify-safe/README.md +++ /dev/null @@ -1,52 +0,0 @@ -# json-stringify-safe - -Like JSON.stringify, but doesn't throw on circular references. - -## Usage - -Takes the same arguments as `JSON.stringify`. 
- -```javascript -var stringify = require('json-stringify-safe'); -var circularObj = {}; -circularObj.circularRef = circularObj; -circularObj.list = [ circularObj, circularObj ]; -console.log(stringify(circularObj, null, 2)); -``` - -Output: - -```json -{ - "circularRef": "[Circular]", - "list": [ - "[Circular]", - "[Circular]" - ] -} -``` - -## Details - -``` -stringify(obj, serializer, indent, decycler) -``` - -The first three arguments are the same as to JSON.stringify. The last -is an argument that's only used when the object has been seen already. - -The default `decycler` function returns the string `'[Circular]'`. -If, for example, you pass in `function(k,v){}` (return nothing) then it -will prune cycles. If you pass in `function(k,v){ return {foo: 'bar'}}`, -then cyclical objects will always be represented as `{"foo":"bar"}` in -the result. - -``` -stringify.getSerialize(serializer, decycler) -``` - -Returns a serializer that can be used elsewhere. This is the actual -function that's passed to JSON.stringify. - -**Note** that the function returned from `getSerialize` is stateful for now, so -do **not** use it more than once. diff --git a/node_modules/jsonparse/.npmignore b/node_modules/jsonparse/.npmignore deleted file mode 100644 index b512c09d47662..0000000000000 --- a/node_modules/jsonparse/.npmignore +++ /dev/null @@ -1 +0,0 @@ -node_modules \ No newline at end of file diff --git a/node_modules/jsonparse/README.markdown b/node_modules/jsonparse/README.markdown deleted file mode 100644 index 0f405d359fe6c..0000000000000 --- a/node_modules/jsonparse/README.markdown +++ /dev/null @@ -1,11 +0,0 @@ -This is a streaming JSON parser. For a simpler, sax-based version see this gist: https://gist.github.com/1821394 - -The MIT License (MIT) -Copyright (c) 2011-2012 Tim Caswell - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/node_modules/jsprim/README.md b/node_modules/jsprim/README.md deleted file mode 100644 index b3f28a46c9d7f..0000000000000 --- a/node_modules/jsprim/README.md +++ /dev/null @@ -1,287 +0,0 @@ -# jsprim: utilities for primitive JavaScript types - -This module provides miscellaneous facilities for working with strings, -numbers, dates, and objects and arrays of these basic types. - - -### deepCopy(obj) - -Creates a deep copy of a primitive type, object, or array of primitive types. - - -### deepEqual(obj1, obj2) - -Returns whether two objects are equal. - - -### isEmpty(obj) - -Returns true if the given object has no properties and false otherwise. 
This -is O(1) (unlike `Object.keys(obj).length === 0`, which is O(N)). - -### hasKey(obj, key) - -Returns true if the given object has an enumerable, non-inherited property -called `key`. [For information on enumerability and ownership of properties, see -the MDN -documentation.](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Enumerability_and_ownership_of_properties) - -### forEachKey(obj, callback) - -Like Array.forEach, but iterates enumerable, owned properties of an object -rather than elements of an array. Equivalent to: - - for (var key in obj) { - if (Object.prototype.hasOwnProperty.call(obj, key)) { - callback(key, obj[key]); - } - } - - -### flattenObject(obj, depth) - -Flattens an object up to a given level of nesting, returning an array of arrays -of length "depth + 1", where the first "depth" elements correspond to flattened -columns and the last element contains the remaining object . For example: - - flattenObject({ - 'I': { - 'A': { - 'i': { - 'datum1': [ 1, 2 ], - 'datum2': [ 3, 4 ] - }, - 'ii': { - 'datum1': [ 3, 4 ] - } - }, - 'B': { - 'i': { - 'datum1': [ 5, 6 ] - }, - 'ii': { - 'datum1': [ 7, 8 ], - 'datum2': [ 3, 4 ], - }, - 'iii': { - } - } - }, - 'II': { - 'A': { - 'i': { - 'datum1': [ 1, 2 ], - 'datum2': [ 3, 4 ] - } - } - } - }, 3) - -becomes: - - [ - [ 'I', 'A', 'i', { 'datum1': [ 1, 2 ], 'datum2': [ 3, 4 ] } ], - [ 'I', 'A', 'ii', { 'datum1': [ 3, 4 ] } ], - [ 'I', 'B', 'i', { 'datum1': [ 5, 6 ] } ], - [ 'I', 'B', 'ii', { 'datum1': [ 7, 8 ], 'datum2': [ 3, 4 ] } ], - [ 'I', 'B', 'iii', {} ], - [ 'II', 'A', 'i', { 'datum1': [ 1, 2 ], 'datum2': [ 3, 4 ] } ] - ] - -This function is strict: "depth" must be a non-negative integer and "obj" must -be a non-null object with at least "depth" levels of nesting under all keys. - - -### flattenIter(obj, depth, func) - -This is similar to `flattenObject` except that instead of returning an array, -this function invokes `func(entry)` for each `entry` in the array that -`flattenObject` would return. `flattenIter(obj, depth, func)` is logically -equivalent to `flattenObject(obj, depth).forEach(func)`. Importantly, this -version never constructs the full array. Its memory usage is O(depth) rather -than O(n) (where `n` is the number of flattened elements). - -There's another difference between `flattenObject` and `flattenIter` that's -related to the special case where `depth === 0`. In this case, `flattenObject` -omits the array wrapping `obj` (which is regrettable). - - -### pluck(obj, key) - -Fetch nested property "key" from object "obj", traversing objects as needed. -For example, `pluck(obj, "foo.bar.baz")` is roughly equivalent to -`obj.foo.bar.baz`, except that: - -1. If traversal fails, the resulting value is undefined, and no error is - thrown. For example, `pluck({}, "foo.bar")` is just undefined. -2. If "obj" has property "key" directly (without traversing), the - corresponding property is returned. For example, - `pluck({ 'foo.bar': 1 }, 'foo.bar')` is 1, not undefined. This is also - true recursively, so `pluck({ 'a': { 'foo.bar': 1 } }, 'a.foo.bar')` is - also 1, not undefined. - - -### randElt(array) - -Returns an element from "array" selected uniformly at random. If "array" is -empty, throws an Error. - - -### startsWith(str, prefix) - -Returns true if the given string starts with the given prefix and false -otherwise. - - -### endsWith(str, suffix) - -Returns true if the given string ends with the given suffix and false -otherwise. 
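A minimal sketch of how the `pluck()`, `startsWith()`, and `endsWith()` helpers described above behave. The sample object and strings are illustrative only; the commented results follow the documented behavior (including the two `pluck()` cases quoted above) rather than a verified run.

```javascript
// Sketch based on the jsprim docs above; inputs are made up for illustration.
var jsprim = require('jsprim');

// Nested traversal, roughly obj.foo.bar.baz:
console.log(jsprim.pluck({ foo: { bar: { baz: 7 } } }, 'foo.bar.baz')); // 7

// Failed traversal yields undefined instead of throwing:
console.log(jsprim.pluck({}, 'foo.bar')); // undefined

// A direct property named "foo.bar" wins over traversal:
console.log(jsprim.pluck({ 'foo.bar': 1 }, 'foo.bar')); // 1

// Simple prefix/suffix checks:
console.log(jsprim.startsWith('libnpm', 'lib'));  // true
console.log(jsprim.endsWith('index.js', '.md'));  // false
```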
- - -### parseInteger(str, options) - -Parses the contents of `str` (a string) as an integer. On success, the integer -value is returned (as a number). On failure, an error is **returned** describing -why parsing failed. - -By default, leading and trailing whitespace characters are not allowed, nor are -trailing characters that are not part of the numeric representation. This -behaviour can be toggled by using the options below. The empty string (`''`) is -not considered valid input. If the return value cannot be precisely represented -as a number (i.e., is smaller than `Number.MIN_SAFE_INTEGER` or larger than -`Number.MAX_SAFE_INTEGER`), an error is returned. Additionally, the string -`'-0'` will be parsed as the integer `0`, instead of as the IEEE floating point -value `-0`. - -This function accepts both upper and lowercase characters for digits, similar to -`parseInt()`, `Number()`, and [strtol(3C)](https://illumos.org/man/3C/strtol). - -The following may be specified in `options`: - -Option | Type | Default | Meaning ------------------- | ------- | ------- | --------------------------- -base | number | 10 | numeric base (radix) to use, in the range 2 to 36 -allowSign | boolean | true | whether to interpret any leading `+` (positive) and `-` (negative) characters -allowImprecise | boolean | false | whether to accept values that may have lost precision (past `MAX_SAFE_INTEGER` or below `MIN_SAFE_INTEGER`) -allowPrefix | boolean | false | whether to interpret the prefixes `0b` (base 2), `0o` (base 8), `0t` (base 10), or `0x` (base 16) -allowTrailing | boolean | false | whether to ignore trailing characters -trimWhitespace | boolean | false | whether to trim any leading or trailing whitespace/line terminators -leadingZeroIsOctal | boolean | false | whether a leading zero indicates octal - -Note that if `base` is unspecified, and `allowPrefix` or `leadingZeroIsOctal` -are, then the leading characters can change the default base from 10. If `base` -is explicitly specified and `allowPrefix` is true, then the prefix will only be -accepted if it matches the specified base. `base` and `leadingZeroIsOctal` -cannot be used together. - -**Context:** It's tricky to parse integers with JavaScript's built-in facilities -for several reasons: - -- `parseInt()` and `Number()` by default allow the base to be specified in the - input string by a prefix (e.g., `0x` for hex). -- `parseInt()` allows trailing nonnumeric characters. -- `Number(str)` returns 0 when `str` is the empty string (`''`). -- Both functions return incorrect values when the input string represents a - valid integer outside the range of integers that can be represented precisely. - Specifically, `parseInt('9007199254740993')` returns 9007199254740992. -- Both functions always accept `-` and `+` signs before the digit. -- Some older JavaScript engines always interpret a leading 0 as indicating - octal, which can be surprising when parsing input from users who expect a - leading zero to be insignificant. - -While each of these may be desirable in some contexts, there are also times when -none of them are wanted. `parseInteger()` grants greater control over what -input's permissible. - -### iso8601(date) - -Converts a Date object to an ISO8601 date string of the form -"YYYY-MM-DDTHH:MM:SS.sssZ". This format is not customizable. 
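The `parseInteger()` behavior described above can be summarized in a short sketch. The inputs are illustrative, the option names come from the table above, and the commented results follow the documented semantics (failures are returned as `Error` objects, not thrown) rather than a verified run.

```javascript
// Sketch of jsprim.parseInteger() per the documented defaults and options.
var jsprim = require('jsprim');

jsprim.parseInteger('42');                            // 42
jsprim.parseInteger('-0');                            // 0 (never the IEEE value -0)
jsprim.parseInteger('');                              // Error: empty string is not valid input
jsprim.parseInteger('0x1f', { allowPrefix: true });   // 31 (the 0x prefix selects base 16)
jsprim.parseInteger('12px');                          // Error: trailing characters rejected by default
jsprim.parseInteger('12px', { allowTrailing: true }); // 12
```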
- - -### parseDateTime(str) - -Parses a date expressed as a string, as either a number of milliseconds since -the epoch or any string format that Date accepts, giving preference to the -former where these two sets overlap (e.g., strings containing small numbers). - - -### hrtimeDiff(timeA, timeB) - -Given two hrtime readings (as from Node's `process.hrtime()`), where timeA is -later than timeB, compute the difference and return that as an hrtime. It is -illegal to invoke this for a pair of times where timeB is newer than timeA. - -### hrtimeAdd(timeA, timeB) - -Add two hrtime intervals (as from Node's `process.hrtime()`), returning a new -hrtime interval array. This function does not modify either input argument. - - -### hrtimeAccum(timeA, timeB) - -Add two hrtime intervals (as from Node's `process.hrtime()`), storing the -result in `timeA`. This function overwrites (and returns) the first argument -passed in. - - -### hrtimeNanosec(timeA), hrtimeMicrosec(timeA), hrtimeMillisec(timeA) - -This suite of functions converts a hrtime interval (as from Node's -`process.hrtime()`) into a scalar number of nanoseconds, microseconds or -milliseconds. Results are truncated, as with `Math.floor()`. - - -### validateJsonObject(schema, object) - -Uses JSON validation (via JSV) to validate the given object against the given -schema. On success, returns null. On failure, *returns* (does not throw) a -useful Error object. - - -### extraProperties(object, allowed) - -Check an object for unexpected properties. Accepts the object to check, and an -array of allowed property name strings. If extra properties are detected, an -array of extra property names is returned. If no properties other than those -in the allowed list are present on the object, the returned array will be of -zero length. - -### mergeObjects(provided, overrides, defaults) - -Merge properties from objects "provided", "overrides", and "defaults". The -intended use case is for functions that accept named arguments in an "args" -object, but want to provide some default values and override other values. In -that case, "provided" is what the caller specified, "overrides" are what the -function wants to override, and "defaults" contains default values. - -The function starts with the values in "defaults", overrides them with the -values in "provided", and then overrides those with the values in "overrides". -For convenience, any of these objects may be falsey, in which case they will be -ignored. The input objects are never modified, but properties in the returned -object are not deep-copied. - -For example: - - mergeObjects(undefined, { 'objectMode': true }, { 'highWaterMark': 0 }) - -returns: - - { 'objectMode': true, 'highWaterMark': 0 } - -For another example: - - mergeObjects( - { 'highWaterMark': 16, 'objectMode': 7 }, /* from caller */ - { 'objectMode': true }, /* overrides */ - { 'highWaterMark': 0 }); /* default */ - -returns: - - { 'objectMode': true, 'highWaterMark': 16 } - - -# Contributing - -See separate [contribution guidelines](CONTRIBUTING.md). diff --git a/node_modules/just-diff-apply/README.md b/node_modules/just-diff-apply/README.md deleted file mode 100644 index 2068a483062b7..0000000000000 --- a/node_modules/just-diff-apply/README.md +++ /dev/null @@ -1,52 +0,0 @@ -## just-diff-apply - -Part of a [library](../../../../) of zero-dependency npm modules that do just do one thing. -Guilt-free utilities for every occasion. - -[Try it now](http://anguscroll.com/just/just-diff-apply) - -Apply a diff object to an object. 
-Pass converter to apply a http://jsonpatch.com standard patch - -```js - import diffApply from 'just-diff-apply'; - - const obj1 = {a: 3, b: 5}; - diffApply(obj1, - [ - { "op": "remove", "path": ['b'] }, - { "op": "replace", "path": ['a'], "value": 4 }, - { "op": "add", "path": ['c'], "value": 5 } - ] - ); - obj1; // {a: 4, c: 5} - - // using converter to apply jsPatch standard paths - // see http://jsonpatch.com - import {diffApply, jsonPatchPathConverter} from 'just-diff-apply' - const obj2 = {a: 3, b: 5}; - diffApply(obj2, [ - { "op": "remove", "path": '/b' }, - { "op": "replace", "path": '/a', "value": 4 } - { "op": "add", "path": '/c', "value": 5 } - ], jsonPatchPathConverter); - obj2; // {a: 4, c: 5} - - // arrays (array key can be string or numeric) - const obj3 = {a: 4, b: [1, 2, 3]}; - diffApply(obj3, [ - { "op": "replace", "path": ['a'], "value": 3 } - { "op": "replace", "path": ['b', 2], "value": 4 } - { "op": "add", "path": ['b', 3], "value": 9 } - ]); - obj3; // {a: 3, b: [1, 2, 4, 9]} - - // nested paths - const obj4 = {a: 4, b: {c: 3}}; - diffApply(obj4, [ - { "op": "replace", "path": ['a'], "value": 5 } - { "op": "remove", "path": ['b', 'c']} - { "op": "add", "path": ['b', 'd'], "value": 4 } - ]); - obj4; // {a: 5, b: {d: 4}} -``` diff --git a/node_modules/just-diff/README.md b/node_modules/just-diff/README.md deleted file mode 100644 index 836868fe9043b..0000000000000 --- a/node_modules/just-diff/README.md +++ /dev/null @@ -1,76 +0,0 @@ -## just-diff - -Part of a [library](../../../../) of zero-dependency npm modules that do just do one thing. -Guilt-free utilities for every occasion. - -[Try it now](http://anguscroll.com/just/just-diff) - -Return an object representing the difference between two other objects -Pass converter to format as http://jsonpatch.com - -```js -import {diff} from 'just-diff'; - -const obj1 = {a: 4, b: 5}; -const obj2 = {a: 3, b: 5}; -const obj3 = {a: 4, c: 5}; - -diff(obj1, obj2); -[ - { "op": "replace", "path": ['a'], "value": 3 } -] - -diff(obj2, obj3); -[ - { "op": "remove", "path": ['b'] }, - { "op": "replace", "path": ['a'], "value": 4 } - { "op": "add", "path": ['c'], "value": 5 } -] - -// using converter to generate jsPatch standard paths -import {diff, jsonPatchPathConverter} from 'just-diff' -diff(obj1, obj2, jsonPatchPathConverter); -[ - { "op": "replace", "path": '/a', "value": 3 } -] - -diff(obj2, obj3, jsonPatchPathConverter); -[ - { "op": "remove", "path": '/b' }, - { "op": "replace", "path": '/a', "value": 4 } - { "op": "add", "path": '/c', "value": 5 } -] - -// arrays -const obj4 = {a: 4, b: [1, 2, 3]}; -const obj5 = {a: 3, b: [1, 2, 4]}; -const obj6 = {a: 3, b: [1, 2, 4, 5]}; - -diff(obj4, obj5); -[ - { "op": "replace", "path": ['a'], "value": 3 } - { "op": "replace", "path": ['b', 2], "value": 4 } -] - -diff(obj5, obj6); -[ - { "op": "add", "path": ['b', 3], "value": 5 } -] - -// nested paths -const obj7 = {a: 4, b: {c: 3}}; -const obj8 = {a: 4, b: {c: 4}}; -const obj9 = {a: 5, b: {d: 4}}; - -diff(obj7, obj8); -[ - { "op": "replace", "path": ['b', 'c'], "value": 4 } -] - -diff(obj8, obj9); -[ - { "op": "replace", "path": ['a'], "value": 5 } - { "op": "remove", "path": ['b', 'c']} - { "op": "add", "path": ['b', 'd'], "value": 4 } -] -``` diff --git a/node_modules/just-diff/index.d.ts b/node_modules/just-diff/index.d.ts new file mode 100644 index 0000000000000..576ddc54957fc --- /dev/null +++ b/node_modules/just-diff/index.d.ts @@ -0,0 +1,20 @@ +// Definitions by: Cameron Hunter <https://github.com/cameronhunter> +// Modified by: 
Angus Croll <https://github.com/angus-c> +type Operation = "add" | "replace" | "remove"; + +type JSONPatchPathConverter<OUTPUT> = ( + arrayPath: Array<string | number> +) => OUTPUT; + +export function diff( + a: object | Array<any>, + b: object | Array<any>, +): Array<{ op: Operation; path: Array<string | number>; value: any }>; + +export function diff<PATH>( + a: object | Array<any>, + b: object | Array<any>, + jsonPatchPathConverter: JSONPatchPathConverter<PATH> +): Array<{ op: Operation; path: PATH; value: any }>; + +export const jsonPatchPathConverter: JSONPatchPathConverter<string>; \ No newline at end of file diff --git a/node_modules/just-diff/index.tests.ts b/node_modules/just-diff/index.tests.ts new file mode 100644 index 0000000000000..c7ebb70d3dc64 --- /dev/null +++ b/node_modules/just-diff/index.tests.ts @@ -0,0 +1,65 @@ +import diffObj = require('./index'); + +const {diff, jsonPatchPathConverter} = diffObj; +const obj1 = {a: 2, b: 3}; +const obj2 = {a: 2, c: 1}; +const arr1 = [1, 'bee']; +const arr2 = [2, 'bee']; + + +//OK +diff(obj1, obj2); +diff(arr1, arr2); +diff(obj1, arr1); +diff(obj2, arr2); +diff(/yes/, arr1); +diff(new Date(), arr2); + + +diff(obj1, obj2, jsonPatchPathConverter); +diff(arr1, arr2, jsonPatchPathConverter); +diff(obj1, arr1, jsonPatchPathConverter); +diff(obj2, arr2, jsonPatchPathConverter); + +// not OK +// @ts-expect-error +diff(obj1); +// @ts-expect-error +diff(arr2); +// @ts-expect-error +diff('a'); +// @ts-expect-error +diff(true); + +// @ts-expect-error +diff(obj1, 1); +// @ts-expect-error +diff(3, arr2); +// @ts-expect-error +diff(obj1, 'a'); +// @ts-expect-error +diff('b', arr2); + +// @ts-expect-error +diff('a', jsonPatchPathConverter); +// @ts-expect-error +diff(true, jsonPatchPathConverter); + +// @ts-expect-error +diff(obj1, 1, jsonPatchPathConverter); +// @ts-expect-error +diff(3, arr2, jsonPatchPathConverter); +// @ts-expect-error +diff(obj1, 'a', jsonPatchPathConverter); +// @ts-expect-error +diff('b', arr2, jsonPatchPathConverter); + +// @ts-expect-error +diff(obj1, obj2, 'a'); +// @ts-expect-error +diff(arr1, arr2, 1); +// @ts-expect-error +diff(obj1, arr1, 'bee'); +// @ts-expect-error +diff(obj2, arr2, 'nope'); + diff --git a/node_modules/just-diff/package.json b/node_modules/just-diff/package.json index dbe3aa2ba9cea..00be1d50fddbc 100644 --- a/node_modules/just-diff/package.json +++ b/node_modules/just-diff/package.json @@ -1,8 +1,9 @@ { "name": "just-diff", - "version": "3.0.2", + "version": "3.1.1", "description": "Return an object representing the diffs between two objects. 
Supports jsonPatch protocol", "main": "index.js", + "types": "index.d.ts", "scripts": { "test": "echo \"Error: no test specified\" && exit 1" }, @@ -19,4 +20,4 @@ "bugs": { "url": "https://github.com/angus-c/just/issues" } -} +} \ No newline at end of file diff --git a/node_modules/libnpmaccess/.github/settings.yml b/node_modules/libnpmaccess/.github/settings.yml deleted file mode 100644 index 4aaa0dd57e4ad..0000000000000 --- a/node_modules/libnpmaccess/.github/settings.yml +++ /dev/null @@ -1,2 +0,0 @@ ---- -_extends: 'open-source-project-boilerplate' diff --git a/node_modules/libnpmaccess/.github/workflows/ci.yml b/node_modules/libnpmaccess/.github/workflows/ci.yml deleted file mode 100644 index 71189bae7b962..0000000000000 --- a/node_modules/libnpmaccess/.github/workflows/ci.yml +++ /dev/null @@ -1,94 +0,0 @@ ---- -################################################################################ -# Template - Node CI -# -# Description: -# This contains the basic information to: install dependencies, run tests, -# get coverage, and run linting on a nodejs project. This template will run -# over the MxN matrix of all operating systems, and all current LTS versions -# of NodeJS. -# -# Dependencies: -# This template assumes that your project is using the `tap` module for -# testing. If you're not using this module, then the step that runs your -# coverage will need to be adjusted. -# -################################################################################ -name: Node CI - -on: [push, pull_request] - -jobs: - build: - strategy: - fail-fast: false - matrix: - node-version: [10.x, 12.x, 13.x] - os: [ubuntu-latest, windows-latest, macOS-latest] - - runs-on: ${{ matrix.os }} - - steps: - # Checkout the repository - - uses: actions/checkout@v2 - # Installs the specific version of Node.js - - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - ################################################################################ - # Install Dependencies - # - # ASSUMPTIONS: - # - The project has a package-lock.json file - # - # Simply run the tests for the project. - ################################################################################ - - name: Install dependencies - run: npm ci - - ################################################################################ - # Run Testing - # - # ASSUMPTIONS: - # - The project has `tap` as a devDependency - # - There is a script called "test" in the package.json - # - # Simply run the tests for the project. - ################################################################################ - - name: Run tests - run: npm test -- --no-coverage - - ################################################################################ - # Run coverage check - # - # ASSUMPTIONS: - # - The project has `tap` as a devDependency - # - There is a script called "coverage" in the package.json - # - # Coverage should only be posted once, we are choosing the latest LTS of - # node, and ubuntu as the matrix point to post coverage from. We limit - # to the 'push' event so that coverage ins't posted twice from the - # pull-request event, and push event (line 3). 
- ################################################################################ - - name: Run coverage report - if: github.event_name == 'push' && matrix.node-version == '12.x' && matrix.os == 'ubuntu-latest' - run: npm test - env: - # The environment variable name is leveraged by `tap` - COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} - - ################################################################################ - # Run linting - # - # ASSUMPTIONS: - # - There is a script called "lint" in the package.json - # - # We run linting AFTER we run testing and coverage checks, because if a step - # fails in an GitHub Action, all other steps are not run. We don't want to - # fail to run tests or coverage because of linting. It should be the lowest - # priority of all the steps. - ################################################################################ - - name: Run linter - run: npm run lint diff --git a/node_modules/libnpmaccess/CHANGELOG.md b/node_modules/libnpmaccess/CHANGELOG.md deleted file mode 100644 index 6d8036a9daf06..0000000000000 --- a/node_modules/libnpmaccess/CHANGELOG.md +++ /dev/null @@ -1,166 +0,0 @@ -# Change Log - -<a name="4.0.0"></a> -## [4.0.0](https://github.com/npm/libnpmaccess/compare/v3.0.2...v4.0.0) (2020-03-02) - -### BREAKING CHANGES -- `25ac61b` fix: remove figgy-pudding ([@claudiahdz](https://github.com/claudiahdz)) -- `8d6f692` chore: rename opts.mapJson to opts.mapJSON ([@mikemimik](https://github.com/mikemimik)) - -### Features -- `257879a` chore: removed standard-version as a dep; updated scripts for version/publishing ([@mikemimik](https://github.com/mikemimik)) -- `46c6740` fix: pull-request feedback; read full commit message ([@mikemimik](https://github.com/mikemimik)) -- `778c102` chore: updated test, made case more clear ([@mikemimik](https://github.com/mikemimik)) -- `6dc9852` fix: refactored 'pwrap' function out of code base; use native promises ([@mikemimik](https://github.com/mikemimik)) -- `d2e7219` chore: updated package scripts; update CI workflow ([@mikemimik](https://github.com/mikemimik)) -- `5872364` chore: renamed test/util/ to test/fixture/; tap will ignore now ([@mikemimik](https://github.com/mikemimik)) -- `3c6b71d` chore: linted test file; made tap usage 'better' ([@mikemimik](https://github.com/mikemimik)) -- `20f0858` fix: added default values to params for API functions (with tests) ([@mikemimik](https://github.com/mikemimik)) -- `3218289` feat: replace get-stream with minipass ([@mikemimik](https://github.com/mikemimik)) - -### Documentation -- `6c8ffa0` docs: removed opts.Promise from docs; no longer in use ([@mikemimik](https://github.com/mikemimik)) -- `311bff5` chore: added return types to function docs in README ([@mikemimik](https://github.com/mikemimik)) -- `823726a` chore: removed travis badge, added github actions badge ([@mikemimik](https://github.com/mikemimik)) -- `80e80ac` chore: updated README ([@mikemimik](https://github.com/mikemimik)) - -### Dependencies -- `baed2b9` deps: standard-version@7.1.0 (audit fix) ([@mikemimik](https://github.com/mikemimik)) -- `65c2204` deps: nock@12.0.1 (audit fix) ([@mikemimik](https://github.com/mikemimik)) -- `2668386` deps: npm-registry-fetch@8.0.0 ([@mikemimik](https://github.com/mikemimik)) -- `ef093e2` deps: tap@14.10.6 ([@mikemimik](https://github.com/mikemimik)) - -### Miscellanieous -- `8e33902` chore: basic project updates ([@claudiahdz](https://github.com/claudiahdz)) -- `50e1433` fix: update return value; add tests 
([@mikemimik](https://github.com/mikemimik)) -- `36d5c80` chore: updated gitignore; includes coverage folder ([@mikemimik](https://github.com/mikemimik)) - ---- - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -<a name="3.0.2"></a> -## [3.0.2](https://github.com/npm/libnpmaccess/compare/v3.0.1...v3.0.2) (2019-07-16) - - - -<a name="3.0.1"></a> -## [3.0.1](https://github.com/npm/libnpmaccess/compare/v3.0.0...v3.0.1) (2018-11-12) - - -### Bug Fixes - -* **ls-packages:** fix confusing splitEntity arg check ([1769090](https://github.com/npm/libnpmaccess/commit/1769090)) - - - -<a name="3.0.0"></a> -# [3.0.0](https://github.com/npm/libnpmaccess/compare/v2.0.1...v3.0.0) (2018-08-24) - - -### Features - -* **api:** overhaul API ergonomics ([1faf00a](https://github.com/npm/libnpmaccess/commit/1faf00a)) - - -### BREAKING CHANGES - -* **api:** all API calls where scope and team were separate, or -where team was an extra, optional argument should now use a -fully-qualified team name instead, in the `scope:team` format. - - - -<a name="2.0.1"></a> -## [2.0.1](https://github.com/npm/libnpmaccess/compare/v2.0.0...v2.0.1) (2018-08-24) - - - -<a name="2.0.0"></a> -# [2.0.0](https://github.com/npm/libnpmaccess/compare/v1.2.2...v2.0.0) (2018-08-21) - - -### Bug Fixes - -* **json:** stop trying to parse response JSON ([20fdd84](https://github.com/npm/libnpmaccess/commit/20fdd84)) -* **lsPackages:** team URL was wrong D: ([b52201c](https://github.com/npm/libnpmaccess/commit/b52201c)) - - -### BREAKING CHANGES - -* **json:** use cases where registries were returning JSON -strings in the response body will no longer have an effect. All -API functions except for lsPackages and lsCollaborators will return -`true` on completion. 
- - - -<a name="1.2.2"></a> -## [1.2.2](https://github.com/npm/libnpmaccess/compare/v1.2.1...v1.2.2) (2018-08-20) - - -### Bug Fixes - -* **docs:** tiny doc hiccup fix ([106396f](https://github.com/npm/libnpmaccess/commit/106396f)) - - - -<a name="1.2.1"></a> -## [1.2.1](https://github.com/npm/libnpmaccess/compare/v1.2.0...v1.2.1) (2018-08-20) - - -### Bug Fixes - -* **docs:** document the stream interfaces ([c435aa2](https://github.com/npm/libnpmaccess/commit/c435aa2)) - - - -<a name="1.2.0"></a> -# [1.2.0](https://github.com/npm/libnpmaccess/compare/v1.1.0...v1.2.0) (2018-08-20) - - -### Bug Fixes - -* **readme:** fix up appveyor badge url ([42b45a1](https://github.com/npm/libnpmaccess/commit/42b45a1)) - - -### Features - -* **streams:** add streaming result support for lsPkg and lsCollab ([0f06f46](https://github.com/npm/libnpmaccess/commit/0f06f46)) - - - -<a name="1.1.0"></a> -# [1.1.0](https://github.com/npm/libnpmaccess/compare/v1.0.0...v1.1.0) (2018-08-17) - - -### Bug Fixes - -* **2fa:** escape package names correctly ([f2d83fe](https://github.com/npm/libnpmaccess/commit/f2d83fe)) -* **grant:** fix permissions validation ([07f7435](https://github.com/npm/libnpmaccess/commit/07f7435)) -* **ls-collaborators:** fix package name escaping + query ([3c02858](https://github.com/npm/libnpmaccess/commit/3c02858)) -* **ls-packages:** add query + fix fallback request order ([bdc4791](https://github.com/npm/libnpmaccess/commit/bdc4791)) -* **node6:** stop using Object.entries() ([4fec03c](https://github.com/npm/libnpmaccess/commit/4fec03c)) -* **public/restricted:** body should be string, not bool ([cffc727](https://github.com/npm/libnpmaccess/commit/cffc727)) -* **readme:** fix up title and badges ([2bd6113](https://github.com/npm/libnpmaccess/commit/2bd6113)) -* **specs:** require specs to be registry specs ([7892891](https://github.com/npm/libnpmaccess/commit/7892891)) - - -### Features - -* **test:** add 100% coverage test suite ([22b5dec](https://github.com/npm/libnpmaccess/commit/22b5dec)) - - - -<a name="1.0.0"></a> -# 1.0.0 (2018-08-17) - - -### Bug Fixes - -* **test:** -100 is apparently bad now ([a5ab879](https://github.com/npm/libnpmaccess/commit/a5ab879)) - - -### Features - -* **impl:** initial implementation of api ([7039390](https://github.com/npm/libnpmaccess/commit/7039390)) diff --git a/node_modules/libnpmaccess/README.md b/node_modules/libnpmaccess/README.md deleted file mode 100644 index c079344597968..0000000000000 --- a/node_modules/libnpmaccess/README.md +++ /dev/null @@ -1,247 +0,0 @@ -# libnpmaccess - -[![npm version](https://img.shields.io/npm/v/libnpmaccess.svg)](https://npm.im/libnpmaccess) -[![license](https://img.shields.io/npm/l/libnpmaccess.svg)](https://npm.im/libnpmaccess) -[![GitHub Actions](https://github.com/npm/libnpmaccess/workflows/Node%20CI/badge.svg)](https://github.com/npm/libnpmaccess/actions?query=workflow%3A%22Node+CI%22) -[![Coverage Status](https://coveralls.io/repos/github/npm/libnpmaccess/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmaccess?branch=latest) - -[`libnpmaccess`](https://github.com/npm/libnpmaccess) is a Node.js -library that provides programmatic access to the guts of the npm CLI's `npm -access` command and its various subcommands. This includes managing account 2FA, -listing packages and permissions, looking at package collaborators, and defining -package permissions for users, orgs, and teams. 
- -## Example - -```javascript -const access = require('libnpmaccess') - -// List all packages @zkat has access to on the npm registry. -console.log(Object.keys(await access.lsPackages('zkat'))) -``` - -## Table of Contents - -* [Installing](#install) -* [Example](#example) -* [Contributing](#contributing) -* [API](#api) - * [access opts](#opts) - * [`public()`](#public) - * [`restricted()`](#restricted) - * [`grant()`](#grant) - * [`revoke()`](#revoke) - * [`tfaRequired()`](#tfa-required) - * [`tfaNotRequired()`](#tfa-not-required) - * [`lsPackages()`](#ls-packages) - * [`lsPackages.stream()`](#ls-packages-stream) - * [`lsCollaborators()`](#ls-collaborators) - * [`lsCollaborators.stream()`](#ls-collaborators-stream) - -### Install - -`$ npm install libnpmaccess` - -### API - -#### <a name="opts"></a> `opts` for `libnpmaccess` commands - -`libnpmaccess` uses [`npm-registry-fetch`](https://npm.im/npm-registry-fetch). -All options are passed through directly to that library, so please refer to [its -own `opts` -documentation](https://www.npmjs.com/package/npm-registry-fetch#fetch-options) -for options that can be passed in. - -A couple of options of note for those in a hurry: - -* `opts.token` - can be passed in and will be used as the authentication token for the registry. For other ways to pass in auth details, see the n-r-f docs. -* `opts.otp` - certain operations will require an OTP token to be passed in. If a `libnpmaccess` command fails with `err.code === EOTP`, please retry the request with `{otp: <2fa token>}` - -#### <a name="public"></a> `> access.public(spec, [opts]) -> Promise<Boolean>` - -`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible -registry spec. - -Makes package described by `spec` public. - -##### Example - -```javascript -await access.public('@foo/bar', {token: 'myregistrytoken'}) -// `@foo/bar` is now public -``` - -#### <a name="restricted"></a> `> access.restricted(spec, [opts]) -> Promise<Boolean>` - -`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible -registry spec. - -Makes package described by `spec` private/restricted. - -##### Example - -```javascript -await access.restricted('@foo/bar', {token: 'myregistrytoken'}) -// `@foo/bar` is now private -``` - -#### <a name="grant"></a> `> access.grant(spec, team, permissions, [opts]) -> Promise<Boolean>` - -`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible -registry spec. `team` must be a fully-qualified team name, in the `scope:team` -format, with or without the `@` prefix, and the team must be a valid team within -that scope. `permissions` must be one of `'read-only'` or `'read-write'`. - -Grants `read-only` or `read-write` permissions for a certain package to a team. - -##### Example - -```javascript -await access.grant('@foo/bar', '@foo:myteam', 'read-write', { - token: 'myregistrytoken' -}) -// `@foo/bar` is now read/write enabled for the @foo:myteam team. -``` - -#### <a name="revoke"></a> `> access.revoke(spec, team, [opts]) -> Promise<Boolean>` - -`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible -registry spec. `team` must be a fully-qualified team name, in the `scope:team` -format, with or without the `@` prefix, and the team must be a valid team within -that scope. `permissions` must be one of `'read-only'` or `'read-write'`. - -Removes access to a package from a certain team. 
- -##### Example - -```javascript -await access.revoke('@foo/bar', '@foo:myteam', { - token: 'myregistrytoken' -}) -// @foo:myteam can no longer access `@foo/bar` -``` - -#### <a name="tfa-required"></a> `> access.tfaRequired(spec, [opts]) -> Promise<Boolean>` - -`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible -registry spec. - -Makes it so publishing or managing a package requires using 2FA tokens to -complete operations. - -##### Example - -```javascript -await access.tfaRequires('lodash', {token: 'myregistrytoken'}) -// Publishing or changing dist-tags on `lodash` now require OTP to be enabled. -``` - -#### <a name="tfa-not-required"></a> `> access.tfaNotRequired(spec, [opts]) -> Promise<Boolean>` - -`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible -registry spec. - -Disabled the package-level 2FA requirement for `spec`. Note that you will need -to pass in an `otp` token in `opts` in order to complete this operation. - -##### Example - -```javascript -await access.tfaNotRequired('lodash', {otp: '123654', token: 'myregistrytoken'}) -// Publishing or editing dist-tags on `lodash` no longer requires OTP to be -// enabled. -``` - -#### <a name="ls-packages"></a> `> access.lsPackages(entity, [opts]) -> Promise<Object | null>` - -`entity` must be either a valid org or user name, or a fully-qualified team name -in the `scope:team` format, with or without the `@` prefix. - -Lists out packages a user, org, or team has access to, with corresponding -permissions. Packages that the access token does not have access to won't be -listed. - -In order to disambiguate between users and orgs, two requests may end up being -made when listing orgs or users. - -For a streamed version of these results, see -[`access.lsPackages.stream()`](#ls-package-stream). - -##### Example - -```javascript -await access.lsPackages('zkat', { - token: 'myregistrytoken' -}) -// Lists all packages `@zkat` has access to on the registry, and the -// corresponding permissions. -``` - -#### <a name="ls-packages-stream"></a> `> access.lsPackages.stream(scope, [team], [opts]) -> Stream` - -`entity` must be either a valid org or user name, or a fully-qualified team name -in the `scope:team` format, with or without the `@` prefix. - -Streams out packages a user, org, or team has access to, with corresponding -permissions, with each stream entry being formatted like `[packageName, -permissions]`. Packages that the access token does not have access to won't be -listed. - -In order to disambiguate between users and orgs, two requests may end up being -made when listing orgs or users. - -The returned stream is a valid `asyncIterator`. - -##### Example - -```javascript -for await (let [pkg, perm] of access.lsPackages.stream('zkat')) { - console.log('zkat has', perm, 'access to', pkg) -} -// zkat has read-write access to eggplant -// zkat has read-only access to @npmcorp/secret -``` - -#### <a name="ls-collaborators"></a> `> access.lsCollaborators(spec, [user], [opts]) -> Promise<Object | null>` - -`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible -registry spec. `user` must be a valid user name, with or without the `@` -prefix. - -Lists out access privileges for a certain package. Will only show permissions -for packages to which you have at least read access. If `user` is passed in, the -list is filtered only to teams _that_ user happens to belong to. 
- -For a streamed version of these results, see [`access.lsCollaborators.stream()`](#ls-collaborators-stream). - -##### Example - -```javascript -await access.lsCollaborators('@npm/foo', 'zkat', { - token: 'myregistrytoken' -}) -// Lists all teams with access to @npm/foo that @zkat belongs to. -``` - -#### <a name="ls-collaborators-stream"></a> `> access.lsCollaborators.stream(spec, [user], [opts]) -> Stream` - -`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible -registry spec. `user` must be a valid user name, with or without the `@` -prefix. - -Stream out access privileges for a certain package, with each entry in `[user, -permissions]` format. Will only show permissions for packages to which you have -at least read access. If `user` is passed in, the list is filtered only to teams -_that_ user happens to belong to. - -The returned stream is a valid `asyncIterator`. - -##### Example - -```javascript -for await (let [usr, perm] of access.lsCollaborators.stream('npm')) { - console.log(usr, 'has', perm, 'access to npm') -} -// zkat has read-write access to npm -// iarna has read-write access to npm -``` diff --git a/node_modules/libnpmaccess/package.json b/node_modules/libnpmaccess/package.json index 2b522e090a89b..23d4b444ca070 100644 --- a/node_modules/libnpmaccess/package.json +++ b/node_modules/libnpmaccess/package.json @@ -1,6 +1,6 @@ { "name": "libnpmaccess", - "version": "4.0.1", + "version": "4.0.3", "description": "programmatic library for `npm access` commands", "author": "Kat Marchán <kzm@sykosomatic.org>", "license": "ISC", @@ -14,7 +14,7 @@ "devDependencies": { "nock": "^12.0.1", "standard": "^14.3.0", - "tap": "^14.10.6" + "tap": "^14.11.0" }, "repository": { "type": "git", @@ -25,8 +25,8 @@ "dependencies": { "aproba": "^2.0.0", "minipass": "^3.1.1", - "npm-package-arg": "^8.0.0", - "npm-registry-fetch": "^9.0.0" + "npm-package-arg": "^8.1.2", + "npm-registry-fetch": "^11.0.0" }, "engines": { "node": ">=10" diff --git a/node_modules/libnpmdiff b/node_modules/libnpmdiff new file mode 120000 index 0000000000000..ae8dd62893029 --- /dev/null +++ b/node_modules/libnpmdiff @@ -0,0 +1 @@ +../packages/libnpmdiff \ No newline at end of file diff --git a/node_modules/libnpmdiff/LICENSE b/node_modules/libnpmexec/LICENSE similarity index 100% rename from node_modules/libnpmdiff/LICENSE rename to node_modules/libnpmexec/LICENSE diff --git a/node_modules/libnpmexec/lib/cache-install-dir.js b/node_modules/libnpmexec/lib/cache-install-dir.js new file mode 100644 index 0000000000000..4fb534f7dfe12 --- /dev/null +++ b/node_modules/libnpmexec/lib/cache-install-dir.js @@ -0,0 +1,19 @@ +const crypto = require('crypto') + +const { resolve } = require('path') + +const cacheInstallDir = ({ npxCache, packages }) => { + if (!npxCache) + throw new Error('Must provide a valid npxCache path') + + // only packages not found in ${prefix}/node_modules + return resolve(npxCache, getHash(packages)) +} + +const getHash = (packages) => + crypto.createHash('sha512') + .update(packages.sort((a, b) => a.localeCompare(b, 'en')).join('\n')) + .digest('hex') + .slice(0, 16) + +module.exports = cacheInstallDir diff --git a/node_modules/libnpmexec/lib/file-exists.js b/node_modules/libnpmexec/lib/file-exists.js new file mode 100644 index 0000000000000..a115be14b0042 --- /dev/null +++ b/node_modules/libnpmexec/lib/file-exists.js @@ -0,0 +1,29 @@ +const { resolve } = require('path') +const { promisify } = require('util') +const stat = promisify(require('fs').stat) +const walkUp = 
require('walk-up-path') + +const fileExists = (file) => stat(file) + .then((stat) => stat.isFile()) + .catch(() => false) + +const localFileExists = async (dir, binName, root = '/') => { + root = resolve(root).toLowerCase() + + for (const path of walkUp(resolve(dir))) { + const binDir = resolve(path, 'node_modules', '.bin') + + if (await fileExists(resolve(binDir, binName))) + return binDir + + if (path.toLowerCase() === root) + return false + } + + return false +} + +module.exports = { + fileExists, + localFileExists, +} diff --git a/node_modules/libnpmexec/lib/get-bin-from-manifest.js b/node_modules/libnpmexec/lib/get-bin-from-manifest.js new file mode 100644 index 0000000000000..038095b502300 --- /dev/null +++ b/node_modules/libnpmexec/lib/get-bin-from-manifest.js @@ -0,0 +1,20 @@ +const getBinFromManifest = (mani) => { + // if we have a bin matching (unscoped portion of) packagename, use that + // otherwise if there's 1 bin or all bin value is the same (alias), use + // that, otherwise fail + const bin = mani.bin || {} + if (new Set(Object.values(bin)).size === 1) + return Object.keys(bin)[0] + + // XXX probably a util to parse this better? + const name = mani.name.replace(/^@[^/]+\//, '') + if (bin[name]) + return name + + // XXX need better error message + throw Object.assign(new Error('could not determine executable to run'), { + pkgid: mani._id, + }) +} + +module.exports = getBinFromManifest diff --git a/node_modules/libnpmexec/lib/index.js b/node_modules/libnpmexec/lib/index.js new file mode 100644 index 0000000000000..57c2a148d3489 --- /dev/null +++ b/node_modules/libnpmexec/lib/index.js @@ -0,0 +1,184 @@ +const { delimiter, dirname, resolve } = require('path') +const { promisify } = require('util') +const read = promisify(require('read')) + +const Arborist = require('@npmcli/arborist') +const ciDetect = require('@npmcli/ci-detect') +const logger = require('proc-log') +const mkdirp = require('mkdirp-infer-owner') +const npa = require('npm-package-arg') +const pacote = require('pacote') +const readPackageJson = require('read-package-json-fast') + +const cacheInstallDir = require('./cache-install-dir.js') +const { fileExists, localFileExists } = require('./file-exists.js') +const getBinFromManifest = require('./get-bin-from-manifest.js') +const manifestMissing = require('./manifest-missing.js') +const noTTY = require('./no-tty.js') +const runScript = require('./run-script.js') +const isWindows = require('./is-windows.js') + +/* istanbul ignore next */ +const PATH = ( + process.env.PATH || process.env.Path || process.env.path +).split(delimiter) + +const exec = async (opts) => { + const { + args = [], + call = '', + color = false, + localBin = resolve('./node_modules/.bin'), + locationMsg = undefined, + globalBin = '', + output, + packages: _packages = [], + path = '.', + runPath = '.', + scriptShell = isWindows ? 
process.env.ComSpec || 'cmd' : 'sh', + yes = undefined, + ...flatOptions + } = opts + const log = flatOptions.log || logger + + // dereferences values because we manipulate it later + const packages = [..._packages] + const pathArr = [...PATH] + const _run = () => runScript({ + args, + call, + color, + flatOptions, + locationMsg, + log, + output, + path, + pathArr, + runPath, + scriptShell, + }) + + // nothing to maybe install, skip the arborist dance + if (!call && !args.length && !packages.length) + return await _run() + + const needPackageCommandSwap = args.length && !packages.length + // if there's an argument and no package has been explicitly asked for + // check the local and global bin paths for a binary named the same as + // the argument and run it if it exists, otherwise fall through to + // the behavior of treating the single argument as a package name + if (needPackageCommandSwap) { + let binExists = false + const dir = dirname(dirname(localBin)) + const localBinPath = await localFileExists(dir, args[0]) + if (localBinPath) { + pathArr.unshift(localBinPath) + binExists = true + } else if (await fileExists(`${globalBin}/${args[0]}`)) { + pathArr.unshift(globalBin) + binExists = true + } + + if (binExists) + return await _run() + + packages.push(args[0]) + } + + // If we do `npm exec foo`, and have a `foo` locally, then we'll + // always use that, so we don't really need to fetch the manifest. + // So: run npa on each packages entry, and if it is a name with a + // rawSpec==='', then try to readPackageJson at + // node_modules/${name}/package.json, and only pacote fetch if + // that fails. + const manis = await Promise.all(packages.map(async p => { + const spec = npa(p, path) + if (spec.type === 'tag' && spec.rawSpec === '') { + // fall through to the pacote.manifest() approach + try { + const pj = resolve(path, 'node_modules', spec.name, 'package.json') + return await readPackageJson(pj) + } catch (er) {} + } + // Force preferOnline to true so we are making sure to pull in the latest + // This is especially useful if the user didn't give us a version, and + // they expect to be running @latest + return await pacote.manifest(p, { + ...flatOptions, + preferOnline: true, + }) + })) + + if (needPackageCommandSwap) + args[0] = getBinFromManifest(manis[0]) + + // figure out whether we need to install stuff, or if local is fine + const localArb = new Arborist({ + ...flatOptions, + path, + }) + const tree = await localArb.loadActual() + + // do we have all the packages in manifest list? + const needInstall = + manis.some(manifest => manifestMissing({ tree, manifest })) + + if (needInstall) { + const { npxCache } = flatOptions + const installDir = cacheInstallDir({ npxCache, packages }) + await mkdirp(installDir) + const arb = new Arborist({ + ...flatOptions, + path: installDir, + }) + const tree = await arb.loadActual() + + // at this point, we have to ensure that we get the exact same + // version, because it's something that has only ever been installed + // by npm exec in the cache install directory + const add = manis.filter(mani => manifestMissing({ + tree, + manifest: { + ...mani, + _from: `${mani.name}@${mani.version}`, + }, + })) + .map(mani => mani._from) + .sort((a, b) => a.localeCompare(b, 'en')) + + // no need to install if already present + if (add.length) { + if (!yes) { + // set -n to always say no + if (yes === false) + throw new Error('canceled') + + if (noTTY() || ciDetect()) { + log.warn('exec', `The following package${ + add.length === 1 ? 
' was' : 's were' + } not found and will be installed: ${ + add.map((pkg) => pkg.replace(/@$/, '')).join(', ') + }`) + } else { + const addList = add.map(a => ` ${a.replace(/@$/, '')}`) + .join('\n') + '\n' + const prompt = `Need to install the following packages:\n${ + addList + }Ok to proceed? ` + const confirm = await read({ prompt, default: 'y' }) + if (confirm.trim().toLowerCase().charAt(0) !== 'y') + throw new Error('canceled') + } + } + await arb.reify({ + ...flatOptions, + add, + }) + } + pathArr.unshift(resolve(installDir, 'node_modules/.bin')) + } + + return await _run() +} + +module.exports = exec diff --git a/node_modules/libnpmexec/lib/is-windows.js b/node_modules/libnpmexec/lib/is-windows.js new file mode 100644 index 0000000000000..fbece90ad7496 --- /dev/null +++ b/node_modules/libnpmexec/lib/is-windows.js @@ -0,0 +1 @@ +module.exports = process.platform === 'win32' diff --git a/node_modules/libnpmexec/lib/manifest-missing.js b/node_modules/libnpmexec/lib/manifest-missing.js new file mode 100644 index 0000000000000..4714680960992 --- /dev/null +++ b/node_modules/libnpmexec/lib/manifest-missing.js @@ -0,0 +1,17 @@ +const manifestMissing = ({ tree, manifest }) => { + // if the tree doesn't have a child by that name/version, return true + // true means we need to install it + const child = tree.children.get(manifest.name) + // if no child, we have to load it + if (!child) + return true + + // if no version/tag specified, allow whatever's there + if (manifest._from === `${manifest.name}@`) + return false + + // otherwise the version has to match what we WOULD get + return child.version !== manifest.version +} + +module.exports = manifestMissing diff --git a/node_modules/libnpmexec/lib/no-tty.js b/node_modules/libnpmexec/lib/no-tty.js new file mode 100644 index 0000000000000..601798d25cc77 --- /dev/null +++ b/node_modules/libnpmexec/lib/no-tty.js @@ -0,0 +1 @@ +module.exports = () => !process.stdin.isTTY diff --git a/node_modules/libnpmexec/lib/run-script.js b/node_modules/libnpmexec/lib/run-script.js new file mode 100644 index 0000000000000..819dacb8baee8 --- /dev/null +++ b/node_modules/libnpmexec/lib/run-script.js @@ -0,0 +1,86 @@ +const { delimiter } = require('path') + +const chalk = require('chalk') +const ciDetect = require('@npmcli/ci-detect') +const runScript = require('@npmcli/run-script') +const readPackageJson = require('read-package-json-fast') +const noTTY = require('./no-tty.js') + +const nocolor = { + reset: s => s, + bold: s => s, + dim: s => s, +} + +const run = async ({ + args, + call, + color, + flatOptions, + locationMsg, + log, + output = () => {}, + path, + pathArr, + runPath, + scriptShell, +}) => { + // turn list of args into command string + const script = call || args.shift() || scriptShell + const colorize = color ? 
chalk : nocolor + + // do the fakey runScript dance + // still should work if no package.json in cwd + const realPkg = await readPackageJson(`${path}/package.json`) + .catch(() => ({})) + const pkg = { + ...realPkg, + scripts: { + ...(realPkg.scripts || {}), + npx: script, + }, + } + + if (log && log.disableProgress) + log.disableProgress() + + try { + if (script === scriptShell) { + const isTTY = !noTTY() + + if (isTTY) { + if (ciDetect()) + return log.warn('exec', 'Interactive mode disabled in CI environment') + + locationMsg = locationMsg || ` at location:\n${colorize.dim(runPath)}` + + output(`${ + colorize.reset('\nEntering npm script environment') + }${ + colorize.reset(locationMsg) + }${ + colorize.bold('\nType \'exit\' or ^D when finished\n') + }`) + } + } + return await runScript({ + ...flatOptions, + pkg, + banner: false, + // we always run in cwd, not --prefix + path: runPath, + stdioString: true, + event: 'npx', + args, + env: { + PATH: pathArr.join(delimiter), + }, + stdio: 'inherit', + }) + } finally { + if (log && log.enableProgress) + log.enableProgress() + } +} + +module.exports = run diff --git a/node_modules/libnpmexec/package.json b/node_modules/libnpmexec/package.json new file mode 100644 index 0000000000000..dff91077d148a --- /dev/null +++ b/node_modules/libnpmexec/package.json @@ -0,0 +1,64 @@ +{ + "name": "libnpmexec", + "version": "2.0.0", + "files": [ + "lib" + ], + "main": "lib/index.js", + "engines": { + "node": ">=10" + }, + "description": "npm exec (npx) programmatic API", + "repository": "https://github.com/npm/libnpmexec", + "keywords": [ + "npm", + "npmcli", + "libnpm", + "cli", + "workspaces", + "libnpmexec" + ], + "author": "GitHub Inc.", + "contributors": [ + { + "name": "Ruy Adorno", + "url": "https://ruyadorno.com", + "twitter": "ruyadorno" + } + ], + "license": "ISC", + "scripts": { + "lint": "eslint lib/*.js", + "pretest": "npm run lint", + "test": "tap test/*.js", + "snap": "tap test/*.js", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "tap": { + "check-coverage": true + }, + "devDependencies": { + "bin-links": "^2.2.1", + "eslint": "^7.24.0", + "eslint-plugin-import": "^2.22.1", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^5.1.0", + "eslint-plugin-standard": "^5.0.0", + "tap": "^15.0.6" + }, + "dependencies": { + "@npmcli/arborist": "^2.3.0", + "@npmcli/ci-detect": "^1.3.0", + "@npmcli/run-script": "^1.8.4", + "chalk": "^4.1.0", + "mkdirp-infer-owner": "^2.0.0", + "npm-package-arg": "^8.1.2", + "pacote": "^11.3.1", + "proc-log": "^1.0.0", + "read": "^1.0.7", + "read-package-json-fast": "^2.0.2", + "walk-up-path": "^1.0.0" + } +} diff --git a/node_modules/libnpmfund/CHANGELOG.md b/node_modules/libnpmfund/CHANGELOG.md deleted file mode 100644 index b890b58e1405a..0000000000000 --- a/node_modules/libnpmfund/CHANGELOG.md +++ /dev/null @@ -1,6 +0,0 @@ -# Changelog - -## 0.0.0-pre.0 - -- Initial pre-release. 
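A hedged usage sketch for the `libnpmexec` API added above: the option names follow the new `lib/index.js`, but the command, paths, and cache location shown here are illustrative assumptions only, not values required by the library.

```js
const libexec = require('libnpmexec')

await libexec({
  args: ['cowsay', 'hello'],           // command (and arguments) to run, npx-style
  packages: [],                        // empty: infer the package to install from args[0]
  path: process.cwd(),                 // project dir searched for node_modules/.bin
  runPath: process.cwd(),              // directory the command is actually executed in
  yes: true,                           // skip the "Ok to proceed?" install prompt
  npxCache: '/tmp/npx-cache',          // assumed cache dir; used when an install is needed
  output: (msg) => console.log(msg),   // how informational output is surfaced
})
```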
- diff --git a/node_modules/libnpmfund/README.md b/node_modules/libnpmfund/README.md deleted file mode 100644 index c373a9ceb7dd5..0000000000000 --- a/node_modules/libnpmfund/README.md +++ /dev/null @@ -1,129 +0,0 @@ -# libnpmfund - -[![npm version](https://img.shields.io/npm/v/libnpmfund.svg)](https://npm.im/libnpmfund) -[![license](https://img.shields.io/npm/l/libnpmfund.svg)](https://npm.im/libnpmfund) -[![GitHub Actions](https://github.com/npm/libnpmfund/workflows/node-ci/badge.svg)](https://github.com/npm/libnpmfund/actions?query=workflow%3Anode-ci) -[![Coverage Status](https://coveralls.io/repos/github/npm/libnpmfund/badge.svg?branch=master)](https://coveralls.io/github/npm/libnpmfund?branch=master) - -[`libnpmfund`](https://github.com/npm/libnpmfund) is a Node.js library for -retrieving **funding** information for packages installed using -[`arborist`](https://github.com/npm/arborist). - -## Table of Contents - -* [Example](#example) -* [Install](#install) -* [Contributing](#contributing) -* [API](#api) -* [LICENSE](#license) - -## Example - -```js -const { read } = require('libnpmfund') - -const fundingInfo = await read() -console.log( - JSON.stringify(fundingInfo, null, 2) -) -// => { - length: 2, - name: 'foo', - version: '1.0.0', - funding: { url: 'https://example.com' }, - dependencies: { - bar: { - version: '1.0.0', - funding: { url: 'http://collective.example.com' } - } - } -} -``` - -## Install - -`$ npm install libnpmfund` - -### Contributing - -The npm team enthusiastically welcomes contributions and project participation! -There's a bunch of things you can do if you want to contribute! The -[Contributor Guide](https://github.com/npm/cli/blob/latest/CONTRIBUTING.md) -outlines the process for community interaction and contribution. Please don't -hesitate to jump in if you'd like to, or even ask us questions if something -isn't clear. - -All participants and maintainers in this project are expected to follow the -[npm Code of Conduct](https://www.npmjs.com/policies/conduct), and just -generally be excellent to each other. - -Please refer to the [Changelog](CHANGELOG.md) for project history details, too. - -Happy hacking! - -### API - -##### <a name="fund.read"></a> `> fund.read([opts]) -> Promise<Object>` - -Reads **funding** info from a npm install and returns a promise for a -tree object that only contains packages in which funding info is defined. - -Options: - -- `countOnly`: Uses the tree-traversal logic from **npm fund** but skips over -any obj definition and just returns an obj containing `{ length }` - useful for -things such as printing a `6 packages are looking for funding` msg. -- `path`: Location to current working directory - -##### <a name="fund.readTree"></a> `> fund.readTree(tree, [opts]) -> Promise<Object>` - -Reads **funding** info from a given install tree and returns a tree object -that only contains packages in which funding info is defined. - -- `tree`: An [`arborist`](https://github.com/npm/arborist) tree to be used, e.g: - -```js -const Arborist = require('@npmcli/arborist') -const { readTree } = require('libnpmfund') - -const arb = new Arborist({ path: process.cwd() }) -const tree = await arb.loadActual() - -return readTree(tree, { countOnly: false }) -``` - -Options: - -- `countOnly`: Uses the tree-traversal logic from **npm fund** but skips over -any obj definition and just returns an obj containing `{ length }` - useful for -things such as printing a `6 packages are looking for funding` msg. 
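For the count-only mode described above, a minimal sketch (assuming an Arborist tree loaded from the current project, as in the earlier `readTree` example) looks like this:

```js
const Arborist = require('@npmcli/arborist')
const { readTree } = require('libnpmfund')

const arb = new Arborist({ path: process.cwd() })
const tree = await arb.loadActual()

// countOnly skips assembling the per-package funding objects and
// only reports how many packages declared funding info
const { length } = await readTree(tree, { countOnly: true })
console.log(`${length} packages are looking for funding`)
```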
- -##### <a name="fund.normalizeFunding"></a> `> fund.normalizeFunding(funding) -> Object` - -From a `funding` `<object|string|array>`, retrieves normalized funding objects -containing a `url` property. - -e.g: - -```js -normalizeFunding('http://example.com') -// => { - url: 'http://example.com' -} -``` - -##### <a name="fund.isValidFunding"></a> `> fund.isValidFunding(funding) -> Boolean` - -Returns `<true>` if `funding` is a valid funding object, e.g: - -```js -isValidFunding({ foo: 'not a valid funding obj' }) -// => false - -isValidFunding('http://example.com') -// => true -``` - -## LICENSE - -[ISC](./LICENSE) diff --git a/node_modules/libnpmfund/index.js b/node_modules/libnpmfund/index.js index 58aba028e3622..37bc1dd0b7916 100644 --- a/node_modules/libnpmfund/index.js +++ b/node_modules/libnpmfund/index.js @@ -15,11 +15,11 @@ function normalizeFunding (funding) { // Is the value of a `funding` property of a `package.json` // a valid type+url for `npm fund` to display? function isValidFunding (funding) { - if (!funding) return false + if (!funding) + return false - if (Array.isArray(funding)) { + if (Array.isArray(funding)) return funding.every(f => !Array.isArray(f) && isValidFunding(f)) - } try { var parsed = new URL(funding.url || funding) @@ -30,7 +30,8 @@ function isValidFunding (funding) { if ( parsed.protocol !== 'https:' && parsed.protocol !== 'http:' - ) return false + ) + return false return Boolean(parsed.host) } @@ -43,11 +44,18 @@ function readTree (tree, opts) { const { countOnly } = opts || {} const _trailingDependencies = Symbol('trailingDependencies') + let filterSet + + if (opts && opts.workspaces && opts.workspaces.length) { + const arb = new Arborist(opts) + filterSet = arb.workspaceDependencySet(tree, opts.workspaces) + } + function tracked (name, version) { const key = String(name) + String(version) - if (seen.has(key)) { + if (seen.has(key)) return true - } + seen.add(key) } @@ -81,30 +89,36 @@ function readTree (tree, opts) { function getFundingDependencies (tree) { const edges = tree && tree.edgesOut && tree.edgesOut.values() - if (!edges) return empty() + if (!edges) + return empty() const directDepsWithFunding = Array.from(edges).map(edge => { - if (!edge || !edge.to) return empty() + if (!edge || !edge.to) + return empty() const node = edge.to.target || edge.to - if (!node.package) return empty() + if (!node.package) + return empty() + + if (filterSet && filterSet.size > 0 && !filterSet.has(node)) + return empty() const { name, funding, version } = node.package // avoids duplicated items within the funding tree - if (tracked(name, version)) return empty() + if (tracked(name, version)) + return empty() const fundingItem = {} - if (version) { + if (version) fundingItem.version = version - } attachFundingInfo(fundingItem, funding) return { node, - fundingItem + fundingItem, } }) @@ -112,7 +126,8 @@ function readTree (tree, opts) { (res, { node, fundingItem }, i) => { if (!fundingItem || fundingItem.length === 0 || - !node) return res + !node) + return res // recurse const transitiveDependencies = node.edgesOut && @@ -121,16 +136,17 @@ function readTree (tree, opts) { // if we're only counting items there's no need // to add all the data to the resulting object - if (countOnly) return null + if (countOnly) + return null if (hasDependencies(transitiveDependencies)) { fundingItem.dependencies = retrieveDependencies(transitiveDependencies) } - if (isValidFunding(fundingItem.funding)) { + if (isValidFunding(fundingItem.funding)) res[node.package.name] = fundingItem 
- } else if (hasDependencies(fundingItem.dependencies)) { + else if (hasDependencies(fundingItem.dependencies)) { res[_trailingDependencies] = Object.assign( empty(), @@ -145,7 +161,7 @@ function readTree (tree, opts) { const treeDependencies = getFundingDependencies(tree) const result = { - length: packageWithFundingCount + length: packageWithFundingCount, } if (!countOnly) { @@ -154,13 +170,11 @@ function readTree (tree, opts) { (tree && tree.name) result.name = name || (tree && tree.path) - if (tree && tree.package && tree.package.version) { + if (tree && tree.package && tree.package.version) result.version = tree.package.version - } - if (tree && tree.package && tree.package.funding) { + if (tree && tree.package && tree.package.funding) result.funding = normalizeFunding(tree.package.funding) - } result.dependencies = retrieveDependencies(treeDependencies) } @@ -170,8 +184,7 @@ function readTree (tree, opts) { async function read (opts) { const arb = new Arborist(opts) - const tree = await arb.loadActual() - + const tree = await arb.loadActual(opts) return readTree(tree, opts) } @@ -179,5 +192,5 @@ module.exports = { read, readTree, normalizeFunding, - isValidFunding + isValidFunding, } diff --git a/node_modules/libnpmfund/package.json b/node_modules/libnpmfund/package.json index b25d3aa6b520e..7f4acad383bb8 100644 --- a/node_modules/libnpmfund/package.json +++ b/node_modules/libnpmfund/package.json @@ -1,6 +1,6 @@ { "name": "libnpmfund", - "version": "1.0.2", + "version": "1.1.0", "files": [ "index.js" ], @@ -25,8 +25,10 @@ ], "license": "ISC", "scripts": { - "lint": "standard", - "pretest": "npm run lint", + "eslint": "eslint", + "lint": "npm run eslint -- index.js test.js", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", "test": "tap", "snap": "tap", "preversion": "npm test", @@ -42,11 +44,14 @@ ] }, "devDependencies": { - "require-inject": "^1.4.4", - "standard": "^14.3.4", - "tap": "^14.10.7" + "eslint": "^7.26.0", + "eslint-plugin-import": "^2.22.1", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^5.1.0", + "eslint-plugin-standard": "^5.0.0", + "tap": "^15.0.9" }, "dependencies": { - "@npmcli/arborist": "^2.0.0" + "@npmcli/arborist": "^2.5.0" } } diff --git a/node_modules/libnpmhook/CHANGELOG.md b/node_modules/libnpmhook/CHANGELOG.md deleted file mode 100644 index 0557274972259..0000000000000 --- a/node_modules/libnpmhook/CHANGELOG.md +++ /dev/null @@ -1,110 +0,0 @@ -# Change Log - -<a name="6.0.0"></a> -# [6.0.0](https://github.com/npm/libnpmhook/compare/v5.0.2...v6.0.0) (2020-02-26) - -### Breaking Changes - -* [`aa629b4`](https://github.com/npm/libnpmhook/commit/aa629b4) fix: remove figgy-pudding ([@claudiahdz](https://github.com/claudiahdz)) - -### Miscellaneuous - -* [`ea795fb`](https://github.com/npm/libnpmhook/commit/ea795fb) chore: basic project updates ([@claudiahdz](https://github.com/claudiahdz)) -* [`a0fdf7e`](https://github.com/npm/libnpmhook/commit/a0fdf7e) chore: cleanup badges, contrib, readme ([@ruyadorno](https://github.com/ruyadorno)) - ---- - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
- -<a name="5.0.2"></a> -## [5.0.2](https://github.com/npm/libnpmhook/compare/v5.0.1...v5.0.2) (2018-08-24) - - - -<a name="5.0.1"></a> -## [5.0.1](https://github.com/npm/libnpmhook/compare/v5.0.0...v5.0.1) (2018-08-23) - - -### Bug Fixes - -* **deps:** move JSONStream to prod deps ([bb63594](https://github.com/npm/libnpmhook/commit/bb63594)) - - - -<a name="5.0.0"></a> -# [5.0.0](https://github.com/npm/libnpmhook/compare/v4.0.1...v5.0.0) (2018-08-21) - - -### Features - -* **api:** overhauled API ([46b271b](https://github.com/npm/libnpmhook/commit/46b271b)) - - -### BREAKING CHANGES - -* **api:** the API for ls() has changed, and rm() no longer errors on 404 - - - -<a name="4.0.1"></a> -## [4.0.1](https://github.com/npm/libnpmhook/compare/v4.0.0...v4.0.1) (2018-04-09) - - - -<a name="4.0.0"></a> -# [4.0.0](https://github.com/npm/libnpmhook/compare/v3.0.1...v4.0.0) (2018-04-08) - - -### meta - -* drop support for node 4 and 7 ([f2a301e](https://github.com/npm/libnpmhook/commit/f2a301e)) - - -### BREAKING CHANGES - -* node@4 and node@7 are no longer supported - - - -<a name="3.0.1"></a> -## [3.0.1](https://github.com/npm/libnpmhook/compare/v3.0.0...v3.0.1) (2018-04-08) - - - -<a name="3.0.0"></a> -# [3.0.0](https://github.com/npm/libnpmhook/compare/v2.0.1...v3.0.0) (2018-04-04) - - -### add - -* guess type based on name ([9418224](https://github.com/npm/libnpmhook/commit/9418224)) - - -### BREAKING CHANGES - -* hook type is now based on name prefix - - - -<a name="2.0.1"></a> -## [2.0.1](https://github.com/npm/libnpmhook/compare/v2.0.0...v2.0.1) (2018-03-16) - - -### Bug Fixes - -* **urls:** was hitting the wrong URL endpoints ([10171a9](https://github.com/npm/libnpmhook/commit/10171a9)) - - - -<a name="2.0.0"></a> -# [2.0.0](https://github.com/npm/libnpmhook/compare/v1.0.0...v2.0.0) (2018-03-16) - - - -<a name="1.0.0"></a> -# 1.0.0 (2018-03-16) - - -### Features - -* **api:** baseline working api ([122658e](https://github.com/npm/npm-hooks/commit/122658e)) diff --git a/node_modules/libnpmhook/README.md b/node_modules/libnpmhook/README.md deleted file mode 100644 index ce6e8c1a51989..0000000000000 --- a/node_modules/libnpmhook/README.md +++ /dev/null @@ -1,271 +0,0 @@ -# libnpmhook - -[![npm version](https://img.shields.io/npm/v/libnpmhook.svg)](https://npm.im/libnpmhook) -[![license](https://img.shields.io/npm/l/libnpmhook.svg)](https://npm.im/libnpmhook) -[![Coverage Status](https://coveralls.io/repos/github/npm/libnpmhook/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmhook?branch=latest) - -[`libnpmhook`](https://github.com/npm/libnpmhook) is a Node.js library for -programmatically managing the npm registry's server-side hooks. - -For a more general introduction to managing hooks, see [the introductory blog -post](https://blog.npmjs.org/post/145260155635/introducing-hooks-get-notifications-of-npm). - -## Table of Contents - -* [Example](#example) -* [Install](#install) -* [Contributing](#contributing) -* [API](#api) - * [hook opts](#opts) - * [`add()`](#add) - * [`rm()`](#rm) - * [`ls()`](#ls) - * [`ls.stream()`](#ls-stream) - * [`update()`](#update) - -## Example - -```js -const hooks = require('libnpmhook') - -console.log(await hooks.ls('mypkg', {token: 'deadbeef'})) -// array of hook objects on `mypkg`. -``` - -## Install - -`$ npm install libnpmhook` - -### API - -#### <a name="opts"></a> `opts` for `libnpmhook` commands - -`libnpmhook` uses [`npm-registry-fetch`](https://npm.im/npm-registry-fetch). 
-All options are passed through directly to that library, so please refer to [its -own `opts` -documentation](https://www.npmjs.com/package/npm-registry-fetch#fetch-options) -for options that can be passed in. - -A couple of options of note for those in a hurry: - -* `opts.token` - can be passed in and will be used as the authentication token for the registry. For other ways to pass in auth details, see the n-r-f docs. -* `opts.otp` - certain operations will require an OTP token to be passed in. If a `libnpmhook` command fails with `err.code === EOTP`, please retry the request with `{otp: <2fa token>}` - -#### <a name="add"></a> `> hooks.add(name, endpoint, secret, [opts]) -> Promise` - -`name` is the name of the package, org, or user/org scope to watch. The type is -determined by the name syntax: `'@foo/bar'` and `'foo'` are treated as packages, -`@foo` is treated as a scope, and `~user` is treated as an org name or scope. -Each type will attach to different events. - -The `endpoint` should be a fully-qualified http URL for the endpoint the hook -will send its payload to when it fires. `secret` is a shared secret that the -hook will send to that endpoint to verify that it's actually coming from the -registry hook. - -The returned Promise resolves to the full hook object that was created, -including its generated `id`. - -See also: [`POST -/v1/hooks/hook`](https://github.com/npm/registry/blob/master/docs/hooks/endpoints.md#post-v1hookshook) - -##### Example - -```javascript -await hooks.add('~zkat', 'https://example.com/api/added', 'supersekrit', { - token: 'myregistrytoken', - otp: '694207' -}) - -=> - -{ id: '16f7xoal', - username: 'zkat', - name: 'zkat', - endpoint: 'https://example.com/api/added', - secret: 'supersekrit', - type: 'owner', - created: '2018-08-21T20:05:25.125Z', - updated: '2018-08-21T20:05:25.125Z', - deleted: false, - delivered: false, - last_delivery: null, - response_code: 0, - status: 'active' } -``` - -#### <a name="find"></a> `> hooks.find(id, [opts]) -> Promise` - -Returns the hook identified by `id`. - -The returned Promise resolves to the full hook object that was found, or error -with `err.code` of `'E404'` if it didn't exist. - -See also: [`GET -/v1/hooks/hook/:id`](https://github.com/npm/registry/blob/master/docs/hooks/endpoints.md#get-v1hookshookid) - -##### Example - -```javascript -await hooks.find('16f7xoal', {token: 'myregistrytoken'}) - -=> - -{ id: '16f7xoal', - username: 'zkat', - name: 'zkat', - endpoint: 'https://example.com/api/added', - secret: 'supersekrit', - type: 'owner', - created: '2018-08-21T20:05:25.125Z', - updated: '2018-08-21T20:05:25.125Z', - deleted: false, - delivered: false, - last_delivery: null, - response_code: 0, - status: 'active' } -``` - -#### <a name="rm"></a> `> hooks.rm(id, [opts]) -> Promise` - -Removes the hook identified by `id`. - -The returned Promise resolves to the full hook object that was removed, if it -existed, or `null` if no such hook was there (instead of erroring). 
- -See also: [`DELETE -/v1/hooks/hook/:id`](https://github.com/npm/registry/blob/master/docs/hooks/endpoints.md#delete-v1hookshookid) - -##### Example - -```javascript -await hooks.rm('16f7xoal', { - token: 'myregistrytoken', - otp: '694207' -}) - -=> - -{ id: '16f7xoal', - username: 'zkat', - name: 'zkat', - endpoint: 'https://example.com/api/added', - secret: 'supersekrit', - type: 'owner', - created: '2018-08-21T20:05:25.125Z', - updated: '2018-08-21T20:05:25.125Z', - deleted: true, - delivered: false, - last_delivery: null, - response_code: 0, - status: 'active' } - -// Repeat it... -await hooks.rm('16f7xoal', { - token: 'myregistrytoken', - otp: '694207' -}) - -=> null -``` - -#### <a name="update"></a> `> hooks.update(id, endpoint, secret, [opts]) -> Promise` - -The `id` should be a hook ID from a previously-created hook. - -The `endpoint` should be a fully-qualified http URL for the endpoint the hook -will send its payload to when it fires. `secret` is a shared secret that the -hook will send to that endpoint to verify that it's actually coming from the -registry hook. - -The returned Promise resolves to the full hook object that was updated, if it -existed. Otherwise, it will error with an `'E404'` error code. - -See also: [`PUT -/v1/hooks/hook/:id`](https://github.com/npm/registry/blob/master/docs/hooks/endpoints.md#put-v1hookshookid) - -##### Example - -```javascript -await hooks.update('16fxoal', 'https://example.com/api/other', 'newsekrit', { - token: 'myregistrytoken', - otp: '694207' -}) - -=> - -{ id: '16f7xoal', - username: 'zkat', - name: 'zkat', - endpoint: 'https://example.com/api/other', - secret: 'newsekrit', - type: 'owner', - created: '2018-08-21T20:05:25.125Z', - updated: '2018-08-21T20:14:41.964Z', - deleted: false, - delivered: false, - last_delivery: null, - response_code: 0, - status: 'active' } -``` - -#### <a name="ls"></a> `> hooks.ls([opts]) -> Promise` - -Resolves to an array of hook objects associated with the account you're -authenticated as. - -Results can be further filtered with three values that can be passed in through -`opts`: - -* `opts.package` - filter results by package name -* `opts.limit` - maximum number of hooks to return -* `opts.offset` - pagination offset for results (use with `opts.limit`) - -See also: - * [`hooks.ls.stream()`](#ls-stream) - * [`GET -/v1/hooks`](https://github.com/npm/registry/blob/master/docs/hooks/endpoints.md#get-v1hooks) - -##### Example - -```javascript -await hooks.ls({token: 'myregistrytoken'}) - -=> -[ - { id: '16f7xoal', ... }, - { id: 'wnyf98a1', ... }, - ... -] -``` - -#### <a name="ls-stream"></a> `> hooks.ls.stream([opts]) -> Stream` - -Returns a stream of hook objects associated with the account you're -authenticated as. The returned stream is a valid `Symbol.asyncIterator` on -`node@>=10`. 
- -Results can be further filtered with three values that can be passed in through -`opts`: - -* `opts.package` - filter results by package name -* `opts.limit` - maximum number of hooks to return -* `opts.offset` - pagination offset for results (use with `opts.limit`) - -See also: - * [`hooks.ls()`](#ls) - * [`GET -/v1/hooks`](https://github.com/npm/registry/blob/master/docs/hooks/endpoints.md#get-v1hooks) - -##### Example - -```javascript -for await (let hook of hooks.ls.stream({token: 'myregistrytoken'})) { - console.log('found hook:', hook.id) -} - -=> -// outputs: -// found hook: 16f7xoal -// found hook: wnyf98a1 -``` diff --git a/node_modules/libnpmhook/package.json b/node_modules/libnpmhook/package.json index abdac88e2c45f..40951245a9ea3 100644 --- a/node_modules/libnpmhook/package.json +++ b/node_modules/libnpmhook/package.json @@ -1,6 +1,6 @@ { "name": "libnpmhook", - "version": "6.0.1", + "version": "6.0.3", "description": "programmatic API for managing npm registry hooks", "main": "index.js", "files": [ @@ -28,7 +28,7 @@ "license": "ISC", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" }, "devDependencies": { "nock": "^9.6.1", diff --git a/node_modules/libnpmorg/CHANGELOG.md b/node_modules/libnpmorg/CHANGELOG.md deleted file mode 100644 index 4cd5cd1cd68a1..0000000000000 --- a/node_modules/libnpmorg/CHANGELOG.md +++ /dev/null @@ -1,33 +0,0 @@ -# Change Log - -## 2.0.0 (2020-03-02) - -### BREAKING CHANGE -- Removed `figgy-pudding` as a dependecy -- Using native promises -- Require node >= v10 - -### Feature -- Updated stream interface to `minipass` type stream - ---- - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -<a name="1.0.1"></a> -## [1.0.1](https://github.com/npm/libnpmorg/compare/v1.0.0...v1.0.1) (2019-07-16) - - -### Bug Fixes - -* **standard:** standard --fix ([5118358](https://github.com/npm/libnpmorg/commit/5118358)) - - - -<a name="1.0.0"></a> -# 1.0.0 (2018-08-23) - - -### Features - -* **API:** implement org api ([731b9c6](https://github.com/npm/libnpmorg/commit/731b9c6)) diff --git a/node_modules/libnpmorg/README.md b/node_modules/libnpmorg/README.md deleted file mode 100644 index b2e1ed589b8e9..0000000000000 --- a/node_modules/libnpmorg/README.md +++ /dev/null @@ -1,149 +0,0 @@ -# libnpmorg - -[![npm version](https://img.shields.io/npm/v/libnpmorg.svg)](https://npm.im/libnpmorg) -[![license](https://img.shields.io/npm/l/libnpmorg.svg)](https://npm.im/libnpmorg) -[![GitHub Actions](https://github.com/npm/libnpmorg/workflows/Node%20CI/badge.svg)](https://github.com/npm/libnpmorg/workflows/Node%20CI/badge.svg) -[![Coverage Status](https://coveralls.io/repos/github/npm/libnpmorg/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmorg?branch=latest) - -[`libnpmorg`](https://github.com/npm/libnpmorg) is a Node.js library for -programmatically accessing the [npm Org membership -API](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#membership-detail). 
- -## Table of Contents - -* [Example](#example) -* [Install](#install) -* [Contributing](#contributing) -* [API](#api) - * [hook opts](#opts) - * [`set()`](#set) - * [`rm()`](#rm) - * [`ls()`](#ls) - * [`ls.stream()`](#ls-stream) - -## Example - -```js -const org = require('libnpmorg') - -console.log(await org.ls('myorg', {token: 'deadbeef'})) -=> -Roster { - zkat: 'developer', - iarna: 'admin', - isaacs: 'owner' -} -``` - -## Install - -`$ npm install libnpmorg` - -### API - -#### <a name="opts"></a> `opts` for `libnpmorg` commands - -`libnpmorg` uses [`npm-registry-fetch`](https://npm.im/npm-registry-fetch). -All options are passed through directly to that library, so please refer to [its -own `opts` -documentation](https://www.npmjs.com/package/npm-registry-fetch#fetch-options) -for options that can be passed in. - -A couple of options of note for those in a hurry: - -* `opts.token` - can be passed in and will be used as the authentication token for the registry. For other ways to pass in auth details, see the n-r-f docs. -* `opts.otp` - certain operations will require an OTP token to be passed in. If a `libnpmorg` command fails with `err.code === EOTP`, please retry the request with `{otp: <2fa token>}` - -#### <a name="set"></a> `> org.set(org, user, [role], [opts]) -> Promise` - -The returned Promise resolves to a [Membership -Detail](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#membership-detail) -object. - -The `role` is optional and should be one of `admin`, `owner`, or `developer`. -`developer` is the default if no `role` is provided. - -`org` and `user` must be scope names for the org name and user name -respectively. They can optionally be prefixed with `@`. - -See also: [`PUT -/-/org/:scope/user`](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#org-membership-replace) - -##### Example - -```javascript -await org.set('@myorg', '@myuser', 'admin', {token: 'deadbeef'}) -=> -MembershipDetail { - org: { - name: 'myorg', - size: 15 - }, - user: 'myuser', - role: 'admin' -} -``` - -#### <a name="rm"></a> `> org.rm(org, user, [opts]) -> Promise` - -The Promise resolves to `null` on success. - -`org` and `user` must be scope names for the org name and user name -respectively. They can optionally be prefixed with `@`. - -See also: [`DELETE -/-/org/:scope/user`](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#org-membership-delete) - -##### Example - -```javascript -await org.rm('myorg', 'myuser', {token: 'deadbeef'}) -``` - -#### <a name="ls"></a> `> org.ls(org, [opts]) -> Promise` - -The Promise resolves to a -[Roster](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#roster) -object. - -`org` must be a scope name for an org, and can be optionally prefixed with `@`. - -See also: [`GET -/-/org/:scope/user`](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#org-roster) - -##### Example - -```javascript -await org.ls('myorg', {token: 'deadbeef'}) -=> -Roster { - zkat: 'developer', - iarna: 'admin', - isaacs: 'owner' -} -``` - -#### <a name="ls-stream"></a> `> org.ls.stream(org, [opts]) -> Stream` - -Returns a stream of entries for a -[Roster](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#roster), -with each emitted entry in `[key, value]` format. - -`org` must be a scope name for an org, and can be optionally prefixed with `@`. - -The returned stream is a valid `Symbol.asyncIterator`. 
- -See also: [`GET -/-/org/:scope/user`](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#org-roster) - -##### Example - -```javascript -for await (let [user, role] of org.ls.stream('myorg', {token: 'deadbeef'})) { - console.log(`user: ${user} (${role})`) -} -=> -user: zkat (developer) -user: iarna (admin) -user: isaacs (owner) -``` diff --git a/node_modules/libnpmorg/package.json b/node_modules/libnpmorg/package.json index b6074ed8e4a7c..0e82a207b7017 100644 --- a/node_modules/libnpmorg/package.json +++ b/node_modules/libnpmorg/package.json @@ -1,6 +1,6 @@ { "name": "libnpmorg", - "version": "2.0.1", + "version": "2.0.3", "description": "Programmatic api for `npm org` commands", "author": "Kat Marchán <kzm@sykosomatic.org>", "keywords": [ @@ -40,7 +40,7 @@ "homepage": "https://npmjs.com/package/libnpmorg", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" }, "engines": { "node": ">=10" diff --git a/node_modules/libnpmpack/CHANGELOG.md b/node_modules/libnpmpack/CHANGELOG.md deleted file mode 100644 index 2310ac7f89690..0000000000000 --- a/node_modules/libnpmpack/CHANGELOG.md +++ /dev/null @@ -1,17 +0,0 @@ -# Change Log - -<a name="2.0.0"></a> -# [2.0.0](https://github.com/npm/libnpmpack/compare/v1.0.0...v2.0.0) (2020-03-27) - -### Breaking Changes - -* [`cb2ecf2`](https://github.com/npm/libnpmpack/commit/cb2ecf2) feat: resolve to tarball data Buffer ([@claudiahdz](https://github.com/claudiahdz)) - -<a name="1.0.0"></a> -# 1.0.0 (2020-03-26) - -### Features - -* [`a35c590`](https://github.com/npm/libnpmpack/commit/a35c590) feat: pack tarballs from local dir or registry spec ([@claudiahdz](https://github.com/claudiahdz)) - -* [`6d72149`](https://github.com/npm/libnpmpack/commit/6d72149) feat: sorted tarball contents ([@eridal](https://github.com/eridal)) diff --git a/node_modules/libnpmpack/README.md b/node_modules/libnpmpack/README.md deleted file mode 100644 index 74b4934b0b719..0000000000000 --- a/node_modules/libnpmpack/README.md +++ /dev/null @@ -1,56 +0,0 @@ -# libnpmpack - -[![npm version](https://img.shields.io/npm/v/libnpmpack.svg)](https://npm.im/libnpmpack) -[![license](https://img.shields.io/npm/l/libnpmpack.svg)](https://npm.im/libnpmpack) -[![GitHub Actions](https://github.com/npm/libnpmpack/workflows/Node%20CI/badge.svg)](https://github.com/npm/libnpmpack/actions?query=workflow%3A%22Node+CI%22) -[![Coverage Status](https://coveralls.io/repos/github/npm/libnpmpack/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmpack?branch=latest) - -[`libnpmpack`](https://github.com/npm/libnpmpack) is a Node.js library for -programmatically packing tarballs from a local directory or from a registry or github spec. If packing from a local source, `libnpmpack` will also run the `prepack` and `postpack` lifecycles. - -## Table of Contents - -* [Example](#example) -* [Install](#install) -* [API](#api) - * [`pack()`](#pack) - -## Example - -```js -const pack = require('libnpmpack') -``` - -## Install - -`$ npm install libnpmpack` - -### API - -#### <a name="pack"></a> `> pack(spec, [opts]) -> Promise` - -Packs a tarball from a local directory or from a registry or github spec and returns a Promise that resolves to the tarball data Buffer, with from, resolved, and integrity fields attached. - -If no options are passed, the tarball file will be saved on the same directory from which `pack` was called in. - -`libnpmpack` uses [`pacote`](https://npm.im/pacote). 
-Most options are passed through directly to that library, so please refer to -[its own `opts` -documentation](https://www.npmjs.com/package/pacote#options) -for options that can be passed in. - -##### Examples - -```javascript -// packs from cwd -const tarball = await pack() - -// packs from a local directory -const localTar = await pack('/Users/claudiahdz/projects/my-cool-pkg') - -// packs from a registry spec -const registryTar = await pack('abbrev@1.0.3') - -// packs from a github spec -const githubTar = await pack('isaacs/rimraf#PR-192') -``` diff --git a/node_modules/libnpmpublish/CHANGELOG.md b/node_modules/libnpmpublish/CHANGELOG.md deleted file mode 100644 index 57d21f8400c5b..0000000000000 --- a/node_modules/libnpmpublish/CHANGELOG.md +++ /dev/null @@ -1,91 +0,0 @@ -# Change Log - -<a name="3.0.1"></a> -# [3.0.1](https://github.com/npm/libnpmpublish/compare/v3.0.0...v3.0.1) (2020-03-27) - -### Features - -* [`3e02307`](https://github.com/npm/libnpmpublish/commit/3e02307) chore: pack tarballs using libnpmpack ([@claudiahdz](https://github.com/claudiahdz)) - -<a name="3.0.0"></a> -# [3.0.0](https://github.com/npm/libnpmpublish/compare/v2.0.0...v3.0.0) (2020-03-09) - -### Breaking Changes - -* [`ecaeb0b`](https://github.com/npm/libnpmpublish/commit/ecaeb0b) feat: pack tarballs from source code using pacote v10 ([@claudiahdz](https://github.com/claudiahdz)) - -* [`f6bf2b8`](https://github.com/npm/libnpmpublish/commit/f6bf2b8) feat: unpublish code refactor ([@claudiahdz](https://github.com/claudiahdz)) - -### Miscellaneuous - -* [`5cea10f`](https://github.com/npm/libnpmpublish/commit/5cea10f) chore: basic project updates ([@claudiahdz](https://github.com/claudiahdz)) -* [`3010b93`](https://github.com/npm/libnpmpublish/commit/3010b93) chore: cleanup badges + contributing ([@ruyadorno](https://github.com/ruyadorno)) - ---- - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -## [2.0.0](https://github.com/npm/libnpmpublish/compare/v1.1.3...v2.0.0) (2019-09-18) - - -### ⚠ BREAKING CHANGES - -* This drops support for Node.js version 6. 
- -### Bug Fixes - -* audit warnings, drop support for Node.js v6 ([d9a1fb6](https://github.com/npm/libnpmpublish/commit/d9a1fb6)) - -### [1.1.3](https://github.com/npm/libnpmpublish/compare/v1.1.2...v1.1.3) (2019-09-18) - -<a name="1.1.2"></a> -## [1.1.2](https://github.com/npm/libnpmpublish/compare/v1.1.1...v1.1.2) (2019-07-16) - - - -<a name="1.1.1"></a> -## [1.1.1](https://github.com/npm/libnpmpublish/compare/v1.1.0...v1.1.1) (2019-01-22) - - -### Bug Fixes - -* **auth:** send username in correct key ([#3](https://github.com/npm/libnpmpublish/issues/3)) ([38422d0](https://github.com/npm/libnpmpublish/commit/38422d0)) - - - -<a name="1.1.0"></a> -# [1.1.0](https://github.com/npm/libnpmpublish/compare/v1.0.1...v1.1.0) (2018-08-31) - - -### Features - -* **publish:** add support for publishConfig on manifests ([161723b](https://github.com/npm/libnpmpublish/commit/161723b)) - - - -<a name="1.0.1"></a> -## [1.0.1](https://github.com/npm/libnpmpublish/compare/v1.0.0...v1.0.1) (2018-08-31) - - -### Bug Fixes - -* **opts:** remove unused opts ([2837098](https://github.com/npm/libnpmpublish/commit/2837098)) - - - -<a name="1.0.0"></a> -# 1.0.0 (2018-08-31) - - -### Bug Fixes - -* **api:** use opts.algorithms, return true on success ([80fe34b](https://github.com/npm/libnpmpublish/commit/80fe34b)) -* **publish:** first test pass w/ bugfixes ([74135c9](https://github.com/npm/libnpmpublish/commit/74135c9)) -* **publish:** full coverage test and related fixes ([b5a3446](https://github.com/npm/libnpmpublish/commit/b5a3446)) - - -### Features - -* **docs:** add README with api docs ([553c13d](https://github.com/npm/libnpmpublish/commit/553c13d)) -* **publish:** add initial publish support. tests tbd ([5b3fe94](https://github.com/npm/libnpmpublish/commit/5b3fe94)) -* **unpublish:** add new api with unpublish support ([1c9d594](https://github.com/npm/libnpmpublish/commit/1c9d594)) diff --git a/node_modules/libnpmpublish/README.md b/node_modules/libnpmpublish/README.md deleted file mode 100644 index 0da46e89d3b05..0000000000000 --- a/node_modules/libnpmpublish/README.md +++ /dev/null @@ -1,105 +0,0 @@ -# libnpmpublish - -[`libnpmpublish`](https://github.com/npm/libnpmpublish) is a Node.js -library for programmatically publishing and unpublishing npm packages. Give -it a manifest as an object and a tarball as a Buffer, and it'll put them on -the registry for you. - -## Table of Contents - -* [Example](#example) -* [Install](#install) -* [API](#api) - * [publish/unpublish opts](#opts) - * [`publish()`](#publish) - * [`unpublish()`](#unpublish) - -## Example - -```js -const { publish, unpublish } = require('libnpmpublish') -``` - -## Install - -`$ npm install libnpmpublish` - -### API - -#### <a name="opts"></a> `opts` for `libnpmpublish` commands - -`libnpmpublish` uses -[`npm-registry-fetch`](https://npm.im/npm-registry-fetch). Most options -are passed through directly to that library, so please refer to [its own -`opts` documentation](http://npm.im/npm-registry-fetch#fetch-options) for -options that can be passed in. - -A couple of options of note: - -* `opts.defaultTag` - registers the published package with the given tag, - defaults to `latest`. - -* `opts.access` - tells the registry whether this package should be - published as public or restricted. Only applies to scoped packages, which - default to restricted. - -* `opts.token` - can be passed in and will be used as the authentication - token for the registry. For other ways to pass in auth details, see the - n-r-f docs. 
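As a rough sketch of how the options above combine (assuming `manifest` and `tarData` have already been produced, for example with pacote as shown in the `publish()` example below):

```js
const { publish } = require('libnpmpublish')

// hypothetical inputs: a parsed package.json object and a tarball Buffer
await publish(manifest, tarData, {
  defaultTag: 'next',          // register under the `next` dist-tag instead of `latest`
  access: 'public',            // publish a scoped package as public rather than restricted
  token: 'my-auth-token-here', // auth token, passed through to npm-registry-fetch
})
```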
- -#### <a name="publish"></a> `> libpub.publish(manifest, tarData, [opts]) -> Promise` - -Sends the package represented by the `manifest` and `tarData` to the -configured registry. - -`manifest` should be the parsed `package.json` for the package being -published (which can also be the manifest pulled from a packument, a git -repo, tarball, etc.) - -`tarData` is a `Buffer` of the tarball being published. - -If `opts.npmVersion` is passed in, it will be used as the `_npmVersion` -field in the outgoing packument. You may put your own user-agent string in -there to identify your publishes. - -If `opts.algorithms` is passed in, it should be an array of hashing -algorithms to generate `integrity` hashes for. The default is `['sha512']`, -which means you end up with `dist.integrity = 'sha512-deadbeefbadc0ffee'`. -Any algorithm supported by your current node version is allowed -- npm -clients that do not support those algorithms will simply ignore the -unsupported hashes. - -##### Example - -```js -// note that pacote.manifest() and pacote.tarball() can also take -// any spec that npm can install. a folder shown here, since that's -// far and away the most common use case. -const path = '/a/path/to/your/source/code' -const pacote = require('pacote') // see: http://npm.im/pacote -const manifest = await pacote.manifest(path) -const tarData = await pacote.tarball(path) -await libpub.publish(manifest, tarData, { - npmVersion: 'my-pub-script@1.0.2', - token: 'my-auth-token-here' -}, opts) -// Package has been published to the npm registry. -``` - -#### <a name="unpublish"></a> `> libpub.unpublish(spec, [opts]) -> Promise` - -Unpublishes `spec` from the appropriate registry. The registry in question may -have its own limitations on unpublishing. - -`spec` should be either a string, or a valid -[`npm-package-arg`](https://npm.im/npm-package-arg) parsed spec object. For -legacy compatibility reasons, only `tag` and `version` specs will work as -expected. `range` specs will fail silently in most cases. - -##### Example - -```js -await libpub.unpublish('lodash', { token: 'i-am-the-worst'}) -// -// `lodash` has now been unpublished, along with all its versions -``` diff --git a/node_modules/libnpmpublish/package.json b/node_modules/libnpmpublish/package.json index 8476717a1c8e6..ac0d632f7d66d 100644 --- a/node_modules/libnpmpublish/package.json +++ b/node_modules/libnpmpublish/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpublish", - "version": "4.0.0", + "version": "4.0.2", "description": "Programmatic API for the bits behind npm publish and unpublish", "author": "npm Inc. 
<support@npmjs.com>", "contributors": [ @@ -44,11 +44,11 @@ "bugs": "https://github.com/npm/libnpmpublish/issues", "homepage": "https://npmjs.com/package/libnpmpublish", "dependencies": { - "normalize-package-data": "^3.0.0", - "npm-package-arg": "^8.1.0", - "npm-registry-fetch": "^9.0.0", + "normalize-package-data": "^3.0.2", + "npm-package-arg": "^8.1.2", + "npm-registry-fetch": "^11.0.0", "semver": "^7.1.3", - "ssri": "^8.0.0" + "ssri": "^8.0.1" }, "engines": { "node": ">=10" diff --git a/node_modules/libnpmsearch/CHANGELOG.md b/node_modules/libnpmsearch/CHANGELOG.md deleted file mode 100644 index 03b7fedc5bf0d..0000000000000 --- a/node_modules/libnpmsearch/CHANGELOG.md +++ /dev/null @@ -1,57 +0,0 @@ -# Change Log - -<a name="3.0.0"></a> -# [3.0.0](https://github.com/npm/libnpmhook/compare/v2.0.2...v3.0.0) (2020-02-26) - -### Breaking Changes - -* [`45f4db1`](https://github.com/npm/libnpmsearch/commit/45f4db1) fix: remove figgy-pudding ([@claudiahdz](https://github.com/claudiahdz)) - -### Miscellaneuous - -* [`b413aae`](https://github.com/npm/libnpmsearch/commit/b413aae) chore: basic project updates ([@claudiahdz](https://github.com/claudiahdz)) -* [`534983c`](https://github.com/npm/libnpmsearch/commit/534983c) chore: remove pr temmsearch ([@ruyadorno](https://github.com/ruyadorno)) -* [`c503a89`](https://github.com/npm/libnpmsearch/commit/c503a89) chore: cleanup badges + contributing ([@ruyadorno](https://github.com/ruyadorno)) - ---- - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -<a name="2.0.2"></a> -## [2.0.2](https://github.com/npm/libnpmsearch/compare/v2.0.1...v2.0.2) (2019-07-16) - - - -<a name="2.0.1"></a> -## [2.0.1](https://github.com/npm/libnpmsearch/compare/v2.0.0...v2.0.1) (2019-06-10) - - -### Bug Fixes - -* **opts:** support `opts.from` properly ([#2](https://github.com/npm/libnpmsearch/issues/2)) ([da6636c](https://github.com/npm/libnpmsearch/commit/da6636c)) -* **standard:** standard --fix ([beca19c](https://github.com/npm/libnpmsearch/commit/beca19c)) - - - -<a name="2.0.0"></a> -# [2.0.0](https://github.com/npm/libnpmsearch/compare/v1.0.0...v2.0.0) (2018-08-28) - - -### Features - -* **opts:** added options for pagination, details, and sorting weights ([ff97eb5](https://github.com/npm/libnpmsearch/commit/ff97eb5)) - - -### BREAKING CHANGES - -* **opts:** this changes default requests and makes libnpmsearch return more complete data for individual packages, without null-defaulting - - - -<a name="1.0.0"></a> -# 1.0.0 (2018-08-27) - - -### Features - -* **api:** got API working ([fe90008](https://github.com/npm/libnpmsearch/commit/fe90008)) diff --git a/node_modules/libnpmsearch/README.md b/node_modules/libnpmsearch/README.md deleted file mode 100644 index 31f44fe247923..0000000000000 --- a/node_modules/libnpmsearch/README.md +++ /dev/null @@ -1,173 +0,0 @@ -# libnpmsearch - -[![npm version](https://img.shields.io/npm/v/libnpmsearch.svg)](https://npm.im/libnpmsearch) -[![license](https://img.shields.io/npm/l/libnpmsearch.svg)](https://npm.im/libnpmsearch) -[![Coverage Status](https://coveralls.io/repos/github/npm/libnpmsearch/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmsearch?branch=latest) - -[`libnpmsearch`](https://github.com/npm/libnpmsearch) is a Node.js library for -programmatically accessing the npm search endpoint. It does **not** support -legacy search through `/-/all`. 
- -## Table of Contents - -* [Example](#example) -* [Install](#install) -* [Contributing](#contributing) -* [API](#api) - * [search opts](#opts) - * [`search()`](#search) - * [`search.stream()`](#search-stream) - -## Example - -```js -const search = require('libnpmsearch') - -console.log(await search('libnpm')) -=> -[ - { - name: 'libnpm', - description: 'programmatic npm API', - ...etc - }, - { - name: 'libnpmsearch', - description: 'Programmatic API for searching in npm and compatible registries', - ...etc - }, - ...more -] -``` - -## Install - -`$ npm install libnpmsearch` - -### API - -#### <a name="opts"></a> `opts` for `libnpmsearch` commands - -The following opts are used directly by `libnpmsearch` itself: - -* `opts.limit` - Number of results to limit the query to. Default: 20 -* `opts.from` - Offset number for results. Used with `opts.limit` for pagination. Default: 0 -* `opts.detailed` - If true, returns an object with `package`, `score`, and `searchScore` fields, with `package` being what would usually be returned, and the other two containing details about how that package scored. Useful for UIs. Default: false -* `opts.sortBy` - Used as a shorthand to set `opts.quality`, `opts.maintenance`, and `opts.popularity` with values that prioritize each one. Should be one of `'optimal'`, `'quality'`, `'maintenance'`, or `'popularity'`. Default: `'optimal'` -* `opts.maintenance` - Decimal number between `0` and `1` that defines the weight of `maintenance` metrics when scoring and sorting packages. Default: `0.65` (same as `opts.sortBy: 'optimal'`) -* `opts.popularity` - Decimal number between `0` and `1` that defines the weight of `popularity` metrics when scoring and sorting packages. Default: `0.98` (same as `opts.sortBy: 'optimal'`) -* `opts.quality` - Decimal number between `0` and `1` that defines the weight of `quality` metrics when scoring and sorting packages. Default: `0.5` (same as `opts.sortBy: 'optimal'`) - -`libnpmsearch` uses [`npm-registry-fetch`](https://npm.im/npm-registry-fetch). -Most options are passed through directly to that library, so please refer to -[its own `opts` -documentation](https://www.npmjs.com/package/npm-registry-fetch#fetch-options) -for options that can be passed in. - -A couple of options of note for those in a hurry: - -* `opts.token` - can be passed in and will be used as the authentication token for the registry. For other ways to pass in auth details, see the n-r-f docs. - -#### <a name="search"></a> `> search(query, [opts]) -> Promise` - -`query` must be either a String or an Array of search terms. - -If `opts.limit` is provided, it will be sent to the API to constrain the number -of returned results. You may receive more, or fewer results, at the endpoint's -discretion. - -The returned Promise resolved to an Array of search results with the following -format: - -```js -{ - name: String, - version: SemverString, - description: String || null, - maintainers: [ - { - username: String, - email: String - }, - ...etc - ] || null, - keywords: [String] || null, - date: Date || null -} -``` - -If `opts.limit` is provided, it will be sent to the API to constrain the number -of returned results. You may receive more, or fewer results, at the endpoint's -discretion. - -For streamed results, see [`search.stream`](#search-stream). 
- -##### Example - -```javascript -await search('libnpm') -=> -[ - { - name: 'libnpm', - description: 'programmatic npm API', - ...etc - }, - { - name: 'libnpmsearch', - description: 'Programmatic API for searching in npm and compatible registries', - ...etc - }, - ...more -] -``` - -#### <a name="search-stream"></a> `> search.stream(query, [opts]) -> Stream` - -`query` must be either a String or an Array of search terms. - -If `opts.limit` is provided, it will be sent to the API to constrain the number -of returned results. You may receive more, or fewer results, at the endpoint's -discretion. - -The returned Stream emits one entry per search result, with each entry having -the following format: - -```js -{ - name: String, - version: SemverString, - description: String || null, - maintainers: [ - { - username: String, - email: String - }, - ...etc - ] || null, - keywords: [String] || null, - date: Date || null -} -``` - -For getting results in one chunk, see [`search`](#search). - -##### Example - -```javascript -search.stream('libnpm').on('data', console.log) -=> -// entry 1 -{ - name: 'libnpm', - description: 'programmatic npm API', - ...etc -} -// entry 2 -{ - name: 'libnpmsearch', - description: 'Programmatic API for searching in npm and compatible registries', - ...etc -} -// etc -``` diff --git a/node_modules/libnpmsearch/package.json b/node_modules/libnpmsearch/package.json index a32a194ae6a10..88179b8d6fde8 100644 --- a/node_modules/libnpmsearch/package.json +++ b/node_modules/libnpmsearch/package.json @@ -1,6 +1,6 @@ { "name": "libnpmsearch", - "version": "3.1.0", + "version": "3.1.2", "description": "Programmatic API for searching in npm and compatible registries.", "author": "Kat Marchán <kzm@sykosomatic.org>", "files": [ @@ -36,7 +36,7 @@ "bugs": "https://github.com/npm/libnpmsearch/issues", "homepage": "https://npmjs.com/package/libnpmsearch", "dependencies": { - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" }, "engines": { "node": ">=10" diff --git a/node_modules/libnpmteam/CHANGELOG.md b/node_modules/libnpmteam/CHANGELOG.md deleted file mode 100644 index ba472cfcc52ba..0000000000000 --- a/node_modules/libnpmteam/CHANGELOG.md +++ /dev/null @@ -1,40 +0,0 @@ -# Change Log - -## [2.0.0](https://github.com/npm/libnpmteam/compare/v1.0.2...v2.0.0) (2020-03-02) - -### BREAKING CHANGE -- Removed `figgy-pudding` as a dependency -- Using native promises -- Require node >= v10 - -### Feature -- Updated stream interface to `minipass` type stream - ---- - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
- -<a name="1.0.2"></a> -## [1.0.2](https://github.com/npm/libnpmteam/compare/v1.0.1...v1.0.2) (2019-07-16) - - -### Bug Fixes - -* **standard:** standard --fix ([3dc9144](https://github.com/npm/libnpmteam/commit/3dc9144)) - - - -<a name="1.0.1"></a> -## [1.0.1](https://github.com/npm/libnpmteam/compare/v1.0.0...v1.0.1) (2018-08-24) - - - -<a name="1.0.0"></a> -# 1.0.0 (2018-08-22) - - -### Features - -* **api:** implement team api ([50dd0e1](https://github.com/npm/libnpmteam/commit/50dd0e1)) -* **docs:** add fully-documented readme ([b1370f3](https://github.com/npm/libnpmteam/commit/b1370f3)) -* **test:** test --100 ftw ([9d3bdc3](https://github.com/npm/libnpmteam/commit/9d3bdc3)) diff --git a/node_modules/libnpmteam/README.md b/node_modules/libnpmteam/README.md deleted file mode 100644 index bb2700292dc8a..0000000000000 --- a/node_modules/libnpmteam/README.md +++ /dev/null @@ -1,189 +0,0 @@ -# libnpmteam - -[![npm version](https://img.shields.io/npm/v/libnpmteam.svg)](https://npm.im/libnpmteam) -[![license](https://img.shields.io/npm/l/libnpmteam.svg)](https://npm.im/libnpmteam) -[![GitHub Actions](https://github.com/npm/libnpmteam/workflows/Node%20CI/badge.svg)](https://github.com/npm/libnpmteam/workflows/Node%20CI/badge.svg) -[![Coverage Status](https://coveralls.io/repos/github/npm/libnpmteam/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmteam?branch=latest) - -[`libnpmteam`](https://github.com/npm/libnpmteam) is a Node.js -library that provides programmatic access to the guts of the npm CLI's `npm -team` command and its various subcommands. - -## Example - -```javascript -const team = require('libnpmteam') - -// List all teams for the @npm org. -console.log(await team.lsTeams('npm')) -``` - -## Publishing -1. Manually create CHANGELOG.md file -1. Commit changes to CHANGELOG.md - ```bash - $ git commit -m "chore: updated CHANGELOG.md" - ``` -1. Run `npm version {newVersion}` - ```bash - # Example - $ npm version patch - # 1. Runs `coverage` and `lint` scripts - # 2. Bumps package version and **creates commit/tag** - # 3. Runs `npm publish`; publishing directory with **unpushed commit** - # 4. Runs `git push origin --follow-tags` - ``` - -## Table of Contents - -* [Installing](#install) -* [Example](#example) -* [API](#api) - * [team opts](#opts) - * [`create()`](#create) - * [`destroy()`](#destroy) - * [`add()`](#add) - * [`rm()`](#rm) - * [`lsTeams()`](#ls-teams) - * [`lsTeams.stream()`](#ls-teams-stream) - * [`lsUsers()`](#ls-users) - * [`lsUsers.stream()`](#ls-users-stream) - -### Install - -`$ npm install libnpmteam` - -### API - -#### <a name="opts"></a> `opts` for `libnpmteam` commands - -`libnpmteam` uses [`npm-registry-fetch`](https://npm.im/npm-registry-fetch). -All options are passed through directly to that library, so please refer to [its -own `opts` -documentation](https://www.npmjs.com/package/npm-registry-fetch#fetch-options) -for options that can be passed in. - -A couple of options of note for those in a hurry: - -* `opts.token` - can be passed in and will be used as the authentication token for the registry. For other ways to pass in auth details, see the n-r-f docs. -* `opts.otp` - certain operations will require an OTP token to be passed in. If a `libnpmteam` command fails with `err.code === EOTP`, please retry the request with `{otp: <2fa token>}` - -#### <a name="create"></a> `> team.create(team, [opts]) -> Promise` - -Creates a team named `team`. Team names use the format `@<scope>:<name>`, with -the `@` being optional. 
- -Additionally, `opts.description` may be passed in to include a description. - -##### Example - -```javascript -await team.create('@npm:cli', {token: 'myregistrytoken'}) -// The @npm:cli team now exists. -``` - -#### <a name="destroy"></a> `> team.destroy(team, [opts]) -> Promise` - -Destroys a team named `team`. Team names use the format `@<scope>:<name>`, with -the `@` being optional. - -##### Example - -```javascript -await team.destroy('@npm:cli', {token: 'myregistrytoken'}) -// The @npm:cli team has been destroyed. -``` - -#### <a name="add"></a> `> team.add(user, team, [opts]) -> Promise` - -Adds `user` to `team`. - -##### Example - -```javascript -await team.add('zkat', '@npm:cli', {token: 'myregistrytoken'}) -// @zkat now belongs to the @npm:cli team. -``` - -#### <a name="rm"></a> `> team.rm(user, team, [opts]) -> Promise` - -Removes `user` from `team`. - -##### Example - -```javascript -await team.rm('zkat', '@npm:cli', {token: 'myregistrytoken'}) -// @zkat is no longer part of the @npm:cli team. -``` - -#### <a name="ls-teams"></a> `> team.lsTeams(scope, [opts]) -> Promise` - -Resolves to an array of team names belonging to `scope`. - -##### Example - -```javascript -await team.lsTeams('@npm', {token: 'myregistrytoken'}) -=> -[ - 'npm:cli', - 'npm:web', - 'npm:registry', - 'npm:developers' -] -``` - -#### <a name="ls-teams-stream"></a> `> team.lsTeams.stream(scope, [opts]) -> Stream` - -Returns a stream of teams belonging to `scope`. - -For a Promise-based version of these results, see [`team.lsTeams()`](#ls-teams). - -##### Example - -```javascript -for await (let team of team.lsTeams.stream('@npm', {token: 'myregistrytoken'})) { - console.log(team) -} - -// outputs -// npm:cli -// npm:web -// npm:registry -// npm:developers -``` - -#### <a name="ls-users"></a> `> team.lsUsers(team, [opts]) -> Promise` - -Resolves to an array of usernames belonging to `team`. - -For a streamed version of these results, see [`team.lsUsers.stream()`](#ls-users-stream). - -##### Example - -```javascript -await team.lsUsers('@npm:cli', {token: 'myregistrytoken'}) -=> -[ - 'iarna', - 'zkat' -] -``` - -#### <a name="ls-users-stream"></a> `> team.lsUsers.stream(team, [opts]) -> Stream` - -Returns a stream of usernames belonging to `team`. - -For a Promise-based version of these results, see [`team.lsUsers()`](#ls-users). - -##### Example - -```javascript -for await (let user of team.lsUsers.stream('@npm:cli', {token: 'myregistrytoken'})) { - console.log(user) -} - -// outputs -// iarna -// zkat -``` diff --git a/node_modules/libnpmteam/package.json b/node_modules/libnpmteam/package.json index fc3bf5b3392d6..09837ad2dd14a 100644 --- a/node_modules/libnpmteam/package.json +++ b/node_modules/libnpmteam/package.json @@ -1,7 +1,7 @@ { "name": "libnpmteam", "description": "npm Team management APIs", - "version": "2.0.2", + "version": "2.0.4", "author": "Kat Marchán <kzm@zkat.tech>", "license": "ISC", "scripts": { @@ -27,7 +27,7 @@ "homepage": "https://npmjs.com/package/libnpmteam", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" }, "engines": { "node": ">=10" diff --git a/node_modules/libnpmversion/README.md b/node_modules/libnpmversion/README.md deleted file mode 100644 index daa0b88157c6c..0000000000000 --- a/node_modules/libnpmversion/README.md +++ /dev/null @@ -1,154 +0,0 @@ -# libnpmversion - -Library to do the things that 'npm version' does. 
- -## USAGE - -```js -const npmVersion = require('libnpmversion') - -// argument can be one of: -// - any semver version string (set to that exact version) -// - 'major', 'minor', 'patch', 'pre{major,minor,patch}' (increment at -// that value) -// - 'from-git' (set to the latest semver-lookin git tag - this skips -// gitTagVersion, but will still sign if asked) -npmVersion(arg, { - path: '/path/to/my/pkg', // defaults to cwd - - allowSameVersion: false, // allow tagging/etc to the current version - preid: '', // when arg=='pre', define the prerelease string, like 'beta' etc. - tagVersionPrefix: 'v', // tag as 'v1.2.3' when versioning to 1.2.3 - commitHooks: true, // default true, run git commit hooks, default true - gitTagVersion: true, // default true, tag the version - signGitCommit: false, // default false, gpg sign the git commit - signGitTag: false, // default false, gpg sign the git tag - force: false, // push forward recklessly if any problems happen - ignoreScripts: false, // do not run pre/post/version lifecycle scripts - message: 'v%s', // message for tag and commit, replace %s with the version -}).then(newVersion => { - console.error('version updated!', newVersion) -}) -``` - -## Description - -Run this in a package directory to bump the version and write the new data -back to `package.json`, `package-lock.json`, and, if present, -`npm-shrinkwrap.json`. - -The `newversion` argument should be a valid semver string, a valid second -argument to [semver.inc](https://github.com/npm/node-semver#functions) (one -of `patch`, `minor`, `major`, `prepatch`, `preminor`, `premajor`, -`prerelease`), or `from-git`. In the second case, the existing version will -be incremented by 1 in the specified field. `from-git` will try to read -the latest git tag, and use that as the new npm version. - -If run in a git repo, it will also create a version commit and tag. This -behavior is controlled by `gitTagVersion` (see below), and can be -disabled by setting `gitTagVersion: false` in the options. -It will fail if the working directory is not clean, unless `force: true` is -set. - -If supplied with a `message` string option, it will -use it as a commit message when creating a version commit. If the -`message` option contains `%s` then that will be replaced with the -resulting version number. - -If the `signGitTag` option is set, then the tag will be signed using -the `-s` flag to git. Note that you must have a default GPG key set up in -your git config for this to work properly. - -If `preversion`, `version`, or `postversion` are in the `scripts` property -of the package.json, they will be executed in the appropriate sequence. - -The exact order of execution is as follows: - -1. Check to make sure the git working directory is clean before we get - started. Your scripts may add files to the commit in future steps. - This step is skipped if the `force` flag is set. -2. Run the `preversion` script. These scripts have access to the old - `version` in package.json. A typical use would be running your full - test suite before deploying. Any files you want added to the commit - should be explicitly added using `git add`. -3. Bump `version` in `package.json` as requested (`patch`, `minor`, - `major`, explicit version number, etc). -4. Run the `version` script. These scripts have access to the new `version` - in package.json (so they can incorporate it into file headers in - generated files for example). Again, scripts should explicitly add - generated files to the commit using `git add`. -5. Commit and tag. -6. 
Run the `postversion` script. Use it to clean up the file system or - automatically push the commit and/or tag. - -Take the following example: - -```json -{ - "scripts": { - "preversion": "npm test", - "version": "npm run build && git add -A dist", - "postversion": "git push && git push --tags && rm -rf build/temp" - } -} -``` - -This runs all your tests, and proceeds only if they pass. Then runs your -`build` script, and adds everything in the `dist` directory to the commit. -After the commit, it pushes the new commit and tag up to the server, and -deletes the `build/temp` directory. - -## API - -### `npmVersion(newversion, options = {}) -> Promise<String>` - -Do the things. Returns a promise that resolves to the new version if -all is well, or rejects if any errors are encountered. - -### Options - -#### `path` String - -The path to the package being versionified. Defaults to process.cwd(). - -#### `allowSameVersion` Boolean - -Allow setting the version to the current version in package.json. Default -`false`. - -#### `preid` String -When the `newversion` is pre, premajor, preminor, or prepatch, this -defines the prerelease string, like 'beta' etc. - -#### `tagVersionPrefix` String - -The prefix to add to the raw semver string for the tag name. Defaults to -`'v'`. (So, by default it tags as 'v1.2.3' when versioning to 1.2.3.) - -#### `commitHooks` Boolean - -Run git commit hooks. Default true. - -#### `gitTagVersion` Boolean - -Tag the version, default true. - -#### `signGitCommit` Boolean - -GPG sign the git commit. Default `false`. - -#### `signGitTag` Boolean - -GPG sign the git tag. Default `false`. - -#### `force` Boolean - -Push forward recklessly if any problems happen. Default `false`. - -#### `ignoreScripts` Boolean - -Do not run pre/post/version lifecycle scripts. Default `false`. - -#### `message` String - -The message for the git commit and annotated git tag that are created. diff --git a/node_modules/libnpmversion/lib/commit.js b/node_modules/libnpmversion/lib/commit.js index bd621acb4393d..dec6edbec98c3 100644 --- a/node_modules/libnpmversion/lib/commit.js +++ b/node_modules/libnpmversion/lib/commit.js @@ -1,14 +1,17 @@ const git = require('@npmcli/git') module.exports = (version, opts) => { - const {commitHooks, allowSameVersion, signGitCommit, message} = opts + const { commitHooks, allowSameVersion, signGitCommit, message } = opts const args = ['commit'] - if (commitHooks === false) + if (commitHooks === false) { args.push('-n') - if (allowSameVersion) + } + if (allowSameVersion) { args.push('--allow-empty') - if (signGitCommit) + } + if (signGitCommit) { args.push('-S') + } args.push('-m') return git.spawn([...args, message.replace(/%s/g, version)], opts) } diff --git a/node_modules/libnpmversion/lib/enforce-clean.js b/node_modules/libnpmversion/lib/enforce-clean.js index 980419ffb43d8..6103da9bd96af 100644 --- a/node_modules/libnpmversion/lib/enforce-clean.js +++ b/node_modules/libnpmversion/lib/enforce-clean.js @@ -15,17 +15,16 @@ module.exports = async opts => { hadError = true // how can merges be real if our git isn't real? return true - } else + } else { throw er + } }) if (!clean) { - if (!force) + if (!force) { throw new Error('Git working directory not clean.') - log.warn( - 'version', - 'Git working directory not clean, proceeding forcefully.' 
- ) + } + log.warn('version', 'Git working directory not clean, proceeding forcefully.') } return !hadError diff --git a/node_modules/libnpmversion/lib/index.js b/node_modules/libnpmversion/lib/index.js index 525d8264e737a..b10b3e6ba4123 100644 --- a/node_modules/libnpmversion/lib/index.js +++ b/node_modules/libnpmversion/lib/index.js @@ -1,4 +1,4 @@ -const readJson = require('read-package-json-fast') +const readJson = require('./read-json.js') const version = require('./version.js') const proclog = require('./proc-log.js') @@ -13,9 +13,10 @@ module.exports = async (newversion, opts = {}) => { signGitTag = false, force = false, ignoreScripts = false, + scriptShell = undefined, preid = null, log = proclog, - message = 'v%s', + message = 'v%s' } = opts const pkg = opts.pkg || await readJson(path + '/package.json') @@ -31,9 +32,10 @@ module.exports = async (newversion, opts = {}) => { signGitTag, force, ignoreScripts, + scriptShell, preid, pkg, log, - message, + message }) } diff --git a/node_modules/libnpmversion/lib/read-json.js b/node_modules/libnpmversion/lib/read-json.js new file mode 100644 index 0000000000000..2dd0f7aa4902e --- /dev/null +++ b/node_modules/libnpmversion/lib/read-json.js @@ -0,0 +1,7 @@ +// can't use read-package-json-fast, because we want to ensure +// that we make as few changes as possible, even for safety issues. +const { promisify } = require('util') +const readFile = promisify(require('fs').readFile) +const parse = require('json-parse-even-better-errors') + +module.exports = async path => parse(await readFile(path)) diff --git a/node_modules/libnpmversion/lib/retrieve-tag.js b/node_modules/libnpmversion/lib/retrieve-tag.js index b657561b861e7..6adb6df317a8d 100644 --- a/node_modules/libnpmversion/lib/retrieve-tag.js +++ b/node_modules/libnpmversion/lib/retrieve-tag.js @@ -1,11 +1,11 @@ -const {spawn} = require('@npmcli/git') +const { spawn } = require('@npmcli/git') const semver = require('semver') module.exports = async opts => { - const tag = (await spawn(['describe', '--abbrev=0'], opts)).stdout.trim() - const match = tag.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)/) - const ver = match && semver.clean(match[1], { loose: true }) - if (ver) - return ver + const tag = (await spawn(['describe', '--tags', '--abbrev=0', '--match=*.*.*'], opts)).stdout.trim() + const ver = semver.coerce(tag, { loose: true }) + if (ver) { + return ver.version + } throw new Error(`Tag is not a valid version: ${JSON.stringify(tag)}`) } diff --git a/node_modules/libnpmversion/lib/tag.js b/node_modules/libnpmversion/lib/tag.js index bd6c803a38499..73134dd25e6fe 100644 --- a/node_modules/libnpmversion/lib/tag.js +++ b/node_modules/libnpmversion/lib/tag.js @@ -1,20 +1,30 @@ const git = require('@npmcli/git') module.exports = async (version, opts) => { - const { signGitTag, allowSameVersion, tagVersionPrefix, message } = opts - const tag = `${tagVersionPrefix}${version}` + const { + signGitTag, + allowSameVersion, + tagVersionPrefix, + message + } = opts + const tag = `${tagVersionPrefix}${version}` const flags = ['-'] - if (signGitTag) + + if (signGitTag) { flags.push('s') - if (allowSameVersion) + } + + if (allowSameVersion) { flags.push('f') + } + flags.push('m') return git.spawn([ 'tag', flags.join(''), message.replace(/%s/g, version), - tag, + tag ], opts) } diff --git a/node_modules/libnpmversion/lib/version.js b/node_modules/libnpmversion/lib/version.js index 31e6023f017a5..2ef79173fca76 100644 --- a/node_modules/libnpmversion/lib/version.js +++ b/node_modules/libnpmversion/lib/version.js @@ 
-4,7 +4,7 @@ const retrieveTag = require('./retrieve-tag.js') const semver = require('semver') const enforceClean = require('./enforce-clean.js') const writeJson = require('./write-json.js') -const readJson = require('read-package-json-fast') +const readJson = require('./read-json.js') const git = require('@npmcli/git') const commit = require('./commit.js') const tag = require('./tag.js') @@ -15,31 +15,30 @@ module.exports = async (newversion, opts) => { const { path, allowSameVersion, - tagVersionPrefix, - commitHooks, gitTagVersion, - signGitCommit, - signGitTag, - force, ignoreScripts, preid, pkg, - log, - message, + log } = opts const { valid, clean, inc } = semver const current = pkg.version || '0.0.0' const currentClean = clean(current) - const newV = valid(newversion, { loose: true }) ? clean(newversion, { loose: true }) - : newversion === 'from-git' ? await retrieveTag(opts) - : inc(currentClean, newversion, { loose: true }, preid) + let newV + if (valid(newversion, { loose: true })) { + newV = clean(newversion, { loose: true }) + } else if (newversion === 'from-git') { + newV = await retrieveTag(opts) + } else { + newV = inc(currentClean, newversion, { loose: true }, preid) + } if (!newV) { throw Object.assign(new Error('Invalid version: ' + newversion), { current, - requested: newversion, + requested: newversion }) } @@ -47,7 +46,7 @@ module.exports = async (newversion, opts) => { throw Object.assign(new Error('Version not changed'), { current, requested: newversion, - newVersion: newV, + newVersion: newV }) } @@ -68,8 +67,8 @@ module.exports = async (newversion, opts) => { banner: log.level !== 'silent', env: { npm_old_version: current, - npm_new_version: newV, - }, + npm_new_version: newV + } }) } @@ -102,8 +101,8 @@ module.exports = async (newversion, opts) => { banner: log.level !== 'silent', env: { npm_old_version: current, - npm_new_version: newV, - }, + npm_new_version: newV + } }) } @@ -116,8 +115,7 @@ module.exports = async (newversion, opts) => { } await commit(newV, opts) await tag(newV, opts) - } else - log.verbose('version', 'Not tagging: not in a git repo or no git cmd') + } else { log.verbose('version', 'Not tagging: not in a git repo or no git cmd') } if (!ignoreScripts) { await runScript({ @@ -128,8 +126,8 @@ module.exports = async (newversion, opts) => { banner: log.level !== 'silent', env: { npm_old_version: current, - npm_new_version: newV, - }, + npm_new_version: newV + } }) } diff --git a/node_modules/libnpmversion/lib/write-json.js b/node_modules/libnpmversion/lib/write-json.js index 30ca1af0f19ed..813bb7ffc279b 100644 --- a/node_modules/libnpmversion/lib/write-json.js +++ b/node_modules/libnpmversion/lib/write-json.js @@ -1,5 +1,5 @@ // write the json back, preserving the line breaks and indent -const {promisify} = require('util') +const { promisify } = require('util') const writeFile = promisify(require('fs').writeFile) const kIndent = Symbol.for('indent') const kNewline = Symbol.for('newline') @@ -7,7 +7,7 @@ const kNewline = Symbol.for('newline') module.exports = async (path, pkg) => { const { [kIndent]: indent = 2, - [kNewline]: newline = '\n', + [kNewline]: newline = '\n' } = pkg delete pkg._id const raw = JSON.stringify(pkg, null, indent) + '\n' diff --git a/node_modules/libnpmversion/package.json b/node_modules/libnpmversion/package.json index 3d15bbc2f7f72..1ee2ee5995a52 100644 --- a/node_modules/libnpmversion/package.json +++ b/node_modules/libnpmversion/package.json @@ -1,6 +1,6 @@ { "name": "libnpmversion", - "version": "1.0.11", + "version": 
"1.2.1", "main": "lib/index.js", "files": [ "lib/*.js" @@ -13,25 +13,34 @@ "author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)", "license": "ISC", "scripts": { + "lint": "standard", + "lint:fix": "standard --fix", "test": "tap", + "posttest": "npm run lint", "snap": "tap", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags" }, + "standard": { + "ignore": [ + "tap-snapshots" + ] + }, "tap": { "coverage-map": "map.js", "check-coverage": true }, "devDependencies": { "require-inject": "^1.4.4", + "standard": "^16.0.3", "tap": "^14.11.0" }, "dependencies": { - "@npmcli/git": "^2.0.6", - "@npmcli/run-script": "^1.8.3", - "read-package-json-fast": "^2.0.1", - "semver": "^7.3.4", + "@npmcli/git": "^2.0.7", + "@npmcli/run-script": "^1.8.4", + "json-parse-even-better-errors": "^2.3.1", + "semver": "^7.3.5", "stringify-package": "^1.0.1" } } diff --git a/node_modules/lru-cache/README.md b/node_modules/lru-cache/README.md deleted file mode 100644 index 435dfebb7e27d..0000000000000 --- a/node_modules/lru-cache/README.md +++ /dev/null @@ -1,166 +0,0 @@ -# lru cache - -A cache object that deletes the least-recently-used items. - -[![Build Status](https://travis-ci.org/isaacs/node-lru-cache.svg?branch=master)](https://travis-ci.org/isaacs/node-lru-cache) [![Coverage Status](https://coveralls.io/repos/isaacs/node-lru-cache/badge.svg?service=github)](https://coveralls.io/github/isaacs/node-lru-cache) - -## Installation: - -```javascript -npm install lru-cache --save -``` - -## Usage: - -```javascript -var LRU = require("lru-cache") - , options = { max: 500 - , length: function (n, key) { return n * 2 + key.length } - , dispose: function (key, n) { n.close() } - , maxAge: 1000 * 60 * 60 } - , cache = new LRU(options) - , otherCache = new LRU(50) // sets just the max size - -cache.set("key", "value") -cache.get("key") // "value" - -// non-string keys ARE fully supported -// but note that it must be THE SAME object, not -// just a JSON-equivalent object. -var someObject = { a: 1 } -cache.set(someObject, 'a value') -// Object keys are not toString()-ed -cache.set('[object Object]', 'a different value') -assert.equal(cache.get(someObject), 'a value') -// A similar object with same keys/values won't work, -// because it's a different object identity -assert.equal(cache.get({ a: 1 }), undefined) - -cache.reset() // empty the cache -``` - -If you put more stuff in it, then items will fall out. - -If you try to put an oversized thing in it, then it'll fall out right -away. - -## Options - -* `max` The maximum size of the cache, checked by applying the length - function to all values in the cache. Not setting this is kind of - silly, since that's the whole purpose of this lib, but it defaults - to `Infinity`. Setting it to a non-number or negative number will - throw a `TypeError`. Setting it to 0 makes it be `Infinity`. -* `maxAge` Maximum age in ms. Items are not pro-actively pruned out - as they age, but if you try to get an item that is too old, it'll - drop it and return undefined instead of giving it to you. - Setting this to a negative value will make everything seem old! - Setting it to a non-number will throw a `TypeError`. -* `length` Function that is used to calculate the length of stored - items. If you're storing strings or buffers, then you probably want - to do something like `function(n, key){return n.length}`. The default is - `function(){return 1}`, which is fine if you want to store `max` - like-sized things. 
The item is passed as the first argument, and - the key is passed as the second argumnet. -* `dispose` Function that is called on items when they are dropped - from the cache. This can be handy if you want to close file - descriptors or do other cleanup tasks when items are no longer - accessible. Called with `key, value`. It's called *before* - actually removing the item from the internal cache, so if you want - to immediately put it back in, you'll have to do that in a - `nextTick` or `setTimeout` callback or it won't do anything. -* `stale` By default, if you set a `maxAge`, it'll only actually pull - stale items out of the cache when you `get(key)`. (That is, it's - not pre-emptively doing a `setTimeout` or anything.) If you set - `stale:true`, it'll return the stale value before deleting it. If - you don't set this, then it'll return `undefined` when you try to - get a stale entry, as if it had already been deleted. -* `noDisposeOnSet` By default, if you set a `dispose()` method, then - it'll be called whenever a `set()` operation overwrites an existing - key. If you set this option, `dispose()` will only be called when a - key falls out of the cache, not when it is overwritten. -* `updateAgeOnGet` When using time-expiring entries with `maxAge`, - setting this to `true` will make each item's effective time update - to the current time whenever it is retrieved from cache, causing it - to not expire. (It can still fall out of cache based on recency of - use, of course.) - -## API - -* `set(key, value, maxAge)` -* `get(key) => value` - - Both of these will update the "recently used"-ness of the key. - They do what you think. `maxAge` is optional and overrides the - cache `maxAge` option if provided. - - If the key is not found, `get()` will return `undefined`. - - The key and val can be any value. - -* `peek(key)` - - Returns the key value (or `undefined` if not found) without - updating the "recently used"-ness of the key. - - (If you find yourself using this a lot, you *might* be using the - wrong sort of data structure, but there are some use cases where - it's handy.) - -* `del(key)` - - Deletes a key out of the cache. - -* `reset()` - - Clear the cache entirely, throwing away all values. - -* `has(key)` - - Check if a key is in the cache, without updating the recent-ness - or deleting it for being stale. - -* `forEach(function(value,key,cache), [thisp])` - - Just like `Array.prototype.forEach`. Iterates over all the keys - in the cache, in order of recent-ness. (Ie, more recently used - items are iterated over first.) - -* `rforEach(function(value,key,cache), [thisp])` - - The same as `cache.forEach(...)` but items are iterated over in - reverse order. (ie, less recently used items are iterated over - first.) - -* `keys()` - - Return an array of the keys in the cache. - -* `values()` - - Return an array of the values in the cache. - -* `length` - - Return total length of objects in cache taking into account - `length` options function. - -* `itemCount` - - Return total quantity of objects currently in cache. Note, that - `stale` (see options) items are returned as part of this item - count. - -* `dump()` - - Return an array of the cache entries ready for serialization and usage - with 'destinationCache.load(arr)`. - -* `load(cacheEntriesArray)` - - Loads another cache entries array, obtained with `sourceCache.dump()`, - into the cache. 
The destination cache is reset before loading new entries - -* `prune()` - - Manually iterates over the entire cache proactively pruning old entries diff --git a/node_modules/make-fetch-happen/CHANGELOG.md b/node_modules/make-fetch-happen/CHANGELOG.md deleted file mode 100644 index 324dfc1058d93..0000000000000 --- a/node_modules/make-fetch-happen/CHANGELOG.md +++ /dev/null @@ -1,654 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -### [8.0.3](https://github.com/npm/make-fetch-happen/compare/v8.0.2...v8.0.3) (2020-03-03) - - -### Bug Fixes - -* remoteFetch takes instance of fetch.Headers ([6e0de7b](https://github.com/npm/make-fetch-happen/commit/6e0de7b10b8597eaff69fea06a266914766cf5ab)), closes [#22](https://github.com/npm/make-fetch-happen/issues/22) - -### [8.0.1](https://github.com/npm/make-fetch-happen/compare/v8.0.0...v8.0.1) (2020-02-18) - -## [8.0.0](https://github.com/npm/make-fetch-happen/compare/v7.1.1...v8.0.0) (2020-02-18) - - -### ⚠ BREAKING CHANGES - -* this module now only supports taking a plain JavaScript -options object, not a figgy pudding config object. - -* update cacache and ssri ([09e4f97](https://github.com/npm/make-fetch-happen/commit/09e4f9794a6f134d3f1d8e65eb9bd940e38e5bfc)) - -### [7.1.1](https://github.com/npm/make-fetch-happen/compare/v7.1.0...v7.1.1) (2020-01-28) - -## [7.1.0](https://github.com/npm/make-fetch-happen/compare/v7.0.0...v7.1.0) (2019-12-17) - - -### Features - -* use globalAgent when in lambda ([bd9409d](https://github.com/npm/make-fetch-happen/commit/bd9409da246a979b665ebd23967ec01dd928ce47)), closes [#4](https://github.com/npm/make-fetch-happen/issues/4) - -## [7.0.0](https://github.com/npm/make-fetch-happen/compare/v6.1.0...v7.0.0) (2019-12-17) - - -### ⚠ BREAKING CHANGES - -* drops support for node v8, since it's EOL as of 2020-01-01 - -### Features - -* **github:** added github actions with coveralls integration ([1913c1b](https://github.com/npm/make-fetch-happen/commit/1913c1b51aaac6044b4dab65b3d19ec943a35f39)) -* updated fetch module; linting mostly; based on testing ([063f28e](https://github.com/npm/make-fetch-happen/commit/063f28ea1ac23f7e9d9d79e15949ca82b634ce97)) -* **utils:** fixed configure-options based on testing ([9dd4f6f](https://github.com/npm/make-fetch-happen/commit/9dd4f6f108442dc247de44e1ddc0341edcb84c9b)) -* fixed test dep requires; added mockRequire function to mock tests properly ([95de7a1](https://github.com/npm/make-fetch-happen/commit/95de7a171110907e30f41f489e4be983cd8184a5)) -* refactored functions into utilities ([74620dd](https://github.com/npm/make-fetch-happen/commit/74620dd7c2262ac46d9b4f6ac2dc9ff45a4f19ee)) -* updated dev deps; update tap; updated standard ([dce6eec](https://github.com/npm/make-fetch-happen/commit/dce6eece130fb20164a62eeabc6090811d8f14a4)) -* updated fetch tests; linting, logic, added tests ([d50aeaf](https://github.com/npm/make-fetch-happen/commit/d50aeafebeb5d8f7118d7f6660208f40ac487804)) - - -### Bug Fixes - -* format cache key with new URL object shape ([21cb6cc](https://github.com/npm/make-fetch-happen/commit/21cb6cc968aabff8b5c5c02e3666fb093fd6578c)) -* polish out an unnecessary URL object creation ([67a01d4](https://github.com/npm/make-fetch-happen/commit/67a01d46b2cacbadc22f49604ee524526cee3912)), closes [#14](https://github.com/npm/make-fetch-happen/issues/14) -* support user without password in proxy auth 
([e24bbf9](https://github.com/npm/make-fetch-happen/commit/e24bbf935bc8a2c49070cdb2518e5ee290143191)) -* updated 'files' property in package file ([945e40c](https://github.com/npm/make-fetch-happen/commit/945e40c7fbb59333e0c632c490683e4babc68dc1)) -* Use WhatWG URL objects over deprecated legacy url API ([28aca97](https://github.com/npm/make-fetch-happen/commit/28aca97dfb63ca003ebf62d1b961771cfbb2481d)) - - -* drop node 8 ([9fa7944](https://github.com/npm/make-fetch-happen/commit/9fa7944cbc603f3a194dfb440f519a7d5265653e)) - -## [6.1.0](https://github.com/npm/make-fetch-happen/compare/v6.0.1...v6.1.0) (2019-11-14) - - -### Bug Fixes - -* **streams:** change condition/logic of fitInMemory used when defining memoize ([c173723](https://github.com/npm/make-fetch-happen/commit/c173723)) - -### [6.0.1](https://github.com/npm/make-fetch-happen/compare/v6.0.0...v6.0.1) (2019-10-23) - -<a name="6.0.0"></a> -# [6.0.0](https://github.com/npm/make-fetch-happen/compare/v5.0.0...v6.0.0) (2019-10-01) - -### Bug Fixes - -* preserve rfc7234 5.5.4 warnings ([001b91e](https://github.com/npm/make-fetch-happen/commit/001b91e)) -* properly detect thrown HTTP "error" objects ([d7cbeb4](https://github.com/npm/make-fetch-happen/commit/d7cbeb4)) -* safely create synthetic response body for 304 ([bc70f88](https://github.com/npm/make-fetch-happen/commit/bc70f88)) - -### Features - -* **promises:** refactor bluebird with native promises ([7482d54](https://github.com/npm/make-fetch-happen/commit/7482d54)) - -### BREAKING CHANGES - -* **streams:** refactor node streams with minipass ([1d7f5a3](https://github.com/npm/make-fetch-happen/commit/1d7f5a3)) - -<a name="5.0.0"></a> -# [5.0.0](https://github.com/npm/make-fetch-happen/compare/v4.0.2...v5.0.0) (2019-07-15) - - -### Features - -* cacache@12, no need for uid/gid opts ([fdb956f](https://github.com/npm/make-fetch-happen/commit/fdb956f)) - - -### BREAKING CHANGES - -* cache uid and gid are inferred from the cache folder itself, -not passed in as options. - - - -<a name="4.0.2"></a> -## [4.0.2](https://github.com/npm/make-fetch-happen/compare/v4.0.1...v4.0.2) (2019-07-02) - - - -<a name="4.0.1"></a> -## [4.0.1](https://github.com/npm/make-fetch-happen/compare/v4.0.0...v4.0.1) (2018-04-12) - - -### Bug Fixes - -* **integrity:** use new sri.match() for verification ([4f371a0](https://github.com/npm/make-fetch-happen/commit/4f371a0)) - - - -<a name="4.0.0"></a> -# [4.0.0](https://github.com/npm/make-fetch-happen/compare/v3.0.0...v4.0.0) (2018-04-09) - - -### meta - -* drop node@4, add node@9 ([7b0191a](https://github.com/npm/make-fetch-happen/commit/7b0191a)) - - -### BREAKING CHANGES - -* node@4 is no longer supported - - - -<a name="3.0.0"></a> -# [3.0.0](https://github.com/npm/make-fetch-happen/compare/v2.6.0...v3.0.0) (2018-03-12) - - -### Bug Fixes - -* **license:** switch to ISC ([#49](https://github.com/npm/make-fetch-happen/issues/49)) ([bf90c6d](https://github.com/npm/make-fetch-happen/commit/bf90c6d)) -* **standard:** standard@11 update ([ff0aa70](https://github.com/npm/make-fetch-happen/commit/ff0aa70)) - - -### BREAKING CHANGES - -* **license:** license changed from CC0 to ISC. 
- - - -<a name="2.6.0"></a> -# [2.6.0](https://github.com/npm/make-fetch-happen/compare/v2.5.0...v2.6.0) (2017-11-14) - - -### Bug Fixes - -* **integrity:** disable node-fetch compress when checking integrity (#42) ([a7cc74c](https://github.com/npm/make-fetch-happen/commit/a7cc74c)) - - -### Features - -* **onretry:** Add `options.onRetry` (#48) ([f90ccff](https://github.com/npm/make-fetch-happen/commit/f90ccff)) - - - -<a name="2.5.0"></a> -# [2.5.0](https://github.com/npm/make-fetch-happen/compare/v2.4.13...v2.5.0) (2017-08-24) - - -### Bug Fixes - -* **agent:** support timeout durations greater than 30 seconds ([04875ae](https://github.com/npm/make-fetch-happen/commit/04875ae)), closes [#35](https://github.com/npm/make-fetch-happen/issues/35) - - -### Features - -* **cache:** export cache deletion functionality (#40) ([3da4250](https://github.com/npm/make-fetch-happen/commit/3da4250)) - - - -<a name="2.4.13"></a> -## [2.4.13](https://github.com/npm/make-fetch-happen/compare/v2.4.12...v2.4.13) (2017-06-29) - - -### Bug Fixes - -* **deps:** bump other deps for bugfixes ([eab8297](https://github.com/npm/make-fetch-happen/commit/eab8297)) -* **proxy:** bump proxy deps with bugfixes (#32) ([632f860](https://github.com/npm/make-fetch-happen/commit/632f860)), closes [#32](https://github.com/npm/make-fetch-happen/issues/32) - - - -<a name="2.4.12"></a> -## [2.4.12](https://github.com/npm/make-fetch-happen/compare/v2.4.11...v2.4.12) (2017-06-06) - - -### Bug Fixes - -* **cache:** encode x-local-cache-etc headers to be header-safe ([dc9fb1b](https://github.com/npm/make-fetch-happen/commit/dc9fb1b)) - - - -<a name="2.4.11"></a> -## [2.4.11](https://github.com/npm/make-fetch-happen/compare/v2.4.10...v2.4.11) (2017-06-05) - - -### Bug Fixes - -* **deps:** bump deps with ssri fix ([bef1994](https://github.com/npm/make-fetch-happen/commit/bef1994)) - - - -<a name="2.4.10"></a> -## [2.4.10](https://github.com/npm/make-fetch-happen/compare/v2.4.9...v2.4.10) (2017-05-31) - - -### Bug Fixes - -* **deps:** bump dep versions with bugfixes ([0af4003](https://github.com/npm/make-fetch-happen/commit/0af4003)) -* **proxy:** use auth parameter for proxy authentication (#30) ([c687306](https://github.com/npm/make-fetch-happen/commit/c687306)) - - - -<a name="2.4.9"></a> -## [2.4.9](https://github.com/npm/make-fetch-happen/compare/v2.4.8...v2.4.9) (2017-05-25) - - -### Bug Fixes - -* **cache:** use the passed-in promise for resolving cache stuff ([4c46257](https://github.com/npm/make-fetch-happen/commit/4c46257)) - - - -<a name="2.4.8"></a> -## [2.4.8](https://github.com/npm/make-fetch-happen/compare/v2.4.7...v2.4.8) (2017-05-25) - - -### Bug Fixes - -* **cache:** pass uid/gid/Promise through to cache ([a847c92](https://github.com/npm/make-fetch-happen/commit/a847c92)) - - - -<a name="2.4.7"></a> -## [2.4.7](https://github.com/npm/make-fetch-happen/compare/v2.4.6...v2.4.7) (2017-05-24) - - -### Bug Fixes - -* **deps:** pull in various fixes from deps ([fc2a587](https://github.com/npm/make-fetch-happen/commit/fc2a587)) - - - -<a name="2.4.6"></a> -## [2.4.6](https://github.com/npm/make-fetch-happen/compare/v2.4.5...v2.4.6) (2017-05-24) - - -### Bug Fixes - -* **proxy:** choose agent for http(s)-proxy by protocol of destUrl ([ea4832a](https://github.com/npm/make-fetch-happen/commit/ea4832a)) -* **proxy:** make socks proxy working ([1de810a](https://github.com/npm/make-fetch-happen/commit/1de810a)) -* **proxy:** revert previous proxy solution ([563b0d8](https://github.com/npm/make-fetch-happen/commit/563b0d8)) - - - 
-<a name="2.4.5"></a> -## [2.4.5](https://github.com/npm/make-fetch-happen/compare/v2.4.4...v2.4.5) (2017-05-24) - - -### Bug Fixes - -* **proxy:** use the destination url when determining agent ([1a714e7](https://github.com/npm/make-fetch-happen/commit/1a714e7)) - - - -<a name="2.4.4"></a> -## [2.4.4](https://github.com/npm/make-fetch-happen/compare/v2.4.3...v2.4.4) (2017-05-23) - - -### Bug Fixes - -* **redirect:** handle redirects explicitly (#27) ([4c4af54](https://github.com/npm/make-fetch-happen/commit/4c4af54)) - - - -<a name="2.4.3"></a> -## [2.4.3](https://github.com/npm/make-fetch-happen/compare/v2.4.2...v2.4.3) (2017-05-06) - - -### Bug Fixes - -* **redirect:** redirects now delete authorization if hosts fail to match ([c071805](https://github.com/npm/make-fetch-happen/commit/c071805)) - - - -<a name="2.4.2"></a> -## [2.4.2](https://github.com/npm/make-fetch-happen/compare/v2.4.1...v2.4.2) (2017-05-04) - - -### Bug Fixes - -* **cache:** reduce race condition window by checking for content ([24544b1](https://github.com/npm/make-fetch-happen/commit/24544b1)) -* **match:** Rewrite the conditional stream logic (#25) ([66bba4b](https://github.com/npm/make-fetch-happen/commit/66bba4b)) - - - -<a name="2.4.1"></a> -## [2.4.1](https://github.com/npm/make-fetch-happen/compare/v2.4.0...v2.4.1) (2017-04-28) - - -### Bug Fixes - -* **memoization:** missed spots + allow passthrough of memo objs ([ac0cd12](https://github.com/npm/make-fetch-happen/commit/ac0cd12)) - - - -<a name="2.4.0"></a> -# [2.4.0](https://github.com/npm/make-fetch-happen/compare/v2.3.0...v2.4.0) (2017-04-28) - - -### Bug Fixes - -* **memoize:** cacache had a broken memoizer ([8a9ed4c](https://github.com/npm/make-fetch-happen/commit/8a9ed4c)) - - -### Features - -* **memoization:** only slurp stuff into memory if opts.memoize is not false ([0744adc](https://github.com/npm/make-fetch-happen/commit/0744adc)) - - - -<a name="2.3.0"></a> -# [2.3.0](https://github.com/npm/make-fetch-happen/compare/v2.2.6...v2.3.0) (2017-04-27) - - -### Features - -* **agent:** added opts.strictSSL and opts.localAddress ([c35015a](https://github.com/npm/make-fetch-happen/commit/c35015a)) -* **proxy:** Added opts.noProxy and NO_PROXY support ([f45c915](https://github.com/npm/make-fetch-happen/commit/f45c915)) - - - -<a name="2.2.6"></a> -## [2.2.6](https://github.com/npm/make-fetch-happen/compare/v2.2.5...v2.2.6) (2017-04-26) - - -### Bug Fixes - -* **agent:** check uppercase & lowercase proxy env (#24) ([acf2326](https://github.com/npm/make-fetch-happen/commit/acf2326)), closes [#22](https://github.com/npm/make-fetch-happen/issues/22) -* **deps:** switch to node-fetch-npm and stop bundling ([3db603b](https://github.com/npm/make-fetch-happen/commit/3db603b)) - - - -<a name="2.2.5"></a> -## [2.2.5](https://github.com/npm/make-fetch-happen/compare/v2.2.4...v2.2.5) (2017-04-23) - - -### Bug Fixes - -* **deps:** bump cacache and use its size feature ([926c1d3](https://github.com/npm/make-fetch-happen/commit/926c1d3)) - - - -<a name="2.2.4"></a> -## [2.2.4](https://github.com/npm/make-fetch-happen/compare/v2.2.3...v2.2.4) (2017-04-18) - - -### Bug Fixes - -* **integrity:** hash verification issues fixed ([07f9402](https://github.com/npm/make-fetch-happen/commit/07f9402)) - - - -<a name="2.2.3"></a> -## [2.2.3](https://github.com/npm/make-fetch-happen/compare/v2.2.2...v2.2.3) (2017-04-18) - - -### Bug Fixes - -* **staleness:** responses older than 8h were never stale :< ([b54dd75](https://github.com/npm/make-fetch-happen/commit/b54dd75)) -* 
**warning:** remove spurious warning, make format more spec-compliant ([2e4f6bb](https://github.com/npm/make-fetch-happen/commit/2e4f6bb)) - - - -<a name="2.2.2"></a> -## [2.2.2](https://github.com/npm/make-fetch-happen/compare/v2.2.1...v2.2.2) (2017-04-12) - - -### Bug Fixes - -* **retry:** stop retrying 404s ([6fafd53](https://github.com/npm/make-fetch-happen/commit/6fafd53)) - - - -<a name="2.2.1"></a> -## [2.2.1](https://github.com/npm/make-fetch-happen/compare/v2.2.0...v2.2.1) (2017-04-10) - - -### Bug Fixes - -* **deps:** move test-only deps to devDeps ([2daaf80](https://github.com/npm/make-fetch-happen/commit/2daaf80)) - - - -<a name="2.2.0"></a> -# [2.2.0](https://github.com/npm/make-fetch-happen/compare/v2.1.0...v2.2.0) (2017-04-09) - - -### Bug Fixes - -* **cache:** treat caches as private ([57b7dc2](https://github.com/npm/make-fetch-happen/commit/57b7dc2)) - - -### Features - -* **retry:** accept shorthand retry settings ([dfed69d](https://github.com/npm/make-fetch-happen/commit/dfed69d)) - - - -<a name="2.1.0"></a> -# [2.1.0](https://github.com/npm/make-fetch-happen/compare/v2.0.4...v2.1.0) (2017-04-09) - - -### Features - -* **cache:** cache now obeys Age and a variety of other things (#13) ([7b9652d](https://github.com/npm/make-fetch-happen/commit/7b9652d)) - - - -<a name="2.0.4"></a> -## [2.0.4](https://github.com/npm/make-fetch-happen/compare/v2.0.3...v2.0.4) (2017-04-09) - - -### Bug Fixes - -* **agent:** accept Request as fetch input, not just strings ([b71669a](https://github.com/npm/make-fetch-happen/commit/b71669a)) - - - -<a name="2.0.3"></a> -## [2.0.3](https://github.com/npm/make-fetch-happen/compare/v2.0.2...v2.0.3) (2017-04-09) - - -### Bug Fixes - -* **deps:** seriously ([c29e7e7](https://github.com/npm/make-fetch-happen/commit/c29e7e7)) - - - -<a name="2.0.2"></a> -## [2.0.2](https://github.com/npm/make-fetch-happen/compare/v2.0.1...v2.0.2) (2017-04-09) - - -### Bug Fixes - -* **deps:** use bundleDeps instead ([c36ebf0](https://github.com/npm/make-fetch-happen/commit/c36ebf0)) - - - -<a name="2.0.1"></a> -## [2.0.1](https://github.com/npm/make-fetch-happen/compare/v2.0.0...v2.0.1) (2017-04-09) - - -### Bug Fixes - -* **deps:** make sure node-fetch tarball included in release ([3bf49d1](https://github.com/npm/make-fetch-happen/commit/3bf49d1)) - - - -<a name="2.0.0"></a> -# [2.0.0](https://github.com/npm/make-fetch-happen/compare/v1.7.0...v2.0.0) (2017-04-09) - - -### Bug Fixes - -* **deps:** manually pull in newer node-fetch to avoid babel prod dep ([66e5e87](https://github.com/npm/make-fetch-happen/commit/66e5e87)) -* **retry:** be more specific about when we retry ([a47b782](https://github.com/npm/make-fetch-happen/commit/a47b782)) - - -### Features - -* **agent:** add ca/cert/key support to auto-agent (#15) ([57585a7](https://github.com/npm/make-fetch-happen/commit/57585a7)) - - -### BREAKING CHANGES - -* **agent:** pac proxies are no longer supported. -* **retry:** Retry logic has changes. - -* 404s, 420s, and 429s all retry now. -* ENOTFOUND no longer retries. -* Only ECONNRESET, ECONNREFUSED, EADDRINUSE, ETIMEDOUT, and `request-timeout` errors are retried. 
- - - -<a name="1.7.0"></a> -# [1.7.0](https://github.com/npm/make-fetch-happen/compare/v1.6.0...v1.7.0) (2017-04-08) - - -### Features - -* **cache:** add useful headers to inform users about cached data ([9bd7b00](https://github.com/npm/make-fetch-happen/commit/9bd7b00)) - - - -<a name="1.6.0"></a> -# [1.6.0](https://github.com/npm/make-fetch-happen/compare/v1.5.1...v1.6.0) (2017-04-06) - - -### Features - -* **agent:** better, keepalive-supporting, default http agents ([16277f6](https://github.com/npm/make-fetch-happen/commit/16277f6)) - - - -<a name="1.5.1"></a> -## [1.5.1](https://github.com/npm/make-fetch-happen/compare/v1.5.0...v1.5.1) (2017-04-05) - - -### Bug Fixes - -* **cache:** bump cacache for its fixed error messages ([2f2b916](https://github.com/npm/make-fetch-happen/commit/2f2b916)) -* **cache:** fix handling of errors in cache reads ([5729222](https://github.com/npm/make-fetch-happen/commit/5729222)) - - - -<a name="1.5.0"></a> -# [1.5.0](https://github.com/npm/make-fetch-happen/compare/v1.4.0...v1.5.0) (2017-04-04) - - -### Features - -* **retry:** retry requests on 408 timeouts, too ([8d8b5bd](https://github.com/npm/make-fetch-happen/commit/8d8b5bd)) - - - -<a name="1.4.0"></a> -# [1.4.0](https://github.com/npm/make-fetch-happen/compare/v1.3.1...v1.4.0) (2017-04-04) - - -### Bug Fixes - -* **cache:** stop relying on BB.catch ([2b04494](https://github.com/npm/make-fetch-happen/commit/2b04494)) - - -### Features - -* **retry:** report retry attempt number as extra header ([fd50927](https://github.com/npm/make-fetch-happen/commit/fd50927)) - - - -<a name="1.3.1"></a> -## [1.3.1](https://github.com/npm/make-fetch-happen/compare/v1.3.0...v1.3.1) (2017-04-04) - - -### Bug Fixes - -* **cache:** pretend cache entry is missing on ENOENT ([9c2bb26](https://github.com/npm/make-fetch-happen/commit/9c2bb26)) - - - -<a name="1.3.0"></a> -# [1.3.0](https://github.com/npm/make-fetch-happen/compare/v1.2.1...v1.3.0) (2017-04-04) - - -### Bug Fixes - -* **cache:** if metadata is missing for some odd reason, ignore the entry ([a021a6b](https://github.com/npm/make-fetch-happen/commit/a021a6b)) - - -### Features - -* **cache:** add special headers when request was loaded straight from cache ([8a7dbd1](https://github.com/npm/make-fetch-happen/commit/8a7dbd1)) -* **cache:** allow configuring algorithms to be calculated on insertion ([bf4a0f2](https://github.com/npm/make-fetch-happen/commit/bf4a0f2)) - - - -<a name="1.2.1"></a> -## [1.2.1](https://github.com/npm/make-fetch-happen/compare/v1.2.0...v1.2.1) (2017-04-03) - - -### Bug Fixes - -* **integrity:** update cacache and ssri and change EBADCHECKSUM -> EINTEGRITY ([b6cf6f6](https://github.com/npm/make-fetch-happen/commit/b6cf6f6)) - - - -<a name="1.2.0"></a> -# [1.2.0](https://github.com/npm/make-fetch-happen/compare/v1.1.0...v1.2.0) (2017-04-03) - - -### Features - -* **integrity:** full Subresource Integrity support (#10) ([a590159](https://github.com/npm/make-fetch-happen/commit/a590159)) - - - -<a name="1.1.0"></a> -# [1.1.0](https://github.com/npm/make-fetch-happen/compare/v1.0.1...v1.1.0) (2017-04-01) - - -### Features - -* **opts:** fetch.defaults() for default options ([522a65e](https://github.com/npm/make-fetch-happen/commit/522a65e)) - - - -<a name="1.0.1"></a> -## [1.0.1](https://github.com/npm/make-fetch-happen/compare/v1.0.0...v1.0.1) (2017-04-01) - - - -<a name="1.0.0"></a> -# 1.0.0 (2017-04-01) - - -### Bug Fixes - -* **cache:** default on cache-control header ([b872a2c](https://github.com/npm/make-fetch-happen/commit/b872a2c)) 
-* standard stuff and cache matching ([753f2c2](https://github.com/npm/make-fetch-happen/commit/753f2c2)) -* **agent:** nudge around things with opts.agent ([ed62b57](https://github.com/npm/make-fetch-happen/commit/ed62b57)) -* **agent:** {agent: false} has special behavior ([b8cc923](https://github.com/npm/make-fetch-happen/commit/b8cc923)) -* **cache:** invalidation on non-GET ([fe78fac](https://github.com/npm/make-fetch-happen/commit/fe78fac)) -* **cache:** make force-cache and only-if-cached work as expected ([f50e9df](https://github.com/npm/make-fetch-happen/commit/f50e9df)) -* **cache:** more spec compliance ([d5a56db](https://github.com/npm/make-fetch-happen/commit/d5a56db)) -* **cache:** only cache 200 gets ([0abb25a](https://github.com/npm/make-fetch-happen/commit/0abb25a)) -* **cache:** only load cache code if cache opt is a string ([250fcd5](https://github.com/npm/make-fetch-happen/commit/250fcd5)) -* **cache:** oops ([e3fa15a](https://github.com/npm/make-fetch-happen/commit/e3fa15a)) -* **cache:** refactored warning removal into main file ([5b0a9f9](https://github.com/npm/make-fetch-happen/commit/5b0a9f9)) -* **cache:** req constructor no longer needed in Cache ([5b74cbc](https://github.com/npm/make-fetch-happen/commit/5b74cbc)) -* **cache:** standard fetch api calls cacheMode "cache" ([6fba805](https://github.com/npm/make-fetch-happen/commit/6fba805)) -* **cache:** was using wrong method for non-GET/HEAD cache invalidation ([810763a](https://github.com/npm/make-fetch-happen/commit/810763a)) -* **caching:** a bunch of cache-related fixes ([8ebda1d](https://github.com/npm/make-fetch-happen/commit/8ebda1d)) -* **deps:** `cacache[@6](https://github.com/6).3.0` - race condition fixes ([9528442](https://github.com/npm/make-fetch-happen/commit/9528442)) -* **freshness:** fix regex for cacheControl matching ([070db86](https://github.com/npm/make-fetch-happen/commit/070db86)) -* **freshness:** fixed default freshness heuristic value ([5d29e88](https://github.com/npm/make-fetch-happen/commit/5d29e88)) -* **logging:** remove console.log calls ([a1d0a47](https://github.com/npm/make-fetch-happen/commit/a1d0a47)) -* **method:** node-fetch guarantees uppercase ([a1d68d6](https://github.com/npm/make-fetch-happen/commit/a1d68d6)) -* **opts:** simplified opts handling ([516fd6e](https://github.com/npm/make-fetch-happen/commit/516fd6e)) -* **proxy:** pass proxy option directly to ProxyAgent ([3398460](https://github.com/npm/make-fetch-happen/commit/3398460)) -* **retry:** false -> {retries: 0} ([297fbb6](https://github.com/npm/make-fetch-happen/commit/297fbb6)) -* **retry:** only retry put if body is not a stream ([a24e599](https://github.com/npm/make-fetch-happen/commit/a24e599)) -* **retry:** skip retries if body is a stream for ANY method ([780c0f8](https://github.com/npm/make-fetch-happen/commit/780c0f8)) - - -### Features - -* **api:** initial implementation -- can make and cache requests ([7d55b49](https://github.com/npm/make-fetch-happen/commit/7d55b49)) -* **fetch:** injectable cache, and retry support ([87b84bf](https://github.com/npm/make-fetch-happen/commit/87b84bf)) - - -### BREAKING CHANGES - -* **cache:** opts.cache -> opts.cacheManager; opts.cacheMode -> opts.cache -* **fetch:** opts.cache accepts a Cache-like obj or a path. Requests are now retried. 
-* **api:** actual api implemented diff --git a/node_modules/make-fetch-happen/README.md b/node_modules/make-fetch-happen/README.md deleted file mode 100644 index f454469e68508..0000000000000 --- a/node_modules/make-fetch-happen/README.md +++ /dev/null @@ -1,404 +0,0 @@ -# make-fetch-happen -[![npm version](https://img.shields.io/npm/v/make-fetch-happen.svg)](https://npm.im/make-fetch-happen) [![license](https://img.shields.io/npm/l/make-fetch-happen.svg)](https://npm.im/make-fetch-happen) [![Travis](https://img.shields.io/travis/npm/make-fetch-happen.svg)](https://travis-ci.org/npm/make-fetch-happen) [![Coverage Status](https://coveralls.io/repos/github/npm/make-fetch-happen/badge.svg?branch=latest)](https://coveralls.io/github/npm/make-fetch-happen?branch=latest) - -[`make-fetch-happen`](https://github.com/npm/make-fetch-happen) is a Node.js -library that wraps [`minipass-fetch`](https://github.com/npm/minipass-fetch) with additional -features [`minipass-fetch`](https://github.com/npm/minipass-fetch) doesn't intend to include, including HTTP Cache support, request -pooling, proxies, retries, [and more](#features)! - -## Install - -`$ npm install --save make-fetch-happen` - -## Table of Contents - -* [Example](#example) -* [Features](#features) -* [Contributing](#contributing) -* [API](#api) - * [`fetch`](#fetch) - * [`fetch.defaults`](#fetch-defaults) - * [`minipass-fetch` options](#minipass-fetch-options) - * [`make-fetch-happen` options](#extra-options) - * [`opts.cacheManager`](#opts-cache-manager) - * [`opts.cache`](#opts-cache) - * [`opts.proxy`](#opts-proxy) - * [`opts.noProxy`](#opts-no-proxy) - * [`opts.ca, opts.cert, opts.key`](#https-opts) - * [`opts.maxSockets`](#opts-max-sockets) - * [`opts.retry`](#opts-retry) - * [`opts.onRetry`](#opts-onretry) - * [`opts.integrity`](#opts-integrity) -* [Message From Our Sponsors](#wow) - -### Example - -```javascript -const fetch = require('make-fetch-happen').defaults({ - cacheManager: './my-cache' // path where cache will be written (and read) -}) - -fetch('https://registry.npmjs.org/make-fetch-happen').then(res => { - return res.json() // download the body as JSON -}).then(body => { - console.log(`got ${body.name} from web`) - return fetch('https://registry.npmjs.org/make-fetch-happen', { - cache: 'no-cache' // forces a conditional request - }) -}).then(res => { - console.log(res.status) // 304! cache validated! - return res.json().then(body => { - console.log(`got ${body.name} from cache`) - }) -}) -``` - -### Features - -* Builds around [`minipass-fetch`](https://npm.im/minipass-fetch) for the core [`fetch` API](https://fetch.spec.whatwg.org) implementation -* Request pooling out of the box -* Quite fast, really -* Automatic HTTP-semantics-aware request retries -* Cache-fallback automatic "offline mode" -* Proxy support (http, https, socks, socks4, socks5) -* Built-in request caching following full HTTP caching rules (`Cache-Control`, `ETag`, `304`s, cache fallback on error, etc). -* Customize cache storage with any [Cache API](https://developer.mozilla.org/en-US/docs/Web/API/Cache)-compliant `Cache` instance. Cache to Redis! -* Node.js Stream support -* Transparent gzip and deflate support -* [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) support -* Literally punches nazis -* (PENDING) Range request caching and resuming - -### Contributing - -The make-fetch-happen team enthusiastically welcomes contributions and project participation! 
There's a bunch of things you can do if you want to contribute! The [Contributor Guide](https://github.com/npm/cli/blob/latest/CONTRIBUTING.md) outlines the process for community interaction and contribution. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear. - -All participants and maintainers in this project are expected to follow the [npm Code of Conduct](https://www.npmjs.com/policies/conduct), and just generally be excellent to each other. - -Please refer to the [Changelog](CHANGELOG.md) for project history details, too. - -Happy hacking! - -### API - -#### <a name="fetch"></a> `> fetch(uriOrRequest, [opts]) -> Promise<Response>` - -This function implements most of the [`fetch` API](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch): given a `uri` string or a `Request` instance, it will fire off an http request and return a Promise containing the relevant response. - -If `opts` is provided, the [`minipass-fetch`-specific options](#minipass-fetch-options) will be passed to that library. There are also [additional options](#extra-options) specific to make-fetch-happen that add various features, such as HTTP caching, integrity verification, proxy support, and more. - -##### Example - -```javascript -fetch('https://google.com').then(res => res.buffer()) -``` - -#### <a name="fetch-defaults"></a> `> fetch.defaults([defaultUrl], [defaultOpts])` - -Returns a new `fetch` function that will call `make-fetch-happen` using `defaultUrl` and `defaultOpts` as default values to any calls. - -A defaulted `fetch` will also have a `.defaults()` method, so they can be chained. - -##### Example - -```javascript -const fetch = require('make-fetch-happen').defaults({ - cacheManager: './my-local-cache' -}) - -fetch('https://registry.npmjs.org/make-fetch-happen') // will always use the cache -``` - -#### <a name="minipass-fetch-options"></a> `> minipass-fetch options` - -The following options for `minipass-fetch` are used as-is: - -* method -* body -* redirect -* follow -* timeout -* compress -* size - -These other options are modified or augmented by make-fetch-happen: - -* headers - Default `User-Agent` set to make-fetch happen. `Connection` is set to `keep-alive` or `close` automatically depending on `opts.agent`. -* agent - * If agent is null, an http or https Agent will be automatically used. By default, these will be `http.globalAgent` and `https.globalAgent`. - * If [`opts.proxy`](#opts-proxy) is provided and `opts.agent` is null, the agent will be set to an appropriate proxy-handling agent. - * If `opts.agent` is an object, it will be used as the request-pooling agent argument for this request. - * If `opts.agent` is `false`, it will be passed as-is to the underlying request library. This causes a new Agent to be spawned for every request. - -For more details, see [the documentation for `minipass-fetch` itself](https://github.com/npm/minipass-fetch#options). - -#### <a name="extra-options"></a> `> make-fetch-happen options` - -make-fetch-happen augments the `minipass-fetch` API with additional features available through extra options. The following extra options are available: - -* [`opts.cacheManager`](#opts-cache-manager) - Cache target to read/write -* [`opts.cache`](#opts-cache) - `fetch` cache mode. Controls cache *behavior*. -* [`opts.proxy`](#opts-proxy) - Proxy agent -* [`opts.noProxy`](#opts-no-proxy) - Domain segments to disable proxying for. 
-* [`opts.ca, opts.cert, opts.key, opts.strictSSL`](#https-opts) -* [`opts.localAddress`](#opts-local-address) -* [`opts.maxSockets`](#opts-max-sockets) -* [`opts.retry`](#opts-retry) - Request retry settings -* [`opts.onRetry`](#opts-onretry) - a function called whenever a retry is attempted -* [`opts.integrity`](#opts-integrity) - [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) metadata. - -#### <a name="opts-cache-manager"></a> `> opts.cacheManager` - -Either a `String` or a `Cache`. If the former, it will be assumed to be a `Path` to be used as the cache root for [`cacache`](https://npm.im/cacache). - -If an object is provided, it will be assumed to be a compliant [`Cache` instance](https://developer.mozilla.org/en-US/docs/Web/API/Cache). Only `Cache.match()`, `Cache.put()`, and `Cache.delete()` are required. Options objects will not be passed in to `match()` or `delete()`. - -By implementing this API, you can customize the storage backend for make-fetch-happen itself -- for example, you could implement a cache that uses `redis` for caching, or simply keeps everything in memory. Most of the caching logic exists entirely on the make-fetch-happen side, so the only thing you need to worry about is reading, writing, and deleting, as well as making sure `fetch.Response` objects are what gets returned. - -You can refer to `cache.js` in the make-fetch-happen source code for a reference implementation. - -**NOTE**: Requests will not be cached unless their response bodies are consumed. You will need to use one of the `res.json()`, `res.buffer()`, etc methods on the response, or drain the `res.body` stream, in order for it to be written. - -The default cache manager also adds the following headers to cached responses: - -* `X-Local-Cache`: Path to the cache the content was found in -* `X-Local-Cache-Key`: Unique cache entry key for this response -* `X-Local-Cache-Hash`: Specific integrity hash for the cached entry -* `X-Local-Cache-Time`: UTCString of the cache insertion time for the entry - -Using [`cacache`](https://npm.im/cacache), a call like this may be used to -manually fetch the cached entry: - -```javascript -const h = response.headers -cacache.get(h.get('x-local-cache'), h.get('x-local-cache-key')) - -// grab content only, directly: -cacache.get.byDigest(h.get('x-local-cache'), h.get('x-local-cache-hash')) -``` - -##### Example - -```javascript -fetch('https://registry.npmjs.org/make-fetch-happen', { - cacheManager: './my-local-cache' -}) // -> 200-level response will be written to disk - -fetch('https://npm.im/cacache', { - cacheManager: new MyCustomRedisCache(process.env.PORT) -}) // -> 200-level response will be written to redis -``` - -A possible (minimal) implementation for `MyCustomRedisCache`: - -```javascript -const bluebird = require('bluebird') -const redis = require("redis") -bluebird.promisifyAll(redis.RedisClient.prototype) -class MyCustomRedisCache { - constructor (opts) { - this.redis = redis.createClient(opts) - } - match (req) { - return this.redis.getAsync(req.url).then(res => { - if (res) { - const parsed = JSON.parse(res) - return new fetch.Response(parsed.body, { - url: req.url, - headers: parsed.headers, - status: 200 - }) - } - }) - } - put (req, res) { - return res.buffer().then(body => { - return this.redis.setAsync(req.url, JSON.stringify({ - body: body, - headers: res.headers.raw() - })) - }).then(() => { - // return the response itself - return res - }) - } - 'delete' (req) { - return 
this.redis.unlinkAsync(req.url) - } -} -``` - -#### <a name="opts-cache"></a> `> opts.cache` - -This option follows the standard `fetch` API cache option. This option will do nothing if [`opts.cacheManager`](#opts-cache-manager) is null. The following values are accepted (as strings): - -* `default` - Fetch will inspect the HTTP cache on the way to the network. If there is a fresh response it will be used. If there is a stale response a conditional request will be created, and a normal request otherwise. It then updates the HTTP cache with the response. If the revalidation request fails (for example, on a 500 or if you're offline), the stale response will be returned. -* `no-store` - Fetch behaves as if there is no HTTP cache at all. -* `reload` - Fetch behaves as if there is no HTTP cache on the way to the network. Ergo, it creates a normal request and updates the HTTP cache with the response. -* `no-cache` - Fetch creates a conditional request if there is a response in the HTTP cache and a normal request otherwise. It then updates the HTTP cache with the response. -* `force-cache` - Fetch uses any response in the HTTP cache matching the request, not paying attention to staleness. If there was no response, it creates a normal request and updates the HTTP cache with the response. -* `only-if-cached` - Fetch uses any response in the HTTP cache matching the request, not paying attention to staleness. If there was no response, it returns a network error. (Can only be used when request’s mode is "same-origin". Any cached redirects will be followed assuming request’s redirect mode is "follow" and the redirects do not violate request’s mode.) - -(Note: option descriptions are taken from https://fetch.spec.whatwg.org/#http-network-or-cache-fetch) - -##### Example - -```javascript -const fetch = require('make-fetch-happen').defaults({ - cacheManager: './my-cache' -}) - -// Will error with ENOTCACHED if we haven't already cached this url -fetch('https://registry.npmjs.org/make-fetch-happen', { - cache: 'only-if-cached' -}) - -// Will refresh any local content and cache the new response -fetch('https://registry.npmjs.org/make-fetch-happen', { - cache: 'reload' -}) - -// Will use any local data, even if stale. Otherwise, will hit network. -fetch('https://registry.npmjs.org/make-fetch-happen', { - cache: 'force-cache' -}) -``` - -#### <a name="opts-proxy"></a> `> opts.proxy` - -A string or `new url.URL()`-d URI to proxy through. Different Proxy handlers will be -used depending on the proxy's protocol. - -Additionally, `process.env.HTTP_PROXY`, `process.env.HTTPS_PROXY`, and -`process.env.PROXY` are used if present and no `opts.proxy` value is provided. - -(Pending) `process.env.NO_PROXY` may also be configured to skip proxying requests for all, or specific domains. - -##### Example - -```javascript -fetch('https://registry.npmjs.org/make-fetch-happen', { - proxy: 'https://corporate.yourcompany.proxy:4445' -}) - -fetch('https://registry.npmjs.org/make-fetch-happen', { - proxy: { - protocol: 'https:', - hostname: 'corporate.yourcompany.proxy', - port: 4445 - } -}) -``` - -#### <a name="opts-no-proxy"></a> `> opts.noProxy` - -If present, should be a comma-separated string or an array of domain extensions -that a proxy should _not_ be used for. - -This option may also be provided through `process.env.NO_PROXY`. 
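For illustration, a minimal sketch of both accepted forms, reusing the proxy URL from the example above (the internal hostnames here are hypothetical):

```javascript
// hypothetical internal domains, shown only to illustrate the two accepted shapes of noProxy
fetch('https://registry.internal.example.com/make-fetch-happen', {
  proxy: 'https://corporate.yourcompany.proxy:4445',
  noProxy: 'internal.example.com,localhost' // comma-separated string
})

fetch('https://registry.internal.example.com/make-fetch-happen', {
  proxy: 'https://corporate.yourcompany.proxy:4445',
  noProxy: ['internal.example.com', 'localhost'] // array of domain extensions
})
```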
- -#### <a name="https-opts"></a> `> opts.ca, opts.cert, opts.key, opts.strictSSL` - -These values are passed in directly to the HTTPS agent and will be used for both -proxied and unproxied outgoing HTTPS requests. They mostly correspond to the -same options the `https` module accepts, which will be themselves passed to -`tls.connect()`. `opts.strictSSL` corresponds to `rejectUnauthorized`. - -#### <a name="opts-local-address"></a> `> opts.localAddress` - -Passed directly to `http` and `https` request calls. Determines the local -address to bind to. - -#### <a name="opts-max-sockets"></a> `> opts.maxSockets` - -Default: 15 - -Maximum number of active concurrent sockets to use for the underlying -Http/Https/Proxy agents. This setting applies once per spawned agent. - -15 is probably a _pretty good value_ for most use-cases, and balances speed -with, uh, not knocking out people's routers. 🤓 - -#### <a name="opts-retry"></a> `> opts.retry` - -An object that can be used to tune request retry settings. Retries will only be attempted on the following conditions: - -* Request method is NOT `POST` AND -* Request status is one of: `408`, `420`, `429`, or any status in the 500-range. OR -* Request errored with `ECONNRESET`, `ECONNREFUSED`, `EADDRINUSE`, `ETIMEDOUT`, or the `fetch` error `request-timeout`. - -The following are worth noting as explicitly not retried: - -* `getaddrinfo ENOTFOUND` and will be assumed to be either an unreachable domain or the user will be assumed offline. If a response is cached, it will be returned immediately. -* `ECONNRESET` currently has no support for restarting. It will eventually be supported but requires a bit more juggling due to streaming. - -If `opts.retry` is `false`, it is equivalent to `{retries: 0}` - -If `opts.retry` is a number, it is equivalent to `{retries: num}` - -The following retry options are available if you want more control over it: - -* retries -* factor -* minTimeout -* maxTimeout -* randomize - -For details on what each of these do, refer to the [`retry`](https://npm.im/retry) documentation. - -##### Example - -```javascript -fetch('https://flaky.site.com', { - retry: { - retries: 10, - randomize: true - } -}) - -fetch('http://reliable.site.com', { - retry: false -}) - -fetch('http://one-more.site.com', { - retry: 3 -}) -``` - -#### <a name="opts-onretry"></a> `> opts.onRetry` - -A function called whenever a retry is attempted. - -##### Example - -```javascript -fetch('https://flaky.site.com', { - onRetry() { - console.log('we will retry!') - } -}) -``` - -#### <a name="opts-integrity"></a> `> opts.integrity` - -Matches the response body against the given [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) metadata. If verification fails, the request will fail with an `EINTEGRITY` error. - -`integrity` may either be a string or an [`ssri`](https://npm.im/ssri) `Integrity`-like. 
- -##### Example - -```javascript -fetch('https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-1.0.0.tgz', { - integrity: 'sha1-o47j7zAYnedYFn1dF/fR9OV3z8Q=' -}) // -> ok - -fetch('https://malicious-registry.org/make-fetch-happen/-/make-fetch-happen-1.0.0.tgz', { - integrity: 'sha1-o47j7zAYnedYFn1dF/fR9OV3z8Q=' -}) // Error: EINTEGRITY -``` - -### <a name="wow"></a> Message From Our Sponsors - -![](stop.gif) - -![](happening.gif) diff --git a/node_modules/make-fetch-happen/cache.js b/node_modules/make-fetch-happen/cache.js deleted file mode 100644 index 234e3a41d0519..0000000000000 --- a/node_modules/make-fetch-happen/cache.js +++ /dev/null @@ -1,260 +0,0 @@ -'use strict' - -const fetch = require('minipass-fetch') -const cacache = require('cacache') -const ssri = require('ssri') -const url = require('url') - -const Minipass = require('minipass') -const MinipassFlush = require('minipass-flush') -const MinipassCollect = require('minipass-collect') -const MinipassPipeline = require('minipass-pipeline') - -const MAX_MEM_SIZE = 5 * 1024 * 1024 // 5MB - -// some headers should never be stored in the cache, either because -// they're a security footgun to leave lying around, or because we -// just don't need them taking up space. -// set to undefined so they're omitted from the JSON.stringify -const pruneHeaders = { - authorization: undefined, - 'npm-session': undefined, - 'set-cookie': undefined, - 'cf-ray': undefined, - 'cf-cache-status': undefined, - 'cf-request-id': undefined, - 'x-fetch-attempts': undefined, -} - -function cacheKey (req) { - const parsed = new url.URL(req.url) - return `make-fetch-happen:request-cache:${ - url.format({ - protocol: parsed.protocol, - slashes: true, - port: parsed.port, - hostname: parsed.hostname, - pathname: parsed.pathname, - search: parsed.search, - }) - }` -} - -// This is a cacache-based implementation of the Cache standard, -// using node-fetch. -// docs: https://developer.mozilla.org/en-US/docs/Web/API/Cache -// -module.exports = class Cache { - constructor (path, opts) { - this._path = path - this.Promise = (opts && opts.Promise) || Promise - } - - static get pruneHeaders () { - // exposed for testing, not modifiable - return { ...pruneHeaders } - } - - // Returns a Promise that resolves to the response associated with the first - // matching request in the Cache object. - match (req, opts) { - const key = cacheKey(req) - return cacache.get.info(this._path, key).then(info => { - return info && cacache.get.hasContent( - this._path, info.integrity, opts - ).then(exists => exists && info) - }).then(info => { - if (info && info.metadata && matchDetails(req, { - url: info.metadata.url, - reqHeaders: new fetch.Headers(info.metadata.reqHeaders), - resHeaders: new fetch.Headers(info.metadata.resHeaders), - cacheIntegrity: info.integrity, - integrity: opts && opts.integrity, - })) { - const resHeaders = new fetch.Headers(info.metadata.resHeaders) - addCacheHeaders(resHeaders, this._path, key, info.integrity, info.time) - if (req.method === 'HEAD') { - return new fetch.Response(null, { - url: req.url, - headers: resHeaders, - status: 200, - }) - } - const cachePath = this._path - // avoid opening cache file handles until a user actually tries to - // read from it. - const body = new Minipass() - const fitInMemory = info.size < MAX_MEM_SIZE - const removeOnResume = () => body.removeListener('resume', onResume) - const onResume = - opts.memoize !== false && fitInMemory - ? 
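      // entry is small and memoization is allowed: stream it from cacache
      // (passing the memoize option through); otherwise (below) read the
      // content with a single buffered byDigest call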
() => { - const c = cacache.get.stream.byDigest(cachePath, info.integrity, { - memoize: opts.memoize, - }) - c.on('error', /* istanbul ignore next */ err => { - body.emit('error', err) - }) - c.pipe(body) - } - : () => { - removeOnResume() - cacache.get.byDigest(cachePath, info.integrity, { - memoize: opts.memoize, - }) - .then(data => body.end(data)) - .catch(/* istanbul ignore next */ err => { - body.emit('error', err) - }) - } - body.once('resume', onResume) - body.once('end', () => removeOnResume) - return this.Promise.resolve(new fetch.Response(body, { - url: req.url, - headers: resHeaders, - status: 200, - size: info.size, - })) - } - }) - } - - // Takes both a request and its response and adds it to the given cache. - put (req, response, opts) { - opts = opts || {} - const size = response.headers.get('content-length') - const fitInMemory = !!size && opts.memoize !== false && size < MAX_MEM_SIZE - const ckey = cacheKey(req) - const cacheOpts = { - algorithms: opts.algorithms, - metadata: { - url: req.url, - reqHeaders: { - ...req.headers.raw(), - ...pruneHeaders, - }, - resHeaders: { - ...response.headers.raw(), - ...pruneHeaders, - }, - }, - size, - memoize: fitInMemory && opts.memoize, - } - if (req.method === 'HEAD' || response.status === 304) { - // Update metadata without writing - return cacache.get.info(this._path, ckey).then(info => { - // Providing these will bypass content write - cacheOpts.integrity = info.integrity - addCacheHeaders( - response.headers, this._path, ckey, info.integrity, info.time - ) - - return new MinipassPipeline( - cacache.get.stream.byDigest(this._path, info.integrity, cacheOpts), - cacache.put.stream(this._path, ckey, cacheOpts) - ).promise().then(() => { - return response - }) - }) - } - const oldBody = response.body - // the flush is the last thing in the pipeline. Build the pipeline - // back-to-front so we don't consume the data before we use it! - // We unshift in either a tee-stream to the cache put stream, - // or a collecter that dumps it to cache in one go, then the - // old body to bring in the data. - const newBody = new MinipassPipeline(new MinipassFlush({ - flush () { - return cacheWritePromise - }, - })) - - let cacheWriteResolve, cacheWriteReject - const cacheWritePromise = new Promise((resolve, reject) => { - cacheWriteResolve = resolve - cacheWriteReject = reject - }) - const cachePath = this._path - - if (fitInMemory) { - const collecter = new MinipassCollect.PassThrough() - collecter.on('collect', data => { - cacache.put( - cachePath, - ckey, - data, - cacheOpts - ).then(cacheWriteResolve, cacheWriteReject) - }) - newBody.unshift(collecter) - } else { - const tee = new Minipass() - const cacheStream = cacache.put.stream( - cachePath, - ckey, - cacheOpts - ) - tee.pipe(cacheStream) - cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) - newBody.unshift(tee) - } - - newBody.unshift(oldBody) - return Promise.resolve(new fetch.Response(newBody, response)) - } - - // Finds the Cache entry whose key is the request, and if found, deletes the - // Cache entry and returns a Promise that resolves to true. If no Cache entry - // is found, it returns false. 
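  // NOTE: as written, the rm.entry call below always resolves to false (see the TODO)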
- 'delete' (req, opts) { - opts = opts || {} - if (typeof opts.memoize === 'object') { - if (opts.memoize.reset) - opts.memoize.reset() - else if (opts.memoize.clear) - opts.memoize.clear() - else { - Object.keys(opts.memoize).forEach(k => { - opts.memoize[k] = null - }) - } - } - return cacache.rm.entry( - this._path, - cacheKey(req) - // TODO - true/false - ).then(() => false) - } -} - -function matchDetails (req, cached) { - const reqUrl = new url.URL(req.url) - const cacheUrl = new url.URL(cached.url) - const vary = cached.resHeaders.get('Vary') - // https://tools.ietf.org/html/rfc7234#section-4.1 - if (vary) { - if (vary.match(/\*/)) - return false - else { - const fieldsMatch = vary.split(/\s*,\s*/).every(field => { - return cached.reqHeaders.get(field) === req.headers.get(field) - }) - if (!fieldsMatch) - return false - } - } - if (cached.integrity) - return ssri.parse(cached.integrity).match(cached.cacheIntegrity) - - reqUrl.hash = null - cacheUrl.hash = null - return url.format(reqUrl) === url.format(cacheUrl) -} - -function addCacheHeaders (resHeaders, path, key, hash, time) { - resHeaders.set('X-Local-Cache', encodeURIComponent(path)) - resHeaders.set('X-Local-Cache-Key', encodeURIComponent(key)) - resHeaders.set('X-Local-Cache-Hash', encodeURIComponent(hash)) - resHeaders.set('X-Local-Cache-Time', new Date(time).toUTCString()) -} diff --git a/node_modules/make-fetch-happen/index.js b/node_modules/make-fetch-happen/index.js deleted file mode 100644 index 54f72049c1d52..0000000000000 --- a/node_modules/make-fetch-happen/index.js +++ /dev/null @@ -1,457 +0,0 @@ -'use strict' - -const url = require('url') -const fetch = require('minipass-fetch') -const pkg = require('./package.json') -const retry = require('promise-retry') -let ssri - -const Minipass = require('minipass') -const MinipassPipeline = require('minipass-pipeline') -const getAgent = require('./agent') -const setWarning = require('./warning') - -const configureOptions = require('./utils/configure-options') -const iterableToObject = require('./utils/iterable-to-object') -const makePolicy = require('./utils/make-policy') - -const isURL = /^https?:/ -const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` - -const RETRY_ERRORS = [ - 'ECONNRESET', // remote socket closed on us - 'ECONNREFUSED', // remote host refused to open connection - 'EADDRINUSE', // failed to bind to a local port (proxy?) - 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW - // Known codes we do NOT retry on: - // ENOTFOUND (getaddrinfo failure. 
Either bad hostname, or offline) -] - -const RETRY_TYPES = [ - 'request-timeout', -] - -// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch -module.exports = cachingFetch -cachingFetch.defaults = function (_uri, _opts) { - const fetch = this - if (typeof _uri === 'object') { - _opts = _uri - _uri = null - } - - function defaultedFetch (uri, opts) { - const finalOpts = Object.assign({}, _opts || {}, opts || {}) - return fetch(uri || _uri, finalOpts) - } - - defaultedFetch.defaults = fetch.defaults - defaultedFetch.delete = fetch.delete - return defaultedFetch -} - -cachingFetch.delete = cacheDelete -function cacheDelete (uri, opts) { - opts = configureOptions(opts) - if (opts.cacheManager) { - const req = new fetch.Request(uri, { - method: opts.method, - headers: opts.headers, - }) - return opts.cacheManager.delete(req, opts) - } -} - -function initializeSsri () { - if (!ssri) - ssri = require('ssri') -} - -function cachingFetch (uri, _opts) { - const opts = configureOptions(_opts) - - if (opts.integrity) { - initializeSsri() - // if verifying integrity, fetch must not decompress - opts.compress = false - } - - const isCachable = ( - ( - opts.method === 'GET' || - opts.method === 'HEAD' - ) && - Boolean(opts.cacheManager) && - opts.cache !== 'no-store' && - opts.cache !== 'reload' - ) - - if (isCachable) { - const req = new fetch.Request(uri, { - method: opts.method, - headers: opts.headers, - }) - - return opts.cacheManager.match(req, opts).then(res => { - if (res) { - const warningCode = (res.headers.get('Warning') || '').match(/^\d+/) - if (warningCode && +warningCode >= 100 && +warningCode < 200) { - // https://tools.ietf.org/html/rfc7234#section-4.3.4 - // - // If a stored response is selected for update, the cache MUST: - // - // * delete any Warning header fields in the stored response with - // warn-code 1xx (see Section 5.5); - // - // * retain any Warning header fields in the stored response with - // warn-code 2xx; - // - res.headers.delete('Warning') - } - - if (opts.cache === 'default' && !isStale(req, res)) - return res - - if (opts.cache === 'default' || opts.cache === 'no-cache') - return conditionalFetch(req, res, opts) - - if (opts.cache === 'force-cache' || opts.cache === 'only-if-cached') { - // 112 Disconnected operation - // SHOULD be included if the cache is intentionally disconnected from - // the rest of the network for a period of time. 
- // (https://tools.ietf.org/html/rfc2616#section-14.46) - setWarning(res, 112, 'Disconnected operation') - return res - } - } - - if (!res && opts.cache === 'only-if-cached') { - const errorMsg = `request to ${ - uri - } failed: cache mode is 'only-if-cached' but no cached response available.` - - const err = new Error(errorMsg) - err.code = 'ENOTCACHED' - throw err - } - - // Missing cache entry, or mode is default (if stale), reload, no-store - return remoteFetch(req.url, opts) - }) - } - return remoteFetch(uri, opts) -} - -// https://tools.ietf.org/html/rfc7234#section-4.2 -function isStale (req, res) { - const _req = { - url: req.url, - method: req.method, - headers: iterableToObject(req.headers), - } - - const policy = makePolicy(req, res) - - const responseTime = res.headers.get('x-local-cache-time') || - /* istanbul ignore next - would be weird to get a 'stale' - * response that didn't come from cache with a cache time header */ - (res.headers.get('date') || 0) - - policy._responseTime = new Date(responseTime) - - const bool = !policy.satisfiesWithoutRevalidation(_req) - const headers = policy.responseHeaders() - if (headers.warning && /^113\b/.test(headers.warning)) { - // Possible to pick up a rfc7234 warning at this point. - // This is kind of a weird place to stick this, should probably go - // in cachingFetch. But by putting it here, we save an extra - // CachePolicy object construction. - res.headers.append('warning', headers.warning) - } - return bool -} - -function mustRevalidate (res) { - return (res.headers.get('cache-control') || '').match(/must-revalidate/i) -} - -function conditionalFetch (req, cachedRes, opts) { - const _req = { - url: req.url, - method: req.method, - headers: Object.assign({}, opts.headers || {}), - } - - const policy = makePolicy(req, cachedRes) - opts.headers = policy.revalidationHeaders(_req) - - return remoteFetch(req.url, opts) - .then(condRes => { - const revalidatedPolicy = policy.revalidatedPolicy(_req, { - status: condRes.status, - headers: iterableToObject(condRes.headers), - }) - - if (condRes.status >= 500 && !mustRevalidate(cachedRes)) { - // 111 Revalidation failed - // MUST be included if a cache returns a stale response because an - // attempt to revalidate the response failed, due to an inability to - // reach the server. - // (https://tools.ietf.org/html/rfc2616#section-14.46) - setWarning(cachedRes, 111, 'Revalidation failed') - return cachedRes - } - - if (condRes.status === 304) { // 304 Not Modified - // Create a synthetic response from the cached body and original req - const synthRes = new fetch.Response(cachedRes.body, condRes) - return opts.cacheManager.put(req, synthRes, opts) - .then(newRes => { - // Get the list first, because if we delete while iterating, - // it'll throw off the count and not make it through all - // of them. - const newHeaders = revalidatedPolicy.policy.responseHeaders() - const toDelete = [...newRes.headers.keys()] - .filter(k => !newHeaders[k]) - for (const key of toDelete) - newRes.headers.delete(key) - - for (const [key, val] of Object.entries(newHeaders)) - newRes.headers.set(key, val) - - return newRes - }) - } - - return condRes - }) - .then(res => res) - .catch(err => { - if (mustRevalidate(cachedRes)) - throw err - else { - // 111 Revalidation failed - // MUST be included if a cache returns a stale response because an - // attempt to revalidate the response failed, due to an inability to - // reach the server. 
- // (https://tools.ietf.org/html/rfc2616#section-14.46) - setWarning(cachedRes, 111, 'Revalidation failed') - // 199 Miscellaneous warning - // The warning text MAY include arbitrary information to be presented to - // a human user, or logged. A system receiving this warning MUST NOT take - // any automated action, besides presenting the warning to the user. - // (https://tools.ietf.org/html/rfc2616#section-14.46) - setWarning( - cachedRes, - 199, - `Miscellaneous Warning ${err.code}: ${err.message}` - ) - - return cachedRes - } - }) -} - -function remoteFetchHandleIntegrity (res, integrity) { - if (res.status !== 200) - return res // Error responses aren't subject to integrity checks. - - const oldBod = res.body - const newBod = ssri.integrityStream({ - integrity, - }) - return new fetch.Response(new MinipassPipeline(oldBod, newBod), res) -} - -function remoteFetch (uri, opts) { - const agent = getAgent(uri, opts) - const headers = opts.headers instanceof fetch.Headers - ? opts.headers - : new fetch.Headers(opts.headers) - if (!headers.get('connection')) - headers.set('connection', agent ? 'keep-alive' : 'close') - - if (!headers.get('user-agent')) - headers.set('user-agent', USER_AGENT) - - const reqOpts = { - agent, - body: opts.body, - compress: opts.compress, - follow: opts.follow, - headers, - method: opts.method, - redirect: 'manual', - size: opts.size, - counter: opts.counter, - timeout: opts.timeout, - ca: opts.ca, - cert: opts.cert, - key: opts.key, - rejectUnauthorized: opts.strictSSL, - } - - return retry( - (retryHandler, attemptNum) => { - const req = new fetch.Request(uri, reqOpts) - return fetch(req) - .then((res) => { - if (opts.integrity) - res = remoteFetchHandleIntegrity(res, opts.integrity) - - res.headers.set('x-fetch-attempts', attemptNum) - - const isStream = Minipass.isStream(req.body) - - if (opts.cacheManager) { - const isMethodGetHead = ( - req.method === 'GET' || - req.method === 'HEAD' - ) - - const isCachable = ( - opts.cache !== 'no-store' && - isMethodGetHead && - makePolicy(req, res).storable() && - res.status === 200 // No other statuses should be stored! 
- ) - - if (isCachable) - return opts.cacheManager.put(req, res, opts) - - if (!isMethodGetHead) { - return opts.cacheManager.delete(req).then(() => { - if (res.status >= 500 && req.method !== 'POST' && !isStream) { - if (typeof opts.onRetry === 'function') - opts.onRetry(res) - - return retryHandler(res) - } - - return res - }) - } - } - - const isRetriable = ( - req.method !== 'POST' && - !isStream && - ( - res.status === 408 || // Request Timeout - res.status === 420 || // Enhance Your Calm (usually Twitter rate-limit) - res.status === 429 || // Too Many Requests ("standard" rate-limiting) - res.status >= 500 // Assume server errors are momentary hiccups - ) - ) - - if (isRetriable) { - if (typeof opts.onRetry === 'function') - opts.onRetry(res) - - return retryHandler(res) - } - - if (!fetch.isRedirect(res.status)) - return res - - if (opts.redirect === 'manual') - return res - - // if (!fetch.isRedirect(res.status) || opts.redirect === 'manual') { - // return res - // } - - // handle redirects - matches behavior of fetch: https://github.com/bitinn/node-fetch - if (opts.redirect === 'error') { - const err = new fetch.FetchError(`redirect mode is set to error: ${uri}`, 'no-redirect', { code: 'ENOREDIRECT' }) - throw err - } - - if (!res.headers.get('location')) { - const err = new fetch.FetchError(`redirect location header missing at: ${uri}`, 'no-location', { code: 'EINVALIDREDIRECT' }) - throw err - } - - if (req.counter >= req.follow) { - const err = new fetch.FetchError(`maximum redirect reached at: ${uri}`, 'max-redirect', { code: 'EMAXREDIRECT' }) - throw err - } - - const resolvedUrlParsed = new url.URL(res.headers.get('location'), req.url) - const resolvedUrl = url.format(resolvedUrlParsed) - const redirectURL = (isURL.test(res.headers.get('location'))) - ? new url.URL(res.headers.get('location')) - : resolvedUrlParsed - - // Comment below is used under the following license: - // Copyright (c) 2010-2012 Mikeal Rogers - // Licensed under the Apache License, Version 2.0 (the "License"); - // you may not use this file except in compliance with the License. - // You may obtain a copy of the License at - // http://www.apache.org/licenses/LICENSE-2.0 - // Unless required by applicable law or agreed to in writing, - // software distributed under the License is distributed on an "AS - // IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - // express or implied. See the License for the specific language - // governing permissions and limitations under the License. - - // Remove authorization if changing hostnames (but not if just - // changing ports or protocols). This matches the behavior of request: - // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 - if (new url.URL(req.url).hostname !== redirectURL.hostname) - req.headers.delete('authorization') - - // for POST request with 301/302 response, or any request with 303 response, - // use GET when following redirect - if ( - res.status === 303 || - ( - req.method === 'POST' && - ( - res.status === 301 || - res.status === 302 - ) - ) - ) { - opts.method = 'GET' - opts.body = null - req.headers.delete('content-length') - } - - opts.headers = {} - req.headers.forEach((value, name) => { - opts.headers[name] = value - }) - - opts.counter = ++req.counter - return cachingFetch(resolvedUrl, opts) - }) - .catch(err => { - const code = (err.code === 'EPROMISERETRY') - ? 
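        // promise-retry wraps whatever was handed to its retry handler in an
        // EPROMISERETRY error; unwrap it here to read the original code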
err.retried.code - : err.code - - const isRetryError = ( - RETRY_ERRORS.indexOf(code) === -1 && - RETRY_TYPES.indexOf(err.type) === -1 - ) - - if (req.method === 'POST' || isRetryError) - throw err - - if (typeof opts.onRetry === 'function') - opts.onRetry(err) - - return retryHandler(err) - }) - }, - opts.retry - ).catch(err => { - if (err.status >= 400 && err.type !== 'system') { - // this is an HTTP response "error" that we care about - return err - } - - throw err - }) -} diff --git a/node_modules/make-fetch-happen/agent.js b/node_modules/make-fetch-happen/lib/agent.js similarity index 88% rename from node_modules/make-fetch-happen/agent.js rename to node_modules/make-fetch-happen/lib/agent.js index e27eb4f3a801d..873d69cf4760b 100644 --- a/node_modules/make-fetch-happen/agent.js +++ b/node_modules/make-fetch-happen/lib/agent.js @@ -4,8 +4,8 @@ const url = require('url') const isLambda = require('is-lambda') const AGENT_CACHE = new LRU({ max: 50 }) -let HttpsAgent -let HttpAgent +const HttpAgent = require('agentkeepalive') +const HttpsAgent = HttpAgent.HttpsAgent module.exports = getAgent @@ -66,11 +66,6 @@ function getAgent (uri, opts) { return proxy } - if (!HttpsAgent) { - HttpAgent = require('agentkeepalive') - HttpsAgent = HttpAgent.HttpsAgent - } - const agent = isHttps ? new HttpsAgent({ maxSockets: agentMaxSockets, ca: opts.ca, @@ -155,15 +150,15 @@ function getProxyUri (uri, opts) { } const getAuth = u => - u.username && u.password ? `${u.username}:${u.password}` - : u.username ? u.username + u.username && u.password ? decodeURIComponent(`${u.username}:${u.password}`) + : u.username ? decodeURIComponent(u.username) : null const getPath = u => u.pathname + u.search + u.hash -let HttpProxyAgent -let HttpsProxyAgent -let SocksProxyAgent +const HttpProxyAgent = require('http-proxy-agent') +const HttpsProxyAgent = require('https-proxy-agent') +const SocksProxyAgent = require('socks-proxy-agent') module.exports.getProxy = getProxy function getProxy (proxyUrl, opts, isHttps) { const popts = { @@ -182,23 +177,13 @@ function getProxy (proxyUrl, opts, isHttps) { } if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') { - if (!isHttps) { - if (!HttpProxyAgent) - HttpProxyAgent = require('http-proxy-agent') - + if (!isHttps) return new HttpProxyAgent(popts) - } else { - if (!HttpsProxyAgent) - HttpsProxyAgent = require('https-proxy-agent') - + else return new HttpsProxyAgent(popts) - } - } else if (proxyUrl.protocol.startsWith('socks')) { - if (!SocksProxyAgent) - SocksProxyAgent = require('socks-proxy-agent') - + } else if (proxyUrl.protocol.startsWith('socks')) return new SocksProxyAgent(popts) - } else { + else { throw Object.assign( new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`), { diff --git a/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/make-fetch-happen/lib/cache/entry.js new file mode 100644 index 0000000000000..a2acea156ee6f --- /dev/null +++ b/node_modules/make-fetch-happen/lib/cache/entry.js @@ -0,0 +1,460 @@ +const { Request, Response } = require('minipass-fetch') +const Minipass = require('minipass') +const MinipassCollect = require('minipass-collect') +const MinipassFlush = require('minipass-flush') +const MinipassPipeline = require('minipass-pipeline') +const cacache = require('cacache') +const url = require('url') + +const CachePolicy = require('./policy.js') +const cacheKey = require('./key.js') +const remote = require('../remote.js') + +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, 
prop) + +// maximum amount of data we will buffer into memory +// if we'll exceed this, we switch to streaming +const MAX_MEM_SIZE = 5 * 1024 * 1024 // 5MB + +// allow list for request headers that will be written to the cache index +// note: we will also store any request headers +// that are named in a response's vary header +const KEEP_REQUEST_HEADERS = [ + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept', + 'cache-control', +] + +// allow list for response headers that will be written to the cache index +// note: we must not store the real response's age header, or when we load +// a cache policy based on the metadata it will think the cached response +// is always stale +const KEEP_RESPONSE_HEADERS = [ + 'cache-control', + 'content-encoding', + 'content-language', + 'content-type', + 'date', + 'etag', + 'expires', + 'last-modified', + 'location', + 'pragma', + 'vary', +] + +// return an object containing all metadata to be written to the index +const getMetadata = (request, response, options) => { + const metadata = { + time: Date.now(), + url: request.url, + reqHeaders: {}, + resHeaders: {}, + } + + // only save the status if it's not a 200 or 304 + if (response.status !== 200 && response.status !== 304) + metadata.status = response.status + + for (const name of KEEP_REQUEST_HEADERS) { + if (request.headers.has(name)) + metadata.reqHeaders[name] = request.headers.get(name) + } + + // if the request's host header differs from the host in the url + // we need to keep it, otherwise it's just noise and we ignore it + const host = request.headers.get('host') + const parsedUrl = new url.URL(request.url) + if (host && parsedUrl.host !== host) + metadata.reqHeaders.host = host + + // if the response has a vary header, make sure + // we store the relevant request headers too + if (response.headers.has('vary')) { + const vary = response.headers.get('vary') + // a vary of "*" means every header causes a different response. 
+ // in that scenario, we do not include any additional headers + // as the freshness check will always fail anyway and we don't + // want to bloat the cache indexes + if (vary !== '*') { + // copy any other request headers that will vary the response + const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) + for (const name of varyHeaders) { + // explicitly ignore accept-encoding here + if (name !== 'accept-encoding' && request.headers.has(name)) + metadata.reqHeaders[name] = request.headers.get(name) + } + } + } + + for (const name of KEEP_RESPONSE_HEADERS) { + if (response.headers.has(name)) + metadata.resHeaders[name] = response.headers.get(name) + } + + // we only store accept-encoding and content-encoding if the user + // has disabled automatic compression and decompression in minipass-fetch + // since if it's enabled (the default) then the content will have + // already been decompressed making the header a lie + if (options.compress === false) { + metadata.reqHeaders['accept-encoding'] = request.headers.get('accept-encoding') + metadata.resHeaders['content-encoding'] = response.headers.get('content-encoding') + } + + return metadata +} + +// symbols used to hide objects that may be lazily evaluated in a getter +const _request = Symbol('request') +const _response = Symbol('response') +const _policy = Symbol('policy') + +class CacheEntry { + constructor ({ entry, request, response, options }) { + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else + this.key = cacheKey(request) + + this.options = options + + // these properties are behind getters that lazily evaluate + this[_request] = request + this[_response] = response + this[_policy] = null + } + + // returns a CacheEntry instance that satisfies the given request + // or undefined if no existing entry satisfies + static async find (request, options) { + try { + // compacts the index and returns an array of unique entries + var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { + const entryA = new CacheEntry({ entry: A, options }) + const entryB = new CacheEntry({ entry: B, options }) + return entryA.policy.satisfies(entryB.request) + }, { + validateEntry: (entry) => { + // if an integrity is null, it needs to have a status specified + if (entry.integrity === null) + return !!(entry.metadata && entry.metadata.status) + + return true + }, + }) + } catch (err) { + // if the compact request fails, ignore the error and return + return + } + + // a cache mode of 'reload' means to behave as though we have no cache + // on the way to the network. return undefined to allow cacheFetch to + // create a brand new request no matter what. 
+ if (options.cache === 'reload') + return + + // find the specific entry that satisfies the request + let match + for (const entry of matches) { + const _entry = new CacheEntry({ + entry, + options, + }) + + if (_entry.policy.satisfies(request)) { + match = _entry + break + } + } + + return match + } + + // if the user made a PUT/POST/PATCH then we invalidate our + // cache for the same url by deleting the index entirely + static async invalidate (request, options) { + const key = cacheKey(request) + try { + await cacache.rm.entry(options.cachePath, key, { removeFully: true }) + } catch (err) { + // ignore errors + } + } + + get request () { + if (!this[_request]) { + this[_request] = new Request(this.entry.metadata.url, { + method: 'GET', + headers: this.entry.metadata.reqHeaders, + }) + } + + return this[_request] + } + + get response () { + if (!this[_response]) { + this[_response] = new Response(null, { + url: this.entry.metadata.url, + counter: this.options.counter, + status: this.entry.metadata.status || 200, + headers: { + ...this.entry.metadata.resHeaders, + 'content-length': this.entry.size, + }, + }) + } + + return this[_response] + } + + get policy () { + if (!this[_policy]) { + this[_policy] = new CachePolicy({ + entry: this.entry, + request: this.request, + response: this.response, + options: this.options, + }) + } + + return this[_policy] + } + + // wraps the response in a pipeline that stores the data + // in the cache while the user consumes it + async store (status) { + // if we got a status other than 200, 301, or 308, + // or the CachePolicy forbid storage, append the + // cache status header and return it untouched + if (this.request.method !== 'GET' || ![200, 301, 308].includes(this.response.status) || !this.policy.storable()) { + this.response.headers.set('x-local-cache-status', 'skip') + return this.response + } + + const size = this.response.headers.get('content-length') + const fitsInMemory = !!size && Number(size) < MAX_MEM_SIZE + const shouldBuffer = this.options.memoize !== false && fitsInMemory + const cacheOpts = { + algorithms: this.options.algorithms, + metadata: getMetadata(this.request, this.response, this.options), + size, + memoize: fitsInMemory && this.options.memoize, + } + + let body = null + // we only set a body if the status is a 200, redirects are + // stored as metadata only + if (this.response.status === 200) { + let cacheWriteResolve, cacheWriteReject + const cacheWritePromise = new Promise((resolve, reject) => { + cacheWriteResolve = resolve + cacheWriteReject = reject + }) + + body = new MinipassPipeline(new MinipassFlush({ + flush () { + return cacheWritePromise + }, + })) + + let abortStream, onResume + if (shouldBuffer) { + // if the result fits in memory, use a collect stream to gather + // the response and write it to cacache while also passing it through + // to the user + onResume = () => { + const collector = new MinipassCollect.PassThrough() + abortStream = collector + collector.on('collect', (data) => { + // TODO if the cache write fails, log a warning but return the response anyway + cacache.put(this.options.cachePath, this.key, data, cacheOpts).then(cacheWriteResolve, cacheWriteReject) + }) + body.unshift(collector) + body.unshift(this.response.body) + } + } else { + // if it does not fit in memory, create a tee stream and use + // that to pipe to both the cache and the user simultaneously + onResume = () => { + const tee = new Minipass() + const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) + 
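          // keep a reference to the cache write stream so it can be destroyed
          // if the response body errors (see the error handler below)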
abortStream = cacheStream + tee.pipe(cacheStream) + // TODO if the cache write fails, log a warning but return the response anyway + cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) + body.unshift(tee) + body.unshift(this.response.body) + } + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + this.response.body.on('error', (err) => { + // the abortStream will either be a MinipassCollect if we buffer + // or a cacache write stream, either way be sure to listen for + // errors from the actual response and avoid writing data that we + // know to be invalid to the cache + abortStream.destroy(err) + }) + } else + await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + + // note: we do not set the x-local-cache-hash header because we do not know + // the hash value until after the write to the cache completes, which doesn't + // happen until after the response has been sent and it's too late to write + // the header anyway + this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + this.response.headers.set('x-local-cache-mode', shouldBuffer ? 'buffer' : 'stream') + this.response.headers.set('x-local-cache-status', status) + this.response.headers.set('x-local-cache-time', new Date().toISOString()) + const newResponse = new Response(body, { + url: this.response.url, + status: this.response.status, + headers: this.response.headers, + counter: this.options.counter, + }) + return newResponse + } + + // use the cached data to create a response and return it + async respond (method, options, status) { + let response + const size = Number(this.response.headers.get('content-length')) + const fitsInMemory = !!size && size < MAX_MEM_SIZE + const shouldBuffer = this.options.memoize !== false && fitsInMemory + if (method === 'HEAD' || [301, 308].includes(this.response.status)) { + // if the request is a HEAD, or the response is a redirect, + // then the metadata in the entry already includes everything + // we need to build a response + response = this.response + } else { + // we're responding with a full cached response, so create a body + // that reads from cacache and attach it to a new Response + const body = new Minipass() + const removeOnResume = () => body.removeListener('resume', onResume) + let onResume + if (shouldBuffer) { + onResume = async () => { + removeOnResume() + try { + const content = await cacache.get.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + body.end(content) + } catch (err) { + if (err.code === 'EINTEGRITY') + await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') + await CacheEntry.invalidate(this.request, this.options) + body.emit('error', err) + } + } + } else { + onResume = () => { + const cacheStream = cacache.get.stream.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if (err.code === 'EINTEGRITY') + await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') + await CacheEntry.invalidate(this.request, this.options) + body.emit('error', err) + cacheStream.resume() + }) + cacheStream.pipe(body) + } + } + + 
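      // defer all cache writes until the caller actually starts reading the body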
body.once('resume', onResume) + body.once('end', removeOnResume) + response = new Response(body, { + url: this.entry.metadata.url, + counter: options.counter, + status: 200, + headers: { + ...this.policy.responseHeaders(), + }, + }) + } + + response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) + response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + response.headers.set('x-local-cache-mode', shouldBuffer ? 'buffer' : 'stream') + response.headers.set('x-local-cache-status', status) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) + return response + } + + // use the provided request along with this cache entry to + // revalidate the stored response. returns a response, either + // from the cache or from the update + async revalidate (request, options) { + const revalidateRequest = new Request(request, { + headers: this.policy.revalidationHeaders(request), + }) + + try { + // NOTE: be sure to remove the headers property from the + // user supplied options, since we have already defined + // them on the new request object. if they're still in the + // options then those will overwrite the ones from the policy + var response = await remote(revalidateRequest, { + ...options, + headers: undefined, + }) + } catch (err) { + // if the network fetch fails, return the stale + // cached response unless it has a cache-control + // of 'must-revalidate' + if (!this.policy.mustRevalidate) + return this.respond(request.method, options, 'stale') + + throw err + } + + if (this.policy.revalidated(revalidateRequest, response)) { + // we got a 304, write a new index to the cache and respond from cache + const metadata = getMetadata(request, response, options) + // 304 responses do not include headers that are specific to the response data + // since they do not include a body, so we copy values for headers that were + // in the old cache entry to the new one, if the new metadata does not already + // include that header + for (const name of KEEP_RESPONSE_HEADERS) { + if (!hasOwnProperty(metadata.resHeaders, name) && hasOwnProperty(this.entry.metadata.resHeaders, name)) + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + + try { + await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { + size: this.entry.size, + metadata, + }) + } catch (err) { + // if updating the cache index fails, we ignore it and + // respond anyway + } + return this.respond(request.method, options, 'revalidated') + } + + // if we got a modified response, create a new entry based on it + const newEntry = new CacheEntry({ + request, + response, + options, + }) + + // respond with the new entry while writing it to the cache + return newEntry.store('updated') + } +} + +module.exports = CacheEntry diff --git a/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/make-fetch-happen/lib/cache/errors.js new file mode 100644 index 0000000000000..31e97c4b033c0 --- /dev/null +++ b/node_modules/make-fetch-happen/lib/cache/errors.js @@ -0,0 +1,10 @@ +class NotCachedError extends Error { + constructor (url) { + super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) + this.code = 'ENOTCACHED' + } +} + +module.exports = { + NotCachedError, +} diff --git a/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/make-fetch-happen/lib/cache/index.js new file mode 100644 
index 0000000000000..cca93d9b4eb5d --- /dev/null +++ b/node_modules/make-fetch-happen/lib/cache/index.js @@ -0,0 +1,45 @@ +const { NotCachedError } = require('./errors.js') +const CacheEntry = require('./entry.js') +const remote = require('../remote.js') + +// do whatever is necessary to get a Response and return it +const cacheFetch = async (request, options) => { + // try to find a cached entry that satisfies this request + const entry = await CacheEntry.find(request, options) + if (!entry) { + // no cached result, if the cache mode is 'only-if-cached' that's a failure + if (options.cache === 'only-if-cached') + throw new NotCachedError(request.url) + + // otherwise, we make a request, store it and return it + const response = await remote(request, options) + const entry = new CacheEntry({ request, response, options }) + return entry.store('miss') + } + + // we have a cached response that satisfies this request, however if the cache + // mode is 'no-cache' then we send the revalidation request no matter what + if (options.cache === 'no-cache') + return entry.revalidate(request, options) + + // if the cached entry is not stale, or if the cache mode is 'force-cache' or + // 'only-if-cached' we can respond with the cached entry. set the status + // based on the result of needsRevalidation and respond + const _needsRevalidation = entry.policy.needsRevalidation(request) + if (options.cache === 'force-cache' || + options.cache === 'only-if-cached' || + !_needsRevalidation) + return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit') + + // if we got here, the cache entry is stale so revalidate it + return entry.revalidate(request, options) +} + +cacheFetch.invalidate = async (request, options) => { + if (!options.cachePath) + return + + return CacheEntry.invalidate(request, options) +} + +module.exports = cacheFetch diff --git a/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/make-fetch-happen/lib/cache/key.js new file mode 100644 index 0000000000000..f7684d562b7fa --- /dev/null +++ b/node_modules/make-fetch-happen/lib/cache/key.js @@ -0,0 +1,17 @@ +const { URL, format } = require('url') + +// options passed to url.format() when generating a key +const formatOptions = { + auth: false, + fragment: false, + search: true, + unicode: false, +} + +// returns a string to be used as the cache key for the Request +const cacheKey = (request) => { + const parsed = new URL(request.url) + return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` +} + +module.exports = cacheKey diff --git a/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/make-fetch-happen/lib/cache/policy.js new file mode 100644 index 0000000000000..e0959f64ddf9d --- /dev/null +++ b/node_modules/make-fetch-happen/lib/cache/policy.js @@ -0,0 +1,161 @@ +const CacheSemantics = require('http-cache-semantics') +const Negotiator = require('negotiator') +const ssri = require('ssri') + +// HACK: negotiator lazy loads several of its own modules +// as a micro optimization. we need to be sure that they're +// in memory as soon as possible at startup so that we do +// not try to lazy load them after the directory has been +// retired during a self update of the npm CLI, we do this +// by calling all of the methods that trigger a lazy load +// on a fake instance. 
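As an editorial illustration of the cache flow added above (the entry lookup in `lib/cache/index.js` together with the `x-local-cache-*` headers set by `respond()`), here is a minimal sketch. It assumes the `make-fetch-happen` API as shown in this patch; the registry URL and the `./my-cache` path are only examples, and the statuses printed depend on the response's caching headers.

```js
const fetch = require('make-fetch-happen').defaults({ cachePath: './my-cache' })

const url = 'https://registry.npmjs.org/npm'

async function main () {
  // no cached entry yet, so this should be fetched from the network and stored ('miss')
  const first = await fetch(url)
  await first.text() // consume the body so the cache write can complete
  console.log(first.headers.get('x-local-cache-status'))

  // a second request is answered from the cache ('hit') while fresh,
  // or revalidated against the origin ('revalidated') once stale
  const second = await fetch(url)
  await second.text()
  console.log(second.headers.get('x-local-cache-status'))

  // 'only-if-cached' never goes to the network; with no usable entry it
  // rejects with the ENOTCACHED error defined in cache/errors.js
  const third = await fetch(url, { cache: 'only-if-cached' })
  console.log(third.headers.get('x-local-cache-status'))
}

main().catch(console.error)
```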
+const preloadNegotiator = new Negotiator({ headers: {} }) +preloadNegotiator.charsets() +preloadNegotiator.encodings() +preloadNegotiator.languages() +preloadNegotiator.mediaTypes() + +// options passed to http-cache-semantics constructor +const policyOptions = { + shared: false, + ignoreCargoCult: true, +} + +// a fake empty response, used when only testing the +// request for storability +const emptyResponse = { status: 200, headers: {} } + +// returns a plain object representation of the Request +const requestObject = (request) => { + const _obj = { + method: request.method, + url: request.url, + headers: {}, + } + + request.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +// returns a plain object representation of the Response +const responseObject = (response) => { + const _obj = { + status: response.status, + headers: {}, + } + + response.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +class CachePolicy { + constructor ({ entry, request, response, options }) { + this.entry = entry + this.request = requestObject(request) + this.response = responseObject(response) + this.options = options + this.policy = new CacheSemantics(this.request, this.response, policyOptions) + + if (this.entry) { + // if we have an entry, copy the timestamp to the _responseTime + // this is necessary because the CacheSemantics constructor forces + // the value to Date.now() which means a policy created from a + // cache entry is likely to always identify itself as stale + this.policy._responseTime = this.entry.metadata.time + } + } + + // static method to quickly determine if a request alone is storable + static storable (request, options) { + // no cachePath means no caching + if (!options.cachePath) + return false + + // user explicitly asked not to cache + if (options.cache === 'no-store') + return false + + // we only cache GET and HEAD requests + if (!['GET', 'HEAD'].includes(request.method)) + return false + + // otherwise, let http-cache-semantics make the decision + // based on the request's headers + const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) + return policy.storable() + } + + // returns true if the policy satisfies the request + satisfies (request) { + const _req = requestObject(request) + if (this.request.headers.host !== _req.headers.host) + return false + + const negotiatorA = new Negotiator(this.request) + const negotiatorB = new Negotiator(_req) + + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) + return false + + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) + return false + + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) + return false + + if (this.options.integrity) + return ssri.parse(this.options.integrity).match(this.entry.integrity) + + return true + } + + // returns true if the request and response allow caching + storable () { + return this.policy.storable() + } + + // NOTE: this is a hack to avoid parsing the cache-control + // header ourselves, it returns true if the response's + // cache-control contains must-revalidate + get mustRevalidate () { + return !!this.policy._rescc['must-revalidate'] + } + + // returns true if the cached response requires revalidation + // for the given request + needsRevalidation (request) { + const _req = requestObject(request) + // force method to GET because we only cache GETs + // but can serve a 
HEAD from a cached GET + _req.method = 'GET' + return !this.policy.satisfiesWithoutRevalidation(_req) + } + + responseHeaders () { + return this.policy.responseHeaders() + } + + // returns a new object containing the appropriate headers + // to send a revalidation request + revalidationHeaders (request) { + const _req = requestObject(request) + return this.policy.revalidationHeaders(_req) + } + + // returns true if the request/response was revalidated + // successfully. returns false if a new response was received + revalidated (request, response) { + const _req = requestObject(request) + const _res = responseObject(response) + const policy = this.policy.revalidatedPolicy(_req, _res) + return !policy.modified + } +} + +module.exports = CachePolicy diff --git a/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/make-fetch-happen/lib/fetch.js new file mode 100644 index 0000000000000..dfded79295da1 --- /dev/null +++ b/node_modules/make-fetch-happen/lib/fetch.js @@ -0,0 +1,100 @@ +'use strict' + +const { FetchError, Request, isRedirect } = require('minipass-fetch') +const url = require('url') + +const CachePolicy = require('./cache/policy.js') +const cache = require('./cache/index.js') +const remote = require('./remote.js') + +// given a Request, a Response and user options +// return true if the response is a redirect that +// can be followed. we throw errors that will result +// in the fetch being rejected if the redirect is +// possible but invalid for some reason +const canFollowRedirect = (request, response, options) => { + if (!isRedirect(response.status)) + return false + + if (options.redirect === 'manual') + return false + + if (options.redirect === 'error') + throw new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect', { code: 'ENOREDIRECT' }) + + if (!response.headers.has('location')) + throw new FetchError(`redirect location header missing for: ${request.url}`, 'no-location', { code: 'EINVALIDREDIRECT' }) + + if (request.counter >= request.follow) + throw new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect', { code: 'EMAXREDIRECT' }) + + return true +} + +// given a Request, a Response, and the user's options return an object +// with a new Request and a new options object that will be used for +// following the redirect +const getRedirect = (request, response, options) => { + const _opts = { ...options } + const location = response.headers.get('location') + const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) + // Comment below is used under the following license: + // Copyright (c) 2010-2012 Mikeal Rogers + // Licensed under the Apache License, Version 2.0 (the "License"); + // you may not use this file except in compliance with the License. + // You may obtain a copy of the License at + // http://www.apache.org/licenses/LICENSE-2.0 + // Unless required by applicable law or agreed to in writing, + // software distributed under the License is distributed on an "AS + // IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + // express or implied. See the License for the specific language + // governing permissions and limitations under the License. + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). 
This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (new url.URL(request.url).hostname !== redirectUrl.hostname) + request.headers.delete('authorization') + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if (response.status === 303 || (request.method === 'POST' && [301, 302].includes(response.status))) { + _opts.method = 'GET' + _opts.body = null + request.headers.delete('content-length') + } + + _opts.headers = {} + request.headers.forEach((value, key) => { + _opts.headers[key] = value + }) + + _opts.counter = ++request.counter + const redirectReq = new Request(url.format(redirectUrl), _opts) + return { + request: redirectReq, + options: _opts, + } +} + +const fetch = async (request, options) => { + const response = CachePolicy.storable(request, options) + ? await cache(request, options) + : await remote(request, options) + + // if the request wasn't a GET or HEAD, and the response + // status is between 200 and 399 inclusive, invalidate the + // request url + if (!['GET', 'HEAD'].includes(request.method) && + response.status >= 200 && + response.status <= 399) + await cache.invalidate(request, options) + + if (!canFollowRedirect(request, response, options)) + return response + + const redirect = getRedirect(request, response, options) + return fetch(redirect.request, redirect.options) +} + +module.exports = fetch diff --git a/node_modules/make-fetch-happen/lib/index.js b/node_modules/make-fetch-happen/lib/index.js new file mode 100644 index 0000000000000..6028bc0725129 --- /dev/null +++ b/node_modules/make-fetch-happen/lib/index.js @@ -0,0 +1,40 @@ +const { FetchError, Headers, Request, Response } = require('minipass-fetch') + +const configureOptions = require('./options.js') +const fetch = require('./fetch.js') + +const makeFetchHappen = (url, opts) => { + const options = configureOptions(opts) + + const request = new Request(url, options) + return fetch(request, options) +} + +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}) => { + if (typeof defaultUrl === 'object') { + defaultOptions = defaultUrl + defaultUrl = null + } + + const defaultedFetch = (url, options = {}) => { + const finalUrl = url || defaultUrl + const finalOptions = { + ...defaultOptions, + ...options, + headers: { + ...defaultOptions.headers, + ...options.headers, + }, + } + return makeFetchHappen(finalUrl, finalOptions) + } + + defaultedFetch.defaults = makeFetchHappen.defaults + return defaultedFetch +} + +module.exports = makeFetchHappen +module.exports.FetchError = FetchError +module.exports.Headers = Headers +module.exports.Request = Request +module.exports.Response = Response diff --git a/node_modules/make-fetch-happen/lib/options.js b/node_modules/make-fetch-happen/lib/options.js new file mode 100644 index 0000000000000..08891754868a5 --- /dev/null +++ b/node_modules/make-fetch-happen/lib/options.js @@ -0,0 +1,45 @@ +const conditionalHeaders = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 'if-match', + 'if-range', +] + +const configureOptions = (opts) => { + const options = { ...opts } + options.method = options.method ? 
options.method.toUpperCase() : 'GET' + if (Object.prototype.hasOwnProperty.call(options, 'strictSSL')) + options.rejectUnauthorized = options.strictSSL + + if (!options.retry) + options.retry = { retries: 0 } + else if (typeof options.retry === 'string') { + const retries = parseInt(options.retry, 10) + if (isFinite(retries)) + options.retry = { retries } + else + options.retry = { retries: 0 } + } else if (typeof options.retry === 'number') + options.retry = { retries: options.retry } + else + options.retry = { retries: 0, ...options.retry } + + options.cache = options.cache || 'default' + if (options.cache === 'default') { + const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { + return conditionalHeaders.includes(name.toLowerCase()) + }) + if (hasConditionalHeader) + options.cache = 'no-store' + } + + // cacheManager is deprecated, but if it's set and + // cachePath is not we should copy it to the new field + if (options.cacheManager && !options.cachePath) + options.cachePath = options.cacheManager + + return options +} + +module.exports = configureOptions diff --git a/node_modules/make-fetch-happen/lib/remote.js b/node_modules/make-fetch-happen/lib/remote.js new file mode 100644 index 0000000000000..7e4ed24edb530 --- /dev/null +++ b/node_modules/make-fetch-happen/lib/remote.js @@ -0,0 +1,102 @@ +const Minipass = require('minipass') +const MinipassPipeline = require('minipass-pipeline') +const fetch = require('minipass-fetch') +const promiseRetry = require('promise-retry') +const ssri = require('ssri') + +const getAgent = require('./agent.js') +const pkg = require('../package.json') + +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) + 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) +] + +const RETRY_TYPES = [ + 'request-timeout', +] + +// make a request directly to the remote source, +// retrying certain classes of errors as well as +// following redirects (through the cache if necessary) +// and verifying response integrity +const remoteFetch = (request, options) => { + const agent = getAgent(request.url, options) + if (!request.headers.has('connection')) + request.headers.set('connection', agent ? 
'keep-alive' : 'close') + + if (!request.headers.has('user-agent')) + request.headers.set('user-agent', USER_AGENT) + + // keep our own options since we're overriding the agent + // and the redirect mode + const _opts = { + ...options, + agent, + redirect: 'manual', + } + + return promiseRetry(async (retryHandler, attemptNum) => { + const req = new fetch.Request(request, _opts) + try { + let res = await fetch(req, _opts) + if (_opts.integrity && res.status === 200) { + // we got a 200 response and the user has specified an expected + // integrity value, so wrap the response in an ssri stream to verify it + const integrityStream = ssri.integrityStream({ integrity: _opts.integrity }) + res = new fetch.Response(new MinipassPipeline(res.body, integrityStream), res) + } + + res.headers.set('x-fetch-attempts', attemptNum) + + // do not retry POST requests, or requests with a streaming body + // do retry requests with a 408, 420, 429 or 500+ status in the response + const isStream = Minipass.isStream(req.body) + const isRetriable = req.method !== 'POST' && + !isStream && + ([408, 420, 429].includes(res.status) || res.status >= 500) + + if (isRetriable) { + if (typeof options.onRetry === 'function') + options.onRetry(res) + + return retryHandler(res) + } + + return res + } catch (err) { + const code = (err.code === 'EPROMISERETRY') + ? err.retried.code + : err.code + + // err.retried will be the thing that was thrown from above + // if it's a response, we just got a bad status code and we + // can re-throw to allow the retry + const isRetryError = err.retried instanceof fetch.Response || + (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) + + if (req.method === 'POST' || isRetryError) + throw err + + if (typeof options.onRetry === 'function') + options.onRetry(err) + + return retryHandler(err) + } + }, options.retry).catch((err) => { + // don't reject for http errors, just return them + if (err.status >= 400 && err.type !== 'system') + return err + + throw err + }) +} + +module.exports = remoteFetch diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json index 7e854dcdf0880..e4a26a9cd94df 100644 --- a/node_modules/make-fetch-happen/package.json +++ b/node_modules/make-fetch-happen/package.json @@ -1,21 +1,19 @@ { "name": "make-fetch-happen", - "version": "8.0.14", + "version": "9.0.4", "description": "Opinionated, caching, retrying fetch client", - "main": "index.js", + "main": "lib/index.js", "files": [ - "*.js", - "lib", - "utils" + "lib" ], "scripts": { "preversion": "npm t", "postversion": "npm publish", "prepublishOnly": "git push --follow-tags", - "test": "tap test/*.js", + "test": "tap", "posttest": "npm run lint", "eslint": "eslint", - "lint": "npm run eslint -- *.js utils test", + "lint": "npm run eslint -- lib test", "lintfix": "npm run lint -- --fix" }, "repository": "https://github.com/npm/make-fetch-happen", @@ -36,7 +34,7 @@ "license": "ISC", "dependencies": { "agentkeepalive": "^4.1.3", - "cacache": "^15.0.5", + "cacache": "^15.2.0", "http-cache-semantics": "^4.1.0", "http-proxy-agent": "^4.0.1", "https-proxy-agent": "^5.0.0", @@ -47,26 +45,32 @@ "minipass-fetch": "^1.3.2", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.2", "promise-retry": "^2.0.1", "socks-proxy-agent": "^5.0.0", "ssri": "^8.0.0" }, "devDependencies": { - "eslint": "^7.14.0", - "eslint-plugin-import": "^2.22.1", + "eslint": "^7.26.0", + "eslint-plugin-import": "^2.23.2", "eslint-plugin-node": "^11.1.0", - 
"eslint-plugin-promise": "^4.2.1", + "eslint-plugin-promise": "^5.1.0", "eslint-plugin-standard": "^5.0.0", "mkdirp": "^1.0.4", - "nock": "^11.9.1", + "nock": "^13.0.11", "npmlog": "^4.1.2", "require-inject": "^1.4.2", - "rimraf": "^2.7.1", + "rimraf": "^3.0.2", "safe-buffer": "^5.2.1", - "standard-version": "^7.1.0", - "tap": "^14.11.0" + "standard-version": "^9.3.0", + "tap": "^15.0.9" }, "engines": { "node": ">= 10" + }, + "tap": { + "color": 1, + "files": "test/*.js", + "check-coverage": true } } diff --git a/node_modules/make-fetch-happen/utils/configure-options.js b/node_modules/make-fetch-happen/utils/configure-options.js deleted file mode 100644 index 75ea5d15ecda0..0000000000000 --- a/node_modules/make-fetch-happen/utils/configure-options.js +++ /dev/null @@ -1,32 +0,0 @@ -'use strict' - -const initializeCache = require('./initialize-cache') - -module.exports = function configureOptions (_opts) { - const opts = Object.assign({}, _opts || {}) - opts.method = (opts.method || 'GET').toUpperCase() - - if (!opts.retry) { - // opts.retry was falsy; set default - opts.retry = { retries: 0 } - } else { - if (typeof opts.retry !== 'object') { - // Shorthand - if (typeof opts.retry === 'number') - opts.retry = { retries: opts.retry } - - if (typeof opts.retry === 'string') { - const value = parseInt(opts.retry, 10) - opts.retry = (value) ? { retries: value } : { retries: 0 } - } - } else { - // Set default retries - opts.retry = Object.assign({}, { retries: 0 }, opts.retry) - } - } - - if (opts.cacheManager) - initializeCache(opts) - - return opts -} diff --git a/node_modules/make-fetch-happen/utils/initialize-cache.js b/node_modules/make-fetch-happen/utils/initialize-cache.js deleted file mode 100644 index 9f96bf56226ef..0000000000000 --- a/node_modules/make-fetch-happen/utils/initialize-cache.js +++ /dev/null @@ -1,26 +0,0 @@ -'use strict' - -const isHeaderConditional = require('./is-header-conditional') -// Default cacache-based cache -const Cache = require('../cache') - -module.exports = function initializeCache (opts) { - /** - * NOTE: `opts.cacheManager` is the path to cache - * We're making the assumption that if `opts.cacheManager` *isn't* a string, - * it's a cache object - */ - if (typeof opts.cacheManager === 'string') { - // Need to make a cache object - opts.cacheManager = new Cache(opts.cacheManager, opts) - } - - opts.cache = opts.cache || 'default' - - if (opts.cache === 'default' && isHeaderConditional(opts.headers)) { - // If header list contains `If-Modified-Since`, `If-None-Match`, - // `If-Unmodified-Since`, `If-Match`, or `If-Range`, fetch will set cache - // mode to "no-store" if it is "default". 
- opts.cache = 'no-store' - } -} diff --git a/node_modules/make-fetch-happen/utils/is-header-conditional.js b/node_modules/make-fetch-happen/utils/is-header-conditional.js deleted file mode 100644 index 5081e0ce127e2..0000000000000 --- a/node_modules/make-fetch-happen/utils/is-header-conditional.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict' - -module.exports = function isHeaderConditional (headers) { - if (!headers || typeof headers !== 'object') - return false - - const modifiers = [ - 'if-modified-since', - 'if-none-match', - 'if-unmodified-since', - 'if-match', - 'if-range', - ] - - return Object.keys(headers) - .some(h => modifiers.indexOf(h.toLowerCase()) !== -1) -} diff --git a/node_modules/make-fetch-happen/utils/iterable-to-object.js b/node_modules/make-fetch-happen/utils/iterable-to-object.js deleted file mode 100644 index 1fe5ba65448d6..0000000000000 --- a/node_modules/make-fetch-happen/utils/iterable-to-object.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -module.exports = function iterableToObject (iter) { - const obj = {} - for (const k of iter.keys()) - obj[k] = iter.get(k) - - return obj -} diff --git a/node_modules/make-fetch-happen/utils/make-policy.js b/node_modules/make-fetch-happen/utils/make-policy.js deleted file mode 100644 index 5e884847dd8f4..0000000000000 --- a/node_modules/make-fetch-happen/utils/make-policy.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict' - -const CachePolicy = require('http-cache-semantics') - -const iterableToObject = require('./iterable-to-object') - -module.exports = function makePolicy (req, res) { - const _req = { - url: req.url, - method: req.method, - headers: iterableToObject(req.headers), - } - const _res = { - status: res.status, - headers: iterableToObject(res.headers), - } - - return new CachePolicy(_req, _res, { shared: false }) -} diff --git a/node_modules/make-fetch-happen/warning.js b/node_modules/make-fetch-happen/warning.js deleted file mode 100644 index 2b96024714e3b..0000000000000 --- a/node_modules/make-fetch-happen/warning.js +++ /dev/null @@ -1,24 +0,0 @@ -const url = require('url') - -module.exports = setWarning - -function setWarning (reqOrRes, code, message, replace) { - // Warning = "Warning" ":" 1#warning-value - // warning-value = warn-code SP warn-agent SP warn-text [SP warn-date] - // warn-code = 3DIGIT - // warn-agent = ( host [ ":" port ] ) | pseudonym - // ; the name or pseudonym of the server adding - // ; the Warning header, for use in debugging - // warn-text = quoted-string - // warn-date = <"> HTTP-date <"> - // (https://tools.ietf.org/html/rfc2616#section-14.46) - const host = new url.URL(reqOrRes.url).host - const jsonMessage = JSON.stringify(message) - const jsonDate = JSON.stringify(new Date().toUTCString()) - const header = replace ? 
'set' : 'append' - - reqOrRes.headers[header]( - 'Warning', - `${code} ${host} ${jsonMessage} ${jsonDate}` - ) -} diff --git a/node_modules/mime-db/HISTORY.md b/node_modules/mime-db/HISTORY.md index 56d792f73d234..672d414ca3782 100644 --- a/node_modules/mime-db/HISTORY.md +++ b/node_modules/mime-db/HISTORY.md @@ -1,3 +1,31 @@ +1.49.0 / 2021-07-26 +=================== + + * Add extension `.trig` to `application/trig` + * Add new upstream MIME types + +1.48.0 / 2021-05-30 +=================== + + * Add extension `.mvt` to `application/vnd.mapbox-vector-tile` + * Add new upstream MIME types + * Mark `text/yaml` as compressible + +1.47.0 / 2021-04-01 +=================== + + * Add new upstream MIME types + * Remove ambigious extensions from IANA for `application/*+xml` types + * Update primary extension to `.es` for `application/ecmascript` + +1.46.0 / 2021-02-13 +=================== + + * Add extension `.amr` to `audio/amr` + * Add extension `.m4s` to `video/iso.segment` + * Add extension `.opus` to `audio/ogg` + * Add new upstream MIME types + 1.45.0 / 2020-09-22 =================== diff --git a/node_modules/mime-db/README.md b/node_modules/mime-db/README.md deleted file mode 100644 index f1e639139a84a..0000000000000 --- a/node_modules/mime-db/README.md +++ /dev/null @@ -1,102 +0,0 @@ -# mime-db - -[![NPM Version][npm-version-image]][npm-url] -[![NPM Downloads][npm-downloads-image]][npm-url] -[![Node.js Version][node-image]][node-url] -[![Build Status][travis-image]][travis-url] -[![Coverage Status][coveralls-image]][coveralls-url] - -This is a database of all mime types. -It consists of a single, public JSON file and does not include any logic, -allowing it to remain as un-opinionated as possible with an API. -It aggregates data from the following sources: - -- http://www.iana.org/assignments/media-types/media-types.xhtml -- http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types -- http://hg.nginx.org/nginx/raw-file/default/conf/mime.types - -## Installation - -```bash -npm install mime-db -``` - -### Database Download - -If you're crazy enough to use this in the browser, you can just grab the -JSON file using [jsDelivr](https://www.jsdelivr.com/). It is recommended to -replace `master` with [a release tag](https://github.com/jshttp/mime-db/tags) -as the JSON format may change in the future. - -``` -https://cdn.jsdelivr.net/gh/jshttp/mime-db@master/db.json -``` - -## Usage - -<!-- eslint-disable no-unused-vars --> - -```js -var db = require('mime-db') - -// grab data on .js files -var data = db['application/javascript'] -``` - -## Data Structure - -The JSON file is a map lookup for lowercased mime types. -Each mime type has the following properties: - -- `.source` - where the mime type is defined. - If not set, it's probably a custom media type. - - `apache` - [Apache common media types](http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types) - - `iana` - [IANA-defined media types](http://www.iana.org/assignments/media-types/media-types.xhtml) - - `nginx` - [nginx media types](http://hg.nginx.org/nginx/raw-file/default/conf/mime.types) -- `.extensions[]` - known extensions associated with this mime type. -- `.compressible` - whether a file of this type can be gzipped. -- `.charset` - the default charset associated with this type, if any. - -If unknown, every property could be `undefined`. - -## Contributing - -To edit the database, only make PRs against `src/custom-types.json` or -`src/custom-suffix.json`. 
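To make the data-structure notes above concrete, a small sketch of reading entries straight from the database (it assumes the vendored `mime-db` package; the values shown in comments are illustrative):

```js
const db = require('mime-db')

// each key is a lowercased MIME type; each value may carry
// source, extensions, compressible and charset, any of which can be undefined
const json = db['application/json']
console.log(json.source)        // 'iana'
console.log(json.compressible)  // true
console.log(json.extensions)    // something like [ 'json', 'map' ]

// a type with no registered extensions simply omits the property
const a2l = db['application/a2l']
console.log(a2l.extensions)     // undefined
```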
- -The `src/custom-types.json` file is a JSON object with the MIME type as the -keys and the values being an object with the following keys: - -- `compressible` - leave out if you don't know, otherwise `true`/`false` to - indicate whether the data represented by the type is typically compressible. -- `extensions` - include an array of file extensions that are associated with - the type. -- `notes` - human-readable notes about the type, typically what the type is. -- `sources` - include an array of URLs of where the MIME type and the associated - extensions are sourced from. This needs to be a [primary source](https://en.wikipedia.org/wiki/Primary_source); - links to type aggregating sites and Wikipedia are _not acceptable_. - -To update the build, run `npm run build`. - -### Adding Custom Media Types - -The best way to get new media types included in this library is to register -them with the IANA. The community registration procedure is outlined in -[RFC 6838 section 5](http://tools.ietf.org/html/rfc6838#section-5). Types -registered with the IANA are automatically pulled into this library. - -If that is not possible / feasible, they can be added directly here as a -"custom" type. To do this, it is required to have a primary source that -definitively lists the media type. If an extension is going to be listed as -associateed with this media type, the source must definitively link the -media type and extension as well. - -[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-db/master -[coveralls-url]: https://coveralls.io/r/jshttp/mime-db?branch=master -[node-image]: https://badgen.net/npm/node/mime-db -[node-url]: https://nodejs.org/en/download -[npm-downloads-image]: https://badgen.net/npm/dm/mime-db -[npm-url]: https://npmjs.org/package/mime-db -[npm-version-image]: https://badgen.net/npm/v/mime-db -[travis-image]: https://badgen.net/travis/jshttp/mime-db/master -[travis-url]: https://travis-ci.org/jshttp/mime-db diff --git a/node_modules/mime-db/db.json b/node_modules/mime-db/db.json index 05cfa68735f0a..911b0f865b704 100644 --- a/node_modules/mime-db/db.json +++ b/node_modules/mime-db/db.json @@ -11,6 +11,14 @@ "source": "iana", "compressible": true }, + "application/3gpphal+json": { + "source": "iana", + "compressible": true + }, + "application/3gpphalforms+json": { + "source": "iana", + "compressible": true + }, "application/a2l": { "source": "iana" }, @@ -236,6 +244,9 @@ "application/cfw": { "source": "iana" }, + "application/clr": { + "source": "iana" + }, "application/clue+xml": { "source": "iana", "compressible": true @@ -386,7 +397,7 @@ "application/ecmascript": { "source": "iana", "compressible": true, - "extensions": ["ecma","es"] + "extensions": ["es","ecma"] }, "application/edi-consent": { "source": "iana" @@ -402,6 +413,15 @@ "application/efi": { "source": "iana" }, + "application/elm+json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/elm+xml": { + "source": "iana", + "compressible": true + }, "application/emergencycalldata.cap+xml": { "source": "iana", "charset": "UTF-8", @@ -663,6 +683,10 @@ "source": "iana", "compressible": true }, + "application/jscalendar+json": { + "source": "iana", + "compressible": true + }, "application/json": { "source": "iana", "charset": "UTF-8", @@ -753,6 +777,7 @@ "extensions": ["mads"] }, "application/manifest+json": { + "source": "iana", "charset": "UTF-8", "compressible": true, "extensions": ["webmanifest"] @@ -872,6 +897,9 @@ "application/mipc": { "source": "iana" }, + 
"application/missing-blocks+cbor-seq": { + "source": "iana" + }, "application/mmt-aei+xml": { "source": "iana", "compressible": true, @@ -918,13 +946,11 @@ }, "application/mrb-consumer+xml": { "source": "iana", - "compressible": true, - "extensions": ["xdf"] + "compressible": true }, "application/mrb-publish+xml": { "source": "iana", - "compressible": true, - "extensions": ["xdf"] + "compressible": true }, "application/msc-ivr+xml": { "source": "iana", @@ -985,6 +1011,9 @@ "application/nss": { "source": "iana" }, + "application/oauth-authz-req+jwt": { + "source": "iana" + }, "application/ocsp-request": { "source": "iana" }, @@ -1037,6 +1066,10 @@ "source": "iana", "extensions": ["oxps"] }, + "application/p21+zip": { + "source": "iana", + "compressible": false + }, "application/p2p-overlay+xml": { "source": "iana", "compressible": true, @@ -1171,6 +1204,10 @@ "source": "iana", "extensions": ["cww"] }, + "application/prs.cyn": { + "source": "iana", + "charset": "7-BIT" + }, "application/prs.hpub+zip": { "source": "iana", "compressible": false @@ -1324,6 +1361,10 @@ "source": "iana", "compressible": true }, + "application/sarif-external-properties+json": { + "source": "iana", + "compressible": true + }, "application/sbe": { "source": "iana" }, @@ -1607,7 +1648,8 @@ "source": "iana" }, "application/trig": { - "source": "iana" + "source": "iana", + "extensions": ["trig"] }, "application/ttml+xml": { "source": "iana", @@ -1678,6 +1720,9 @@ "application/vnd.3gpp-v2x-local-service-information": { "source": "iana" }, + "application/vnd.3gpp.5gnas": { + "source": "iana" + }, "application/vnd.3gpp.access-transfer-events+xml": { "source": "iana", "compressible": true @@ -1690,6 +1735,15 @@ "source": "iana", "compressible": true }, + "application/vnd.3gpp.gtpc": { + "source": "iana" + }, + "application/vnd.3gpp.interworking-data": { + "source": "iana" + }, + "application/vnd.3gpp.lpp": { + "source": "iana" + }, "application/vnd.3gpp.mc-signalling-ear": { "source": "iana" }, @@ -1799,6 +1853,12 @@ "source": "iana", "compressible": true }, + "application/vnd.3gpp.ngap": { + "source": "iana" + }, + "application/vnd.3gpp.pfcp": { + "source": "iana" + }, "application/vnd.3gpp.pic-bw-large": { "source": "iana", "extensions": ["plb"] @@ -1811,6 +1871,9 @@ "source": "iana", "extensions": ["pvb"] }, + "application/vnd.3gpp.s1ap": { + "source": "iana" + }, "application/vnd.3gpp.sms": { "source": "iana" }, @@ -1905,6 +1968,9 @@ "application/vnd.afpc.afplinedata-pagedef": { "source": "iana" }, + "application/vnd.afpc.cmoca-cmresource": { + "source": "iana" + }, "application/vnd.afpc.foca-charset": { "source": "iana" }, @@ -1917,6 +1983,9 @@ "application/vnd.afpc.modca": { "source": "iana" }, + "application/vnd.afpc.modca-cmtable": { + "source": "iana" + }, "application/vnd.afpc.modca-formdef": { "source": "iana" }, @@ -1993,6 +2062,12 @@ "source": "iana", "extensions": ["atx"] }, + "application/vnd.apache.arrow.file": { + "source": "iana" + }, + "application/vnd.apache.arrow.stream": { + "source": "iana" + }, "application/vnd.apache.thrift.binary": { "source": "iana" }, @@ -2292,6 +2367,12 @@ "application/vnd.crypto-shade-file": { "source": "iana" }, + "application/vnd.cryptomator.encrypted": { + "source": "iana" + }, + "application/vnd.cryptomator.vault": { + "source": "iana" + }, "application/vnd.ctc-posml": { "source": "iana", "extensions": ["pml"] @@ -2334,6 +2415,14 @@ "application/vnd.cybank": { "source": "iana" }, + "application/vnd.cyclonedx+json": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.cyclonedx+xml": { + "source": "iana", + "compressible": true + }, "application/vnd.d2l.coursepackage1p0+zip": { "source": "iana", "compressible": false @@ -2779,6 +2868,19 @@ "source": "iana", "extensions": ["fsc"] }, + "application/vnd.fujifilm.fb.docuworks": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.docuworks.binder": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.docuworks.container": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.jfi+xml": { + "source": "iana", + "compressible": true + }, "application/vnd.fujitsu.oasys": { "source": "iana", "extensions": ["oas"] @@ -2857,6 +2959,9 @@ "source": "iana", "extensions": ["ggb"] }, + "application/vnd.geogebra.slides": { + "source": "iana" + }, "application/vnd.geogebra.tool": { "source": "iana", "extensions": ["ggt"] @@ -3386,7 +3491,8 @@ "extensions": ["portpkg"] }, "application/vnd.mapbox-vector-tile": { - "source": "iana" + "source": "iana", + "extensions": ["mvt"] }, "application/vnd.marlin.drm.actiontoken+xml": { "source": "iana", @@ -3746,6 +3852,9 @@ "source": "iana", "compressible": true }, + "application/vnd.nebumind.line": { + "source": "iana" + }, "application/vnd.nervana": { "source": "iana" }, @@ -4194,6 +4303,9 @@ "compressible": true, "extensions": ["osm"] }, + "application/vnd.opentimestamps.ots": { + "source": "iana" + }, "application/vnd.openxmlformats-officedocument.custom-properties+xml": { "source": "iana", "compressible": true @@ -4725,6 +4837,9 @@ "application/vnd.renlearn.rlprint": { "source": "iana" }, + "application/vnd.resilient.logic": { + "source": "iana" + }, "application/vnd.restful+json": { "source": "iana", "compressible": true @@ -4812,6 +4927,10 @@ "source": "iana", "extensions": ["see"] }, + "application/vnd.seis+json": { + "source": "iana", + "compressible": true + }, "application/vnd.sema": { "source": "iana", "extensions": ["sema"] @@ -5185,6 +5304,10 @@ "application/vnd.verimatrix.vcas": { "source": "iana" }, + "application/vnd.veritone.aion+json": { + "source": "iana", + "compressible": true + }, "application/vnd.veryant.thin": { "source": "iana" }, @@ -5232,6 +5355,9 @@ "source": "iana", "extensions": ["wtb"] }, + "application/vnd.wfa.dpp": { + "source": "iana" + }, "application/vnd.wfa.p2p": { "source": "iana" }, @@ -5387,6 +5513,7 @@ "source": "iana" }, "application/wasm": { + "source": "iana", "compressible": true, "extensions": ["wasm"] }, @@ -6024,8 +6151,7 @@ }, "application/xcap-error+xml": { "source": "iana", - "compressible": true, - "extensions": ["xer"] + "compressible": true }, "application/xcap-ns+xml": { "source": "iana", @@ -6166,7 +6292,8 @@ "extensions": ["adp"] }, "audio/amr": { - "source": "iana" + "source": "iana", + "extensions": ["amr"] }, "audio/amr-wb": { "source": "iana" @@ -6415,7 +6542,7 @@ "audio/ogg": { "source": "iana", "compressible": false, - "extensions": ["oga","ogg","spx"] + "extensions": ["oga","ogg","spx","opus"] }, "audio/opus": { "source": "iana" @@ -6463,6 +6590,9 @@ "source": "apache", "extensions": ["s3m"] }, + "audio/scip": { + "source": "iana" + }, "audio/silk": { "source": "apache", "extensions": ["sil"] @@ -6806,6 +6936,7 @@ "source": "iana" }, "image/avif": { + "source": "iana", "compressible": false, "extensions": ["avif"] }, @@ -7295,6 +7426,16 @@ "source": "iana", "extensions": ["obj"] }, + "model/step+zip": { + "source": "iana", + "compressible": false, + "extensions": ["stpz"] + }, + "model/step-xml+zip": { + "source": "iana", + "compressible": false, + "extensions": ["stpxz"] + }, "model/stl": { 
"source": "iana", "extensions": ["stl"] @@ -7345,9 +7486,16 @@ "source": "iana", "extensions": ["x_t"] }, + "model/vnd.pytha.pyox": { + "source": "iana" + }, "model/vnd.rosette.annotated-data-model": { "source": "iana" }, + "model/vnd.sap.vds": { + "source": "iana", + "extensions": ["vds"] + }, "model/vnd.usdz+zip": { "source": "iana", "compressible": false, @@ -7463,6 +7611,15 @@ "text/coffeescript": { "extensions": ["coffee","litcoffee"] }, + "text/cql": { + "source": "iana" + }, + "text/cql-expression": { + "source": "iana" + }, + "text/cql-identifier": { + "source": "iana" + }, "text/css": { "source": "iana", "charset": "UTF-8", @@ -7492,6 +7649,9 @@ "text/enriched": { "source": "iana" }, + "text/fhirpath": { + "source": "iana" + }, "text/flexfec": { "source": "iana" }, @@ -7611,6 +7771,7 @@ "source": "iana" }, "text/shex": { + "source": "iana", "extensions": ["shex"] }, "text/slim": { @@ -7882,6 +8043,7 @@ "source": "iana" }, "text/yaml": { + "compressible": true, "extensions": ["yaml","yml"] }, "video/1d-interleaved-parityfec": { @@ -7898,6 +8060,9 @@ "source": "iana", "extensions": ["3g2"] }, + "video/av1": { + "source": "iana" + }, "video/bmpeg": { "source": "iana" }, @@ -7913,6 +8078,9 @@ "video/encaprtp": { "source": "iana" }, + "video/ffv1": { + "source": "iana" + }, "video/flexfec": { "source": "iana" }, @@ -7944,7 +8112,8 @@ "source": "iana" }, "video/iso.segment": { - "source": "iana" + "source": "iana", + "extensions": ["m4s"] }, "video/jpeg": { "source": "iana", @@ -8024,6 +8193,9 @@ "video/rtx": { "source": "iana" }, + "video/scip": { + "source": "iana" + }, "video/smpte291": { "source": "iana" }, @@ -8161,6 +8333,9 @@ "video/vp8": { "source": "iana" }, + "video/vp9": { + "source": "iana" + }, "video/webm": { "source": "apache", "compressible": false, diff --git a/node_modules/mime-db/package.json b/node_modules/mime-db/package.json index 243b45f7978cc..e546efa728fd1 100644 --- a/node_modules/mime-db/package.json +++ b/node_modules/mime-db/package.json @@ -1,7 +1,7 @@ { "name": "mime-db", "description": "Media Type Database", - "version": "1.45.0", + "version": "1.49.0", "contributors": [ "Douglas Christopher Wilson <doug@somethingdoug.com>", "Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)", @@ -22,16 +22,16 @@ "bluebird": "3.7.2", "co": "4.6.0", "cogent": "1.0.1", - "csv-parse": "4.12.0", - "eslint": "7.9.0", - "eslint-config-standard": "14.1.1", - "eslint-plugin-import": "2.22.0", - "eslint-plugin-markdown": "1.0.2", + "csv-parse": "4.16.0", + "eslint": "7.31.0", + "eslint-config-standard": "15.0.1", + "eslint-plugin-import": "2.23.4", + "eslint-plugin-markdown": "2.2.0", "eslint-plugin-node": "11.1.0", - "eslint-plugin-promise": "4.2.1", - "eslint-plugin-standard": "4.0.1", + "eslint-plugin-promise": "5.1.0", + "eslint-plugin-standard": "4.1.0", "gnode": "0.1.2", - "mocha": "8.1.3", + "mocha": "9.0.3", "nyc": "15.1.0", "raw-body": "2.4.1", "stream-to-array": "2.3.0" @@ -49,10 +49,10 @@ "scripts": { "build": "node scripts/build", "fetch": "node scripts/fetch-apache && gnode scripts/fetch-iana && node scripts/fetch-nginx", - "lint": "eslint --plugin markdown --ext js,md .", + "lint": "eslint .", "test": "mocha --reporter spec --bail --check-leaks test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", "test-cov": "nyc --reporter=html --reporter=text npm test", - "test-travis": "nyc --reporter=text npm test", "update": "npm run fetch && npm run build", "version": "node scripts/version-history.js && git add HISTORY.md" } diff --git 
a/node_modules/mime-types/HISTORY.md b/node_modules/mime-types/HISTORY.md index af65fdb103c6d..977ffbb15d865 100644 --- a/node_modules/mime-types/HISTORY.md +++ b/node_modules/mime-types/HISTORY.md @@ -1,3 +1,35 @@ +2.1.32 / 2021-07-27 +=================== + + * deps: mime-db@1.49.0 + - Add extension `.trig` to `application/trig` + - Add new upstream MIME types + +2.1.31 / 2021-06-01 +=================== + + * deps: mime-db@1.48.0 + - Add extension `.mvt` to `application/vnd.mapbox-vector-tile` + - Add new upstream MIME types + - Mark `text/yaml` as compressible + +2.1.30 / 2021-04-02 +=================== + + * deps: mime-db@1.47.0 + - Add extension `.amr` to `audio/amr` + - Remove ambigious extensions from IANA for `application/*+xml` types + - Update primary extension to `.es` for `application/ecmascript` + +2.1.29 / 2021-02-17 +=================== + + * deps: mime-db@1.46.0 + - Add extension `.amr` to `audio/amr` + - Add extension `.m4s` to `video/iso.segment` + - Add extension `.opus` to `audio/ogg` + - Add new upstream MIME types + 2.1.28 / 2021-01-01 =================== diff --git a/node_modules/mime-types/README.md b/node_modules/mime-types/README.md deleted file mode 100644 index e8bf8ebcef4b2..0000000000000 --- a/node_modules/mime-types/README.md +++ /dev/null @@ -1,123 +0,0 @@ -# mime-types - -[![NPM Version][npm-version-image]][npm-url] -[![NPM Downloads][npm-downloads-image]][npm-url] -[![Node.js Version][node-version-image]][node-version-url] -[![Build Status][ci-image]][ci-url] -[![Test Coverage][coveralls-image]][coveralls-url] - -The ultimate javascript content-type utility. - -Similar to [the `mime@1.x` module](https://www.npmjs.com/package/mime), except: - -- __No fallbacks.__ Instead of naively returning the first available type, - `mime-types` simply returns `false`, so do - `var type = mime.lookup('unrecognized') || 'application/octet-stream'`. -- No `new Mime()` business, so you could do `var lookup = require('mime-types').lookup`. -- No `.define()` functionality -- Bug fixes for `.lookup(path)` - -Otherwise, the API is compatible with `mime` 1.x. - -## Install - -This is a [Node.js](https://nodejs.org/en/) module available through the -[npm registry](https://www.npmjs.com/). Installation is done using the -[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): - -```sh -$ npm install mime-types -``` - -## Adding Types - -All mime types are based on [mime-db](https://www.npmjs.com/package/mime-db), -so open a PR there if you'd like to add mime types. - -## API - -<!-- eslint-disable no-unused-vars --> - -```js -var mime = require('mime-types') -``` - -All functions return `false` if input is invalid or not found. - -### mime.lookup(path) - -Lookup the content-type associated with a file. - -<!-- eslint-disable no-undef --> - -```js -mime.lookup('json') // 'application/json' -mime.lookup('.md') // 'text/markdown' -mime.lookup('file.html') // 'text/html' -mime.lookup('folder/file.js') // 'application/javascript' -mime.lookup('folder/.htaccess') // false - -mime.lookup('cats') // false -``` - -### mime.contentType(type) - -Create a full content-type header given a content-type or extension. -When given an extension, `mime.lookup` is used to get the matching -content-type, otherwise the given content-type is used. Then if the -content-type does not already have a `charset` parameter, `mime.charset` -is used to get the default charset and add to the returned content-type. 
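The README snippets below show `contentType` on its own; as an additional end-to-end sketch tying `lookup` and `contentType` together (the tiny server, port 0, and the fallback type are illustrative, and it assumes the vendored `mime-types` package plus Node's built-in `http` module):

```js
const http = require('http')
const mime = require('mime-types')

const server = http.createServer((req, res) => {
  // lookup() returns false for unknown paths, so fall back explicitly
  const type = mime.lookup(req.url) || 'application/octet-stream'
  res.setHeader('Content-Type', mime.contentType(type))
  res.end(`would serve ${req.url} as ${mime.contentType(type)}\n`)
})

server.listen(0, () => console.log('listening on port', server.address().port))
```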
- -<!-- eslint-disable no-undef --> - -```js -mime.contentType('markdown') // 'text/x-markdown; charset=utf-8' -mime.contentType('file.json') // 'application/json; charset=utf-8' -mime.contentType('text/html') // 'text/html; charset=utf-8' -mime.contentType('text/html; charset=iso-8859-1') // 'text/html; charset=iso-8859-1' - -// from a full path -mime.contentType(path.extname('/path/to/file.json')) // 'application/json; charset=utf-8' -``` - -### mime.extension(type) - -Get the default extension for a content-type. - -<!-- eslint-disable no-undef --> - -```js -mime.extension('application/octet-stream') // 'bin' -``` - -### mime.charset(type) - -Lookup the implied default charset of a content-type. - -<!-- eslint-disable no-undef --> - -```js -mime.charset('text/markdown') // 'UTF-8' -``` - -### var type = mime.types[extension] - -A map of content-types by extension. - -### [extensions...] = mime.extensions[type] - -A map of extensions by content-type. - -## License - -[MIT](LICENSE) - -[ci-image]: https://badgen.net/github/checks/jshttp/mime-types/master?label=ci -[ci-url]: https://github.com/jshttp/mime-types/actions?query=workflow%3Aci -[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-types/master -[coveralls-url]: https://coveralls.io/r/jshttp/mime-types?branch=master -[node-version-image]: https://badgen.net/npm/node/mime-types -[node-version-url]: https://nodejs.org/en/download -[npm-downloads-image]: https://badgen.net/npm/dm/mime-types -[npm-url]: https://npmjs.org/package/mime-types -[npm-version-image]: https://badgen.net/npm/v/mime-types diff --git a/node_modules/mime-types/package.json b/node_modules/mime-types/package.json index 26fa6f694ac5b..7567acd5508df 100644 --- a/node_modules/mime-types/package.json +++ b/node_modules/mime-types/package.json @@ -1,7 +1,7 @@ { "name": "mime-types", "description": "The ultimate javascript content-type utility.", - "version": "2.1.28", + "version": "2.1.32", "contributors": [ "Douglas Christopher Wilson <doug@somethingdoug.com>", "Jeremiah Senkpiel <fishrock123@rocketmail.com> (https://searchbeam.jit.su)", @@ -14,17 +14,17 @@ ], "repository": "jshttp/mime-types", "dependencies": { - "mime-db": "1.45.0" + "mime-db": "1.49.0" }, "devDependencies": { - "eslint": "7.17.0", + "eslint": "7.31.0", "eslint-config-standard": "14.1.1", - "eslint-plugin-import": "2.22.1", - "eslint-plugin-markdown": "1.0.2", + "eslint-plugin-import": "2.23.4", + "eslint-plugin-markdown": "2.2.0", "eslint-plugin-node": "11.1.0", - "eslint-plugin-promise": "4.2.1", + "eslint-plugin-promise": "5.1.0", "eslint-plugin-standard": "4.1.0", - "mocha": "8.2.1", + "mocha": "9.0.3", "nyc": "15.1.0" }, "files": [ @@ -36,7 +36,7 @@ "node": ">= 0.6" }, "scripts": { - "lint": "eslint --plugin markdown --ext js,md .", + "lint": "eslint .", "test": "mocha --reporter spec test/test.js", "test-ci": "nyc --reporter=lcov --reporter=text npm test", "test-cov": "nyc --reporter=html --reporter=text npm test" diff --git a/node_modules/minimatch/README.md b/node_modules/minimatch/README.md deleted file mode 100644 index ad72b8133eaf5..0000000000000 --- a/node_modules/minimatch/README.md +++ /dev/null @@ -1,209 +0,0 @@ -# minimatch - -A minimal matching utility. - -[![Build Status](https://secure.travis-ci.org/isaacs/minimatch.svg)](http://travis-ci.org/isaacs/minimatch) - - -This is the matching library used internally by npm. - -It works by converting glob expressions into JavaScript `RegExp` -objects. 
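Since the point of the intro above is that patterns are compiled into `RegExp` objects, a tiny sketch of inspecting that conversion with `makeRe` (the exact regular expression printed varies by minimatch version, so the comment only hints at its shape):

```js
const minimatch = require('minimatch')

// makeRe() exposes the RegExp a pattern compiles to
const re = minimatch.makeRe('*.js')
console.log(re)                // e.g. /^(?:(?!\.)(?=.)[^/]*?\.js)$/
console.log(re.test('a.js'))   // true
console.log(re.test('a/b.js')) // false, '*' does not cross '/'
```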
- -## Usage - -```javascript -var minimatch = require("minimatch") - -minimatch("bar.foo", "*.foo") // true! -minimatch("bar.foo", "*.bar") // false! -minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy! -``` - -## Features - -Supports these glob features: - -* Brace Expansion -* Extended glob matching -* "Globstar" `**` matching - -See: - -* `man sh` -* `man bash` -* `man 3 fnmatch` -* `man 5 gitignore` - -## Minimatch Class - -Create a minimatch object by instantiating the `minimatch.Minimatch` class. - -```javascript -var Minimatch = require("minimatch").Minimatch -var mm = new Minimatch(pattern, options) -``` - -### Properties - -* `pattern` The original pattern the minimatch object represents. -* `options` The options supplied to the constructor. -* `set` A 2-dimensional array of regexp or string expressions. - Each row in the - array corresponds to a brace-expanded pattern. Each item in the row - corresponds to a single path-part. For example, the pattern - `{a,b/c}/d` would expand to a set of patterns like: - - [ [ a, d ] - , [ b, c, d ] ] - - If a portion of the pattern doesn't have any "magic" in it - (that is, it's something like `"foo"` rather than `fo*o?`), then it - will be left as a string rather than converted to a regular - expression. - -* `regexp` Created by the `makeRe` method. A single regular expression - expressing the entire pattern. This is useful in cases where you wish - to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled. -* `negate` True if the pattern is negated. -* `comment` True if the pattern is a comment. -* `empty` True if the pattern is `""`. - -### Methods - -* `makeRe` Generate the `regexp` member if necessary, and return it. - Will return `false` if the pattern is invalid. -* `match(fname)` Return true if the filename matches the pattern, or - false otherwise. -* `matchOne(fileArray, patternArray, partial)` Take a `/`-split - filename, and match it against a single row in the `regExpSet`. This - method is mainly for internal use, but is exposed so that it can be - used by a glob-walker that needs to avoid excessive filesystem calls. - -All other methods are internal, and will be called as necessary. - -### minimatch(path, pattern, options) - -Main export. Tests a path against the pattern using the options. - -```javascript -var isJS = minimatch(file, "*.js", { matchBase: true }) -``` - -### minimatch.filter(pattern, options) - -Returns a function that tests its -supplied argument, suitable for use with `Array.filter`. Example: - -```javascript -var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true})) -``` - -### minimatch.match(list, pattern, options) - -Match against the list of -files, in the style of fnmatch or glob. If nothing is matched, and -options.nonull is set, then return a list containing the pattern itself. - -```javascript -var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})) -``` - -### minimatch.makeRe(pattern, options) - -Make a regular expression object from the pattern. - -## Options - -All options are `false` by default. - -### debug - -Dump a ton of stuff to stderr. - -### nobrace - -Do not expand `{a,b}` and `{1..3}` brace sets. - -### noglobstar - -Disable `**` matching against multiple folder names. - -### dot - -Allow patterns to match filenames starting with a period, even if -the pattern does not explicitly have a period in that spot. - -Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot` -is set. 
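A quick sketch of the `dot` behavior just described (assuming the vendored minimatch package):

```js
const minimatch = require('minimatch')

// dotfiles are skipped by default...
console.log(minimatch('a/.d/b', 'a/**/b'))                 // false
// ...but matched once `dot` is set
console.log(minimatch('a/.d/b', 'a/**/b', { dot: true }))  // true
```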
- -### noext - -Disable "extglob" style patterns like `+(a|b)`. - -### nocase - -Perform a case-insensitive match. - -### nonull - -When a match is not found by `minimatch.match`, return a list containing -the pattern itself if this option is set. When not set, an empty list -is returned if there are no matches. - -### matchBase - -If set, then patterns without slashes will be matched -against the basename of the path if it contains slashes. For example, -`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. - -### nocomment - -Suppress the behavior of treating `#` at the start of a pattern as a -comment. - -### nonegate - -Suppress the behavior of treating a leading `!` character as negation. - -### flipNegate - -Returns from negate expressions the same as if they were not negated. -(Ie, true on a hit, false on a miss.) - - -## Comparisons to other fnmatch/glob implementations - -While strict compliance with the existing standards is a worthwhile -goal, some discrepancies exist between minimatch and other -implementations, and are intentional. - -If the pattern starts with a `!` character, then it is negated. Set the -`nonegate` flag to suppress this behavior, and treat leading `!` -characters normally. This is perhaps relevant if you wish to start the -pattern with a negative extglob pattern like `!(a|B)`. Multiple `!` -characters at the start of a pattern will negate the pattern multiple -times. - -If a pattern starts with `#`, then it is treated as a comment, and -will not match anything. Use `\#` to match a literal `#` at the -start of a line, or set the `nocomment` flag to suppress this behavior. - -The double-star character `**` is supported by default, unless the -`noglobstar` flag is set. This is supported in the manner of bsdglob -and bash 4.1, where `**` only has special significance if it is the only -thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but -`a/**b` will not. - -If an escaped pattern has no matches, and the `nonull` flag is set, -then minimatch.match returns the pattern as-provided, rather than -interpreting the character escapes. For example, -`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than -`"*a?"`. This is akin to setting the `nullglob` option in bash, except -that it does not resolve escaped pattern characters. - -If brace expansion is not disabled, then it is performed before any -other interpretation of the glob pattern. Thus, a pattern like -`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded -**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are -checked for validity. Since those two are valid, matching proceeds. diff --git a/node_modules/minipass-collect/README.md b/node_modules/minipass-collect/README.md deleted file mode 100644 index ae1c3dacaa066..0000000000000 --- a/node_modules/minipass-collect/README.md +++ /dev/null @@ -1,48 +0,0 @@ -# minipass-collect - -A Minipass stream that collects all the data into a single chunk - -Note that this buffers ALL data written to it, so it's only good for -situations where you are sure the entire stream fits in memory. - -Note: this is primarily useful for the `Collect.PassThrough` class, since -Minipass streams already have a `.collect()` method which returns a promise -that resolves to the array of chunks, and a `.concat()` method that returns -the data concatenated into a single Buffer or String. 
- -## USAGE - -```js -const Collect = require('minipass-collect') - -const collector = new Collect() -collector.on('data', allTheData => { - console.log('all the data!', allTheData) -}) - -someSourceOfData.pipe(collector) - -// note that you can also simply do: -someSourceOfData.pipe(new Minipass()).concat().then(data => ...) -// or even, if someSourceOfData is a Minipass: -someSourceOfData.concat().then(data => ...) -// but you might prefer to have it stream-shaped rather than -// Promise-shaped in some scenarios. -``` - -If you want to collect the data, but _also_ act as a passthrough stream, -then use `Collect.PassThrough` instead (for example to memoize streaming -responses), and listen on the `collect` event. - -```js -const Collect = require('minipass-collect') - -const collector = new Collect.PassThrough() -collector.on('collect', allTheData => { - console.log('all the data!', allTheData) -}) - -someSourceOfData.pipe(collector).pipe(someOtherStream) -``` - -All [minipass options](http://npm.im/minipass) are supported. diff --git a/node_modules/minipass-fetch/README.md b/node_modules/minipass-fetch/README.md deleted file mode 100644 index 925e6bec3f15d..0000000000000 --- a/node_modules/minipass-fetch/README.md +++ /dev/null @@ -1,29 +0,0 @@ -# minipass-fetch - -An implementation of window.fetch in Node.js using Minipass streams - -This is a fork (or more precisely, a reimplementation) of -[node-fetch](http://npm.im/node-fetch). All streams have been replaced -with [minipass streams](http://npm.im/minipass). - -The goal of this module is to stay in sync with the API presented by -`node-fetch`, with the exception of the streaming interface provided. - -## Why - -Minipass streams are faster and more deterministic in their timing contract -than node-core streams, making them a better fit for many server-side use -cases. - -## API - -See [node-fetch](http://npm.im/node-fetch) - -Differences from `node-fetch` (and, by extension, from the WhatWG Fetch -specification): - -- Returns [minipass](http://npm.im/minipass) streams instead of node-core - streams. -- Supports the full set of [TLS Options that may be provided to - `https.request()`](https://nodejs.org/api/https.html#https_https_request_options_callback) - when making `https` requests. diff --git a/node_modules/minipass-fetch/lib/index.js b/node_modules/minipass-fetch/lib/index.js index d6ed57942e80f..2ffcba8510554 100644 --- a/node_modules/minipass-fetch/lib/index.js +++ b/node_modules/minipass-fetch/lib/index.js @@ -94,6 +94,19 @@ const fetch = (url, opts) => { } req.on('error', er => { + // if a 'response' event is emitted before the 'error' event, then by the + // time this handler is run it's too late to reject the Promise for the + // response. instead, we forward the error event to the response stream + // so that the error will surface to the user when they try to consume + // the body. this is done as a side effect of aborting the request except + // for in windows, where we must forward the event manually, otherwise + // there is no longer a ref'd socket attached to the request and the + // stream never ends so the event loop runs out of work and the process + // exits without warning. 
+ // coverage skipped here due to the difficulty in testing + // istanbul ignore next + if (req.res) + req.res.emit('error', er) reject(new FetchError(`request to ${request.url} failed, reason: ${ er.message}`, 'system', er)) finalize() @@ -286,8 +299,16 @@ const fetch = (url, opts) => { // for br - if (codings == 'br' && typeof zlib.BrotliDecompress === 'function') { - const decoder = new zlib.BrotliDecompress() + if (codings == 'br') { + // ignoring coverage so tests don't have to fake support (or lack of) for brotli + // istanbul ignore next + try { + var decoder = new zlib.BrotliDecompress() + } catch (err) { + reject(err) + finalize() + return + } // exceedingly rare that the stream would have an error, // but just in case we proxy it to the stream in use. body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) diff --git a/node_modules/minipass-fetch/lib/request.js b/node_modules/minipass-fetch/lib/request.js index c5208a7fc1300..173f415d18e7b 100644 --- a/node_modules/minipass-fetch/lib/request.js +++ b/node_modules/minipass-fetch/lib/request.js @@ -77,6 +77,7 @@ class Request extends Body { crl, dhparam, ecdhCurve, + family, honorCipherOrder, key, passphrase, @@ -101,6 +102,7 @@ class Request extends Body { crl, dhparam, ecdhCurve, + family, honorCipherOrder, key, passphrase, @@ -208,6 +210,7 @@ class Request extends Body { crl, dhparam, ecdhCurve, + family, honorCipherOrder, key, passphrase, @@ -234,6 +237,7 @@ class Request extends Body { crl, dhparam, ecdhCurve, + family, honorCipherOrder, key, passphrase, diff --git a/node_modules/minipass-fetch/package.json b/node_modules/minipass-fetch/package.json index df48f372a6079..64dab7816bd12 100644 --- a/node_modules/minipass-fetch/package.json +++ b/node_modules/minipass-fetch/package.json @@ -1,6 +1,6 @@ { "name": "minipass-fetch", - "version": "1.3.3", + "version": "1.3.4", "description": "An implementation of window.fetch in Node.js using Minipass streams", "license": "MIT", "main": "lib/index.js", diff --git a/node_modules/minipass-flush/README.md b/node_modules/minipass-flush/README.md deleted file mode 100644 index 7eea40013a08d..0000000000000 --- a/node_modules/minipass-flush/README.md +++ /dev/null @@ -1,47 +0,0 @@ -# minipass-flush - -A Minipass stream that calls a flush function before emitting 'end' - -## USAGE - -```js -const Flush = require('minipass-flush') -cons f = new Flush({ - flush (cb) { - // call the cb when done, or return a promise - // the 'end' event will wait for it, along with - // close, finish, and prefinish. - // call the cb with an error, or return a rejecting - // promise to emit 'error' instead of doing the 'end' - return rerouteAllEncryptions().then(() => clearAllChannels()) - }, - // all other minipass options accepted as well -}) - -someDataSource.pipe(f).on('end', () => { - // proper flushing has been accomplished -}) - -// Or as a subclass implementing a 'flush' method: -class MyFlush extends Flush { - flush (cb) { - // old fashioned callback style! - rerouteAllEncryptions(er => { - if (er) - return cb(er) - clearAllChannels(er => { - if (er) - cb(er) - cb() - }) - }) - } -} -``` - -That's about it. - -If your `flush` method doesn't have to do anything asynchronous, then it's -better to call the callback right away in this tick, rather than returning -`Promise.resolve()`, so that the `end` event can happen as soon as -possible. 
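As a quick illustration of that last point, here is a minimal sketch (assuming only the `minipass-flush` API described in the README above) of a flush function with no asynchronous work, calling its callback in the same tick so the `end` event is not deferred:

```js
const Flush = require('minipass-flush')

const f = new Flush({
  flush (cb) {
    // nothing asynchronous to do, so call back right away in this tick
    cb()
  },
})

f.on('data', chunk => console.log('chunk:', chunk.toString()))
f.on('end', () => console.log('flushed and ended'))
f.end('some data')
```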
diff --git a/node_modules/minipass-json-stream/README.md b/node_modules/minipass-json-stream/README.md deleted file mode 100644 index 79864a778fa33..0000000000000 --- a/node_modules/minipass-json-stream/README.md +++ /dev/null @@ -1,189 +0,0 @@ -# minipass-json-stream - -Like [JSONStream](http://npm.im/JSONStream), but using Minipass streams - -## install - -``` -npm install minipass-json-stream -``` - -## example - -```js - -const request = require('request') -const JSONStream = require('minipass-json-stream') -const es = require('event-stream') - -request({url: 'http://isaacs.couchone.com/registry/_all_docs'}) - .pipe(JSONStream.parse('rows.*')) - .pipe(es.mapSync(function (data) { - console.error(data) - return data - })) -``` - -## new JSONStream(options) - -Create a new stream. This is a [minipass](http://npm.im/minipass) stream -that is always set in `objectMode`. It emits objects parsed out of -string/buffer JSON input that match the supplied `path` option. - -## JSONStream.parse(path) - -Return a new JSONStream object to stream values that match a path. - -(Equivalent to `new JSONStream({path})`.) - -``` js -JSONStream.parse('rows.*.doc') -``` - -The `..` operator is the recursive descent operator from -[JSONPath](http://goessner.net/articles/JsonPath/), which will match a -child at any depth (see examples below). - -If your keys have keys that include `.` or `*` etc, use an array instead. -`['row', true, /^doc/]`. - -If you use an array, `RegExp`s, booleans, and/or functions. The `..` -operator is also available in array representation, using `{recurse: -true}`. any object that matches the path will be emitted as 'data' (and -`pipe`d down stream) - -If `path` is empty or null, no 'data' events are emitted. - -If you want to have keys emitted, you can prefix your `*` operator with -`$`: `obj.$*` - in this case the data passed to the stream is an object -with a `key` holding the key and a `value` property holding the data. - -### Examples - -query a couchdb view: - -``` bash -curl -sS localhost:5984/tests/_all_docs&include_docs=true -``` -you will get something like this: - -``` js -{"total_rows":129,"offset":0,"rows":[ - { "id":"change1_0.6995461115147918" - , "key":"change1_0.6995461115147918" - , "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"} - , "doc":{ - "_id": "change1_0.6995461115147918" - , "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1} - }, - { "id":"change2_0.6995461115147918" - , "key":"change2_0.6995461115147918" - , "value":{"rev":"1-13677d36b98c0c075145bb8975105153"} - , "doc":{ - "_id":"change2_0.6995461115147918" - , "_rev":"1-13677d36b98c0c075145bb8975105153" - , "hello":2 - } - }, -]} -``` - -we are probably most interested in the `rows.*.doc` - -create a `JSONStream` that parses the documents from the feed like this: - -``` js -var stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc - -stream.on('data', function(data) { - console.log('received:', data); -}); - -//emits anything from _before_ the first match -stream.on('header', function (data) { - console.log('header:', data) // => {"total_rows":129,"offset":0} -}) -``` - -awesome! 
- -In case you wanted the contents the doc emitted: - -``` js -// equivalent to: 'rows.*.doc.$*' -var stream = JSONStream.parse([ - 'rows', - true, - 'doc', - {emitKey: true} -]) //rows, ANYTHING, doc, items in docs with keys - -stream.on('data', function(data) { - console.log('key:', data.key); - console.log('value:', data.value); -}); -``` - -You can also emit the path: - -``` js -var stream = JSONStream.parse([ - 'rows', - true, - 'doc', - {emitPath: true} -]) //rows, ANYTHING, doc, items in docs with keys - -stream.on('data', function(data) { - console.log('path:', data.path); - console.log('value:', data.value); -}); -``` - -### recursive patterns (..) - -`JSONStream.parse('docs..value')` -(or `JSONStream.parse(['docs', {recurse: true}, 'value'])` using an array) -will emit every `value` object that is a child, grand-child, etc. of the -`docs` object. In this example, it will match exactly 5 times at various depth -levels, emitting 0, 1, 2, 3 and 4 as results. - -```js -{ - "total": 5, - "docs": [ - { - "key": { - "value": 0, - "some": "property" - } - }, - {"value": 1}, - {"value": 2}, - {"blbl": [{}, {"a":0, "b":1, "value":3}, 10]}, - {"value": 4} - ] -} -``` - -## JSONStream.parse(pattern, map) - -(Equivalent to `new JSONStream({ pattern, map })`) - -provide a function that can be used to map or filter -the json output. `map` is passed the value at that node of the pattern, -if `map` return non-nullish (anything but `null` or `undefined`) -that value will be emitted in the stream. If it returns a nullish value, -nothing will be emitted. - -`JSONStream` also emits `'header'` and `'footer'` events, -the `'header'` event contains anything in the output that was before -the first match, and the `'footer'`, is anything after the last match. - -## Acknowlegements - -This module is a fork of [JSONStream](http://npm.im/JSONStream) by Dominic -Tarr, modified and redistributed under the terms of the MIT license. - -this module depends on https://github.com/creationix/jsonparse -by Tim Caswell diff --git a/node_modules/minipass-pipeline/README.md b/node_modules/minipass-pipeline/README.md deleted file mode 100644 index 12daa99f0b086..0000000000000 --- a/node_modules/minipass-pipeline/README.md +++ /dev/null @@ -1,69 +0,0 @@ -# minipass-pipeline - -Create a pipeline of streams using Minipass. - -Calls `.pipe()` on all the streams in the list. Returns a stream where -writes got to the first pipe in the chain, and reads are from the last. - -Errors are proxied along the chain and emitted on the Pipeline stream. - -## USAGE - -```js -const Pipeline = require('minipass-pipeline') - -// the list of streams to pipeline together, -// a bit like `input | transform | output` in bash -const p = new Pipeline(input, transform, output) - -p.write('foo') // writes to input -p.on('data', chunk => doSomething()) // reads from output stream - -// less contrived example (but still pretty contrived)... -const decode = new bunzipDecoder() -const unpack = tar.extract({ cwd: 'target-dir' }) -const tbz = new Pipeline(decode, unpack) - -fs.createReadStream('archive.tbz').pipe(tbz) - -// specify any minipass options if you like, as the first argument -// it'll only try to pipeline event emitters with a .pipe() method -const p = new Pipeline({ objectMode: true }, input, transform, output) - -// If you don't know the things to pipe in right away, that's fine. 
-// use p.push(stream) to add to the end, or p.unshift(stream) to the front -const databaseDecoderStreamDoohickey = (connectionInfo) => { - const p = new Pipeline() - logIntoDatabase(connectionInfo).then(connection => { - initializeDecoderRing(connectionInfo).then(decoderRing => { - p.push(connection, decoderRing) - getUpstreamSource(upstream => { - p.unshift(upstream) - }) - }) - }) - // return to caller right away - // emitted data will be upstream -> connection -> decoderRing pipeline - return p -} -``` - -Pipeline is a [minipass](http://npm.im/minipass) stream, so it's as -synchronous as the streams it wraps. It will buffer data until there is a -reader, but no longer, so make sure to attach your listeners before you -pipe it somewhere else. - -## `new Pipeline(opts = {}, ...streams)` - -Create a new Pipeline with the specified Minipass options and any streams -provided. - -## `pipeline.push(stream, ...)` - -Attach one or more streams to the pipeline at the end (read) side of the -pipe chain. - -## `pipeline.unshift(stream, ...)` - -Attach one or more streams to the pipeline at the start (write) side of the -pipe chain. diff --git a/node_modules/minipass-sized/.npmignore b/node_modules/minipass-sized/.npmignore deleted file mode 100644 index 2bec044be4bbd..0000000000000 --- a/node_modules/minipass-sized/.npmignore +++ /dev/null @@ -1,22 +0,0 @@ -# ignore most things, include some others -/* -/.* - -!bin/ -!lib/ -!docs/ -!package.json -!package-lock.json -!README.md -!CONTRIBUTING.md -!LICENSE -!CHANGELOG.md -!example/ -!scripts/ -!tap-snapshots/ -!test/ -!.travis.yml -!.gitignore -!.gitattributes -!coverage-map.js -!index.js diff --git a/node_modules/minipass-sized/README.md b/node_modules/minipass-sized/README.md deleted file mode 100644 index 6da403e6a2dab..0000000000000 --- a/node_modules/minipass-sized/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# minipass-sized - -A Minipass stream that raises an error if you get a different number of -bytes than expected. - -## USAGE - -Use just like any old [minipass](http://npm.im/minipass) stream, but -provide a `size` option to the constructor. - -The `size` option must be a positive integer, smaller than -`Number.MAX_SAFE_INTEGER`. - -```js -const MinipassSized = require('minipass-sized') -// figure out how much data you expect to get -const expectedSize = +headers['content-length'] -const stream = new MinipassSized({ size: expectedSize }) -stream.on('error', er => { - // if it's the wrong size, then this will raise an error with - // { found: <number>, expect: <number>, code: 'EBADSIZE' } -}) -response.pipe(stream) -``` - -Caveats: this does not work with `objectMode` streams, and will throw a -`TypeError` from the constructor if the size argument is missing or -invalid. diff --git a/node_modules/minipass/README.md b/node_modules/minipass/README.md deleted file mode 100644 index 1a6ff7f5d778e..0000000000000 --- a/node_modules/minipass/README.md +++ /dev/null @@ -1,613 +0,0 @@ -# minipass - -A _very_ minimal implementation of a [PassThrough -stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough) - -[It's very -fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0) -for objects, strings, and buffers. - -Supports pipe()ing (including multi-pipe() and backpressure transmission), -buffering data until either a `data` event handler or `pipe()` is added (so -you don't lose the first chunk), and most other cases where PassThrough is -a good idea. 
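To make that buffering behavior concrete, here is a minimal sketch (assuming only the Minipass API documented in this README): a chunk written before any consumer exists is held, not dropped, and is delivered as soon as a `'data'` listener is attached.

```js
const Minipass = require('minipass')

const mp = new Minipass({ encoding: 'utf8' })
mp.write('first chunk')              // no consumer yet, so this is buffered, not lost
mp.on('data', c => console.log(c))   // attaching a listener resumes the stream,
                                     // so 'first chunk' is emitted here
```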
- -There is a `read()` method, but it's much more efficient to consume data -from this stream via `'data'` events or by calling `pipe()` into some other -stream. Calling `read()` requires the buffer to be flattened in some -cases, which requires copying memory. - -There is also no `unpipe()` method. Once you start piping, there is no -stopping it! - -If you set `objectMode: true` in the options, then whatever is written will -be emitted. Otherwise, it'll do a minimal amount of Buffer copying to -ensure proper Streams semantics when `read(n)` is called. - -`objectMode` can also be set by doing `stream.objectMode = true`, or by -writing any non-string/non-buffer data. `objectMode` cannot be set to -false once it is set. - -This is not a `through` or `through2` stream. It doesn't transform the -data, it just passes it right through. If you want to transform the data, -extend the class, and override the `write()` method. Once you're done -transforming the data however you want, call `super.write()` with the -transform output. - -For some examples of streams that extend Minipass in various ways, check -out: - -- [minizlib](http://npm.im/minizlib) -- [fs-minipass](http://npm.im/fs-minipass) -- [tar](http://npm.im/tar) -- [minipass-collect](http://npm.im/minipass-collect) -- [minipass-flush](http://npm.im/minipass-flush) -- [minipass-pipeline](http://npm.im/minipass-pipeline) -- [tap](http://npm.im/tap) -- [tap-parser](http://npm.im/tap) -- [treport](http://npm.im/tap) -- [minipass-fetch](http://npm.im/minipass-fetch) -- [pacote](http://npm.im/pacote) -- [make-fetch-happen](http://npm.im/make-fetch-happen) -- [cacache](http://npm.im/cacache) -- [ssri](http://npm.im/ssri) -- [npm-registry-fetch](http://npm.im/npm-registry-fetch) -- [minipass-json-stream](http://npm.im/minipass-json-stream) -- [minipass-sized](http://npm.im/minipass-sized) - -## Differences from Node.js Streams - -There are several things that make Minipass streams different from (and in -some ways superior to) Node.js core streams. - -Please read these caveats if you are familiar with noode-core streams and -intend to use Minipass streams in your programs. - -### Timing - -Minipass streams are designed to support synchronous use-cases. Thus, data -is emitted as soon as it is available, always. It is buffered until read, -but no longer. Another way to look at it is that Minipass streams are -exactly as synchronous as the logic that writes into them. - -This can be surprising if your code relies on `PassThrough.write()` always -providing data on the next tick rather than the current one, or being able -to call `resume()` and not have the entire buffer disappear immediately. - -However, without this synchronicity guarantee, there would be no way for -Minipass to achieve the speeds it does, or support the synchronous use -cases that it does. Simply put, waiting takes time. - -This non-deferring approach makes Minipass streams much easier to reason -about, especially in the context of Promises and other flow-control -mechanisms. - -### No High/Low Water Marks - -Node.js core streams will optimistically fill up a buffer, returning `true` -on all writes until the limit is hit, even if the data has nowhere to go. -Then, they will not attempt to draw more data in until the buffer size dips -below a minimum value. - -Minipass streams are much simpler. The `write()` method will return `true` -if the data has somewhere to go (which is to say, given the timing -guarantees, that the data is already there by the time `write()` returns). 
- -If the data has nowhere to go, then `write()` returns false, and the data -sits in a buffer, to be drained out immediately as soon as anyone consumes -it. - -### Hazards of Buffering (or: Why Minipass Is So Fast) - -Since data written to a Minipass stream is immediately written all the way -through the pipeline, and `write()` always returns true/false based on -whether the data was fully flushed, backpressure is communicated -immediately to the upstream caller. This minimizes buffering. - -Consider this case: - -```js -const {PassThrough} = require('stream') -const p1 = new PassThrough({ highWaterMark: 1024 }) -const p2 = new PassThrough({ highWaterMark: 1024 }) -const p3 = new PassThrough({ highWaterMark: 1024 }) -const p4 = new PassThrough({ highWaterMark: 1024 }) - -p1.pipe(p2).pipe(p3).pipe(p4) -p4.on('data', () => console.log('made it through')) - -// this returns false and buffers, then writes to p2 on next tick (1) -// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2) -// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3) -// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain' -// on next tick (4) -// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and -// 'drain' on next tick (5) -// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6) -// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next -// tick (7) - -p1.write(Buffer.alloc(2048)) // returns false -``` - -Along the way, the data was buffered and deferred at each stage, and -multiple event deferrals happened, for an unblocked pipeline where it was -perfectly safe to write all the way through! - -Furthermore, setting a `highWaterMark` of `1024` might lead someone reading -the code to think an advisory maximum of 1KiB is being set for the -pipeline. However, the actual advisory buffering level is the _sum_ of -`highWaterMark` values, since each one has its own bucket. - -Consider the Minipass case: - -```js -const m1 = new Minipass() -const m2 = new Minipass() -const m3 = new Minipass() -const m4 = new Minipass() - -m1.pipe(m2).pipe(m3).pipe(m4) -m4.on('data', () => console.log('made it through')) - -// m1 is flowing, so it writes the data to m2 immediately -// m2 is flowing, so it writes the data to m3 immediately -// m3 is flowing, so it writes the data to m4 immediately -// m4 is flowing, so it fires the 'data' event immediately, returns true -// m4's write returned true, so m3 is still flowing, returns true -// m3's write returned true, so m2 is still flowing, returns true -// m2's write returned true, so m1 is still flowing, returns true -// No event deferrals or buffering along the way! - -m1.write(Buffer.alloc(2048)) // returns true -``` - -It is extremely unlikely that you _don't_ want to buffer any data written, -or _ever_ buffer data that can be flushed all the way through. Neither -node-core streams nor Minipass ever fail to buffer written data, but -node-core streams do a lot of unnecessary buffering and pausing. - -As always, the faster implementation is the one that does less stuff and -waits less time to do it. - -### Immediately emit `end` for empty streams (when not paused) - -If a stream is not paused, and `end()` is called before writing any data -into it, then it will emit `end` immediately. 
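A minimal sketch of that immediate-`end` behavior (assuming only the Minipass API documented here):

```js
const Minipass = require('minipass')

const empty = new Minipass()
let ended = false
empty.on('end', () => { ended = true })
empty.end()            // nothing was ever written, and the stream is not paused
console.log(ended)     // true: 'end' fired synchronously during the end() call
```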
- -If you have logic that occurs on the `end` event which you don't want to -potentially happen immediately (for example, closing file descriptors, -moving on to the next entry in an archive parse stream, etc.) then be sure -to call `stream.pause()` on creation, and then `stream.resume()` once you -are ready to respond to the `end` event. - -### Emit `end` When Asked - -One hazard of immediately emitting `'end'` is that you may not yet have had -a chance to add a listener. In order to avoid this hazard, Minipass -streams safely re-emit the `'end'` event if a new listener is added after -`'end'` has been emitted. - -Ie, if you do `stream.on('end', someFunction)`, and the stream has already -emitted `end`, then it will call the handler right away. (You can think of -this somewhat like attaching a new `.then(fn)` to a previously-resolved -Promise.) - -To prevent calling handlers multiple times who would not expect multiple -ends to occur, all listeners are removed from the `'end'` event whenever it -is emitted. - -### Impact of "immediate flow" on Tee-streams - -A "tee stream" is a stream piping to multiple destinations: - -```js -const tee = new Minipass() -t.pipe(dest1) -t.pipe(dest2) -t.write('foo') // goes to both destinations -``` - -Since Minipass streams _immediately_ process any pending data through the -pipeline when a new pipe destination is added, this can have surprising -effects, especially when a stream comes in from some other function and may -or may not have data in its buffer. - -```js -// WARNING! WILL LOSE DATA! -const src = new Minipass() -src.write('foo') -src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone -src.pipe(dest2) // gets nothing! -``` - -The solution is to create a dedicated tee-stream junction that pipes to -both locations, and then pipe to _that_ instead. - -```js -// Safe example: tee to both places -const src = new Minipass() -src.write('foo') -const tee = new Minipass() -tee.pipe(dest1) -tee.pipe(dest2) -src.pipe(tee) // tee gets 'foo', pipes to both locations -``` - -The same caveat applies to `on('data')` event listeners. The first one -added will _immediately_ receive all of the data, leaving nothing for the -second: - -```js -// WARNING! WILL LOSE DATA! -const src = new Minipass() -src.write('foo') -src.on('data', handler1) // receives 'foo' right away -src.on('data', handler2) // nothing to see here! -``` - -Using a dedicated tee-stream can be used in this case as well: - -```js -// Safe example: tee to both data handlers -const src = new Minipass() -src.write('foo') -const tee = new Minipass() -tee.on('data', handler1) -tee.on('data', handler2) -src.pipe(tee) -``` - -## USAGE - -It's a stream! Use it like a stream and it'll most likely do what you -want. - -```js -const Minipass = require('minipass') -const mp = new Minipass(options) // optional: { encoding, objectMode } -mp.write('foo') -mp.pipe(someOtherStream) -mp.end('bar') -``` - -### OPTIONS - -* `encoding` How would you like the data coming _out_ of the stream to be - encoded? Accepts any values that can be passed to `Buffer.toString()`. -* `objectMode` Emit data exactly as it comes in. This will be flipped on - by default if you write() something other than a string or Buffer at any - point. Setting `objectMode: true` will prevent setting any encoding - value. - -### API - -Implements the user-facing portions of Node.js's `Readable` and `Writable` -streams. - -### Methods - -* `write(chunk, [encoding], [callback])` - Put data in. 
(Note that, in the - base Minipass class, the same data will come out.) Returns `false` if - the stream will buffer the next write, or true if it's still in "flowing" - mode. -* `end([chunk, [encoding]], [callback])` - Signal that you have no more - data to write. This will queue an `end` event to be fired when all the - data has been consumed. -* `setEncoding(encoding)` - Set the encoding for data coming of the stream. - This can only be done once. -* `pause()` - No more data for a while, please. This also prevents `end` - from being emitted for empty streams until the stream is resumed. -* `resume()` - Resume the stream. If there's data in the buffer, it is all - discarded. Any buffered events are immediately emitted. -* `pipe(dest)` - Send all output to the stream provided. There is no way - to unpipe. When data is emitted, it is immediately written to any and - all pipe destinations. -* `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are EventEmitters. Some - events are given special treatment, however. (See below under "events".) -* `promise()` - Returns a Promise that resolves when the stream emits - `end`, or rejects if the stream emits `error`. -* `collect()` - Return a Promise that resolves on `end` with an array - containing each chunk of data that was emitted, or rejects if the stream - emits `error`. Note that this consumes the stream data. -* `concat()` - Same as `collect()`, but concatenates the data into a single - Buffer object. Will reject the returned promise if the stream is in - objectMode, or if it goes into objectMode by the end of the data. -* `read(n)` - Consume `n` bytes of data out of the buffer. If `n` is not - provided, then consume all of it. If `n` bytes are not available, then - it returns null. **Note** consuming streams in this way is less - efficient, and can lead to unnecessary Buffer copying. -* `destroy([er])` - Destroy the stream. If an error is provided, then an - `'error'` event is emitted. If the stream has a `close()` method, and - has not emitted a `'close'` event yet, then `stream.close()` will be - called. Any Promises returned by `.promise()`, `.collect()` or - `.concat()` will be rejected. After being destroyed, writing to the - stream will emit an error. No more data will be emitted if the stream is - destroyed, even if it was previously buffered. - -### Properties - -* `bufferLength` Read-only. Total number of bytes buffered, or in the case - of objectMode, the total number of objects. -* `encoding` The encoding that has been set. (Setting this is equivalent - to calling `setEncoding(enc)` and has the same prohibition against - setting multiple times.) -* `flowing` Read-only. Boolean indicating whether a chunk written to the - stream will be immediately emitted. -* `emittedEnd` Read-only. Boolean indicating whether the end-ish events - (ie, `end`, `prefinish`, `finish`) have been emitted. Note that - listening on any end-ish event will immediateyl re-emit it if it has - already been emitted. -* `writable` Whether the stream is writable. Default `true`. Set to - `false` when `end()` -* `readable` Whether the stream is readable. Default `true`. -* `buffer` A [yallist](http://npm.im/yallist) linked list of chunks written - to the stream that have not yet been emitted. (It's probably a bad idea - to mess with this.) -* `pipes` A [yallist](http://npm.im/yallist) linked list of streams that - this stream is piping into. (It's probably a bad idea to mess with - this.) -* `destroyed` A getter that indicates whether the stream was destroyed. 
-* `paused` True if the stream has been explicitly paused, otherwise false. -* `objectMode` Indicates whether the stream is in `objectMode`. Once set - to `true`, it cannot be set to `false`. - -### Events - -* `data` Emitted when there's data to read. Argument is the data to read. - This is never emitted while not flowing. If a listener is attached, that - will resume the stream. -* `end` Emitted when there's no more data to read. This will be emitted - immediately for empty streams when `end()` is called. If a listener is - attached, and `end` was already emitted, then it will be emitted again. - All listeners are removed when `end` is emitted. -* `prefinish` An end-ish event that follows the same logic as `end` and is - emitted in the same conditions where `end` is emitted. Emitted after - `'end'`. -* `finish` An end-ish event that follows the same logic as `end` and is - emitted in the same conditions where `end` is emitted. Emitted after - `'prefinish'`. -* `close` An indication that an underlying resource has been released. - Minipass does not emit this event, but will defer it until after `end` - has been emitted, since it throws off some stream libraries otherwise. -* `drain` Emitted when the internal buffer empties, and it is again - suitable to `write()` into the stream. -* `readable` Emitted when data is buffered and ready to be read by a - consumer. -* `resume` Emitted when stream changes state from buffering to flowing - mode. (Ie, when `resume` is called, `pipe` is called, or a `data` event - listener is added.) - -### Static Methods - -* `Minipass.isStream(stream)` Returns `true` if the argument is a stream, - and false otherwise. To be considered a stream, the object must be - either an instance of Minipass, or an EventEmitter that has either a - `pipe()` method, or both `write()` and `end()` methods. (Pretty much any - stream in node-land will return `true` for this.) - -## EXAMPLES - -Here are some examples of things you can do with Minipass streams. - -### simple "are you done yet" promise - -```js -mp.promise().then(() => { - // stream is finished -}, er => { - // stream emitted an error -}) -``` - -### collecting - -```js -mp.collect().then(all => { - // all is an array of all the data emitted - // encoding is supported in this case, so - // so the result will be a collection of strings if - // an encoding is specified, or buffers/objects if not. - // - // In an async function, you may do - // const data = await stream.collect() -}) -``` - -### collecting into a single blob - -This is a bit slower because it concatenates the data into one chunk for -you, but if you're going to do it yourself anyway, it's convenient this -way: - -```js -mp.concat().then(onebigchunk => { - // onebigchunk is a string if the stream - // had an encoding set, or a buffer otherwise. -}) -``` - -### iteration - -You can iterate over streams synchronously or asynchronously in platforms -that support it. - -Synchronous iteration will end when the currently available data is -consumed, even if the `end` event has not been reached. In string and -buffer mode, the data is concatenated, so unless multiple writes are -occurring in the same tick as the `read()`, sync iteration loops will -generally only have a single iteration. - -To consume chunks in this way exactly as they have been written, with no -flattening, create the stream with the `{ objectMode: true }` option. 
- -```js -const mp = new Minipass({ objectMode: true }) -mp.write('a') -mp.write('b') -for (let letter of mp) { - console.log(letter) // a, b -} -mp.write('c') -mp.write('d') -for (let letter of mp) { - console.log(letter) // c, d -} -mp.write('e') -mp.end() -for (let letter of mp) { - console.log(letter) // e -} -for (let letter of mp) { - console.log(letter) // nothing -} -``` - -Asynchronous iteration will continue until the end event is reached, -consuming all of the data. - -```js -const mp = new Minipass({ encoding: 'utf8' }) - -// some source of some data -let i = 5 -const inter = setInterval(() => { - if (i --> 0) - mp.write(Buffer.from('foo\n', 'utf8')) - else { - mp.end() - clearInterval(inter) - } -}, 100) - -// consume the data with asynchronous iteration -async function consume () { - for await (let chunk of mp) { - console.log(chunk) - } - return 'ok' -} - -consume().then(res => console.log(res)) -// logs `foo\n` 5 times, and then `ok` -``` - -### subclass that `console.log()`s everything written into it - -```js -class Logger extends Minipass { - write (chunk, encoding, callback) { - console.log('WRITE', chunk, encoding) - return super.write(chunk, encoding, callback) - } - end (chunk, encoding, callback) { - console.log('END', chunk, encoding) - return super.end(chunk, encoding, callback) - } -} - -someSource.pipe(new Logger()).pipe(someDest) -``` - -### same thing, but using an inline anonymous class - -```js -// js classes are fun -someSource - .pipe(new (class extends Minipass { - emit (ev, ...data) { - // let's also log events, because debugging some weird thing - console.log('EMIT', ev) - return super.emit(ev, ...data) - } - write (chunk, encoding, callback) { - console.log('WRITE', chunk, encoding) - return super.write(chunk, encoding, callback) - } - end (chunk, encoding, callback) { - console.log('END', chunk, encoding) - return super.end(chunk, encoding, callback) - } - })) - .pipe(someDest) -``` - -### subclass that defers 'end' for some reason - -```js -class SlowEnd extends Minipass { - emit (ev, ...args) { - if (ev === 'end') { - console.log('going to end, hold on a sec') - setTimeout(() => { - console.log('ok, ready to end now') - super.emit('end', ...args) - }, 100) - } else { - return super.emit(ev, ...args) - } - } -} -``` - -### transform that creates newline-delimited JSON - -```js -class NDJSONEncode extends Minipass { - write (obj, cb) { - try { - // JSON.stringify can throw, emit an error on that - return super.write(JSON.stringify(obj) + '\n', 'utf8', cb) - } catch (er) { - this.emit('error', er) - } - } - end (obj, cb) { - if (typeof obj === 'function') { - cb = obj - obj = undefined - } - if (obj !== undefined) { - this.write(obj) - } - return super.end(cb) - } -} -``` - -### transform that parses newline-delimited JSON - -```js -class NDJSONDecode extends Minipass { - constructor (options) { - // always be in object mode, as far as Minipass is concerned - super({ objectMode: true }) - this._jsonBuffer = '' - } - write (chunk, encoding, cb) { - if (typeof chunk === 'string' && - typeof encoding === 'string' && - encoding !== 'utf8') { - chunk = Buffer.from(chunk, encoding).toString() - } else if (Buffer.isBuffer(chunk)) - chunk = chunk.toString() - } - if (typeof encoding === 'function') { - cb = encoding - } - const jsonData = (this._jsonBuffer + chunk).split('\n') - this._jsonBuffer = jsonData.pop() - for (let i = 0; i < jsonData.length; i++) { - let parsed - try { - super.write(parsed) - } catch (er) { - this.emit('error', er) - continue - } - } 
- if (cb) - cb() - } -} -``` diff --git a/node_modules/minizlib/README.md b/node_modules/minizlib/README.md deleted file mode 100644 index 80e067ab381e1..0000000000000 --- a/node_modules/minizlib/README.md +++ /dev/null @@ -1,60 +0,0 @@ -# minizlib - -A fast zlib stream built on [minipass](http://npm.im/minipass) and -Node.js's zlib binding. - -This module was created to serve the needs of -[node-tar](http://npm.im/tar) and -[minipass-fetch](http://npm.im/minipass-fetch). - -Brotli is supported in versions of node with a Brotli binding. - -## How does this differ from the streams in `require('zlib')`? - -First, there are no convenience methods to compress or decompress a -buffer. If you want those, use the built-in `zlib` module. This is -only streams. That being said, Minipass streams to make it fairly easy to -use as one-liners: `new zlib.Deflate().end(data).read()` will return the -deflate compressed result. - -This module compresses and decompresses the data as fast as you feed -it in. It is synchronous, and runs on the main process thread. Zlib -and Brotli operations can be high CPU, but they're very fast, and doing it -this way means much less bookkeeping and artificial deferral. - -Node's built in zlib streams are built on top of `stream.Transform`. -They do the maximally safe thing with respect to consistent -asynchrony, buffering, and backpressure. - -See [Minipass](http://npm.im/minipass) for more on the differences between -Node.js core streams and Minipass streams, and the convenience methods -provided by that class. - -## Classes - -- Deflate -- Inflate -- Gzip -- Gunzip -- DeflateRaw -- InflateRaw -- Unzip -- BrotliCompress (Node v10 and higher) -- BrotliDecompress (Node v10 and higher) - -## USAGE - -```js -const zlib = require('minizlib') -const input = sourceOfCompressedData() -const decode = new zlib.BrotliDecompress() -const output = whereToWriteTheDecodedData() -input.pipe(decode).pipe(output) -``` - -## REPRODUCIBLE BUILDS - -To create reproducible gzip compressed files across different operating -systems, set `portable: true` in the options. This causes minizlib to set -the `OS` indicator in byte 9 of the extended gzip header to `0xFF` for -'unknown'. diff --git a/node_modules/mkdirp-infer-owner/README.md b/node_modules/mkdirp-infer-owner/README.md deleted file mode 100644 index c466ac3404b38..0000000000000 --- a/node_modules/mkdirp-infer-owner/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# mkdirp-infer-owner - -[`mkdirp`](http://npm.im/mkdirp), but chown to the owner of the containing -folder if possible and necessary. - -That is, on Windows and when running as non-root, it's exactly the same as -[`mkdirp`](http://npm.im/mkdirp). - -When running as root on non-Windows systems, it uses -[`infer-owner`](http://npm.im/infer-owner) to find the owner of the -containing folder, and then [`chownr`](http://npm.im/chownr) to set the -ownership of the created folder to that same uid/gid. - -This is used by [npm](http://npm.im/npm) to prevent root-owned files and -folders from showing up in your home directory (either in `node_modules` or -in the `~/.npm` cache) when running as root. diff --git a/node_modules/mkdirp/CHANGELOG.md b/node_modules/mkdirp/CHANGELOG.md deleted file mode 100644 index 81458380be9a2..0000000000000 --- a/node_modules/mkdirp/CHANGELOG.md +++ /dev/null @@ -1,15 +0,0 @@ -# Changers Lorgs! - -## 1.0 - -Full rewrite. Essentially a brand new module. - -- Return a promise instead of taking a callback. 
-- Use native `fs.mkdir(path, { recursive: true })` when available. -- Drop support for outdated Node.js versions. (Technically still works on - Node.js v8, but only 10 and above are officially supported.) - -## 0.x - -Original and most widely used recursive directory creation implementation -in JavaScript, dating back to 2010. diff --git a/node_modules/mkdirp/README.markdown b/node_modules/mkdirp/readme.markdown similarity index 100% rename from node_modules/mkdirp/README.markdown rename to node_modules/mkdirp/readme.markdown diff --git a/node_modules/mute-stream/README.md b/node_modules/mute-stream/README.md deleted file mode 100644 index 8ab1238e46d1f..0000000000000 --- a/node_modules/mute-stream/README.md +++ /dev/null @@ -1,68 +0,0 @@ -# mute-stream - -Bytes go in, but they don't come out (when muted). - -This is a basic pass-through stream, but when muted, the bytes are -silently dropped, rather than being passed through. - -## Usage - -```javascript -var MuteStream = require('mute-stream') - -var ms = new MuteStream(options) - -ms.pipe(process.stdout) -ms.write('foo') // writes 'foo' to stdout -ms.mute() -ms.write('bar') // does not write 'bar' -ms.unmute() -ms.write('baz') // writes 'baz' to stdout - -// can also be used to mute incoming data -var ms = new MuteStream -input.pipe(ms) - -ms.on('data', function (c) { - console.log('data: ' + c) -}) - -input.emit('data', 'foo') // logs 'foo' -ms.mute() -input.emit('data', 'bar') // does not log 'bar' -ms.unmute() -input.emit('data', 'baz') // logs 'baz' -``` - -## Options - -All options are optional. - -* `replace` Set to a string to replace each character with the - specified string when muted. (So you can show `****` instead of the - password, for example.) - -* `prompt` If you are using a replacement char, and also using a - prompt with a readline stream (as for a `Password: *****` input), - then specify what the prompt is so that backspace will work - properly. Otherwise, pressing backspace will overwrite the prompt - with the replacement character, which is weird. - -## ms.mute() - -Set `muted` to `true`. Turns `.write()` into a no-op. - -## ms.unmute() - -Set `muted` to `false` - -## ms.isTTY - -True if the pipe destination is a TTY, or if the incoming pipe source is -a TTY. - -## Other stream methods... - -The other standard readable and writable stream methods are all -available. The MuteStream object acts as a facade to its pipe source -and destination. 
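The `replace` option described above is easiest to see in a short sketch (the prompt text is hypothetical; only the options documented in this README are assumed):

```js
var MuteStream = require('mute-stream')

// echo '*' for each character written while muted, e.g. for a password prompt
var ms = new MuteStream({ replace: '*', prompt: 'Password: ' })
ms.pipe(process.stdout)

ms.write('Password: ')   // not muted yet, so the prompt is shown as-is
ms.mute()
ms.write('hunter2')      // muted: stdout shows ******* instead of the input
ms.unmute()
ms.write('\n')
```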
diff --git a/node_modules/negotiator/HISTORY.md b/node_modules/negotiator/HISTORY.md new file mode 100644 index 0000000000000..6d06c76aaa965 --- /dev/null +++ b/node_modules/negotiator/HISTORY.md @@ -0,0 +1,103 @@ +0.6.2 / 2019-04-29 +================== + + * Fix sorting charset, encoding, and language with extra parameters + +0.6.1 / 2016-05-02 +================== + + * perf: improve `Accept` parsing speed + * perf: improve `Accept-Charset` parsing speed + * perf: improve `Accept-Encoding` parsing speed + * perf: improve `Accept-Language` parsing speed + +0.6.0 / 2015-09-29 +================== + + * Fix including type extensions in parameters in `Accept` parsing + * Fix parsing `Accept` parameters with quoted equals + * Fix parsing `Accept` parameters with quoted semicolons + * Lazy-load modules from main entry point + * perf: delay type concatenation until needed + * perf: enable strict mode + * perf: hoist regular expressions + * perf: remove closures getting spec properties + * perf: remove a closure from media type parsing + * perf: remove property delete from media type parsing + +0.5.3 / 2015-05-10 +================== + + * Fix media type parameter matching to be case-insensitive + +0.5.2 / 2015-05-06 +================== + + * Fix comparing media types with quoted values + * Fix splitting media types with quoted commas + +0.5.1 / 2015-02-14 +================== + + * Fix preference sorting to be stable for long acceptable lists + +0.5.0 / 2014-12-18 +================== + + * Fix list return order when large accepted list + * Fix missing identity encoding when q=0 exists + * Remove dynamic building of Negotiator class + +0.4.9 / 2014-10-14 +================== + + * Fix error when media type has invalid parameter + +0.4.8 / 2014-09-28 +================== + + * Fix all negotiations to be case-insensitive + * Stable sort preferences of same quality according to client order + * Support Node.js 0.6 + +0.4.7 / 2014-06-24 +================== + + * Handle invalid provided languages + * Handle invalid provided media types + +0.4.6 / 2014-06-11 +================== + + * Order by specificity when quality is the same + +0.4.5 / 2014-05-29 +================== + + * Fix regression in empty header handling + +0.4.4 / 2014-05-29 +================== + + * Fix behaviors when headers are not present + +0.4.3 / 2014-04-16 +================== + + * Handle slashes on media params correctly + +0.4.2 / 2014-02-28 +================== + + * Fix media type sorting + * Handle media types params strictly + +0.4.1 / 2014-01-16 +================== + + * Use most specific matches + +0.4.0 / 2014-01-09 +================== + + * Remove preferred prefix from methods diff --git a/node_modules/negotiator/LICENSE b/node_modules/negotiator/LICENSE new file mode 100644 index 0000000000000..ea6b9e2e9ac25 --- /dev/null +++ b/node_modules/negotiator/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2012-2014 Federico Romero +Copyright (c) 2012-2014 Isaac Z. 
Schlueter +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/negotiator/index.js b/node_modules/negotiator/index.js new file mode 100644 index 0000000000000..8d4f6a226cb0d --- /dev/null +++ b/node_modules/negotiator/index.js @@ -0,0 +1,124 @@ +/*! + * negotiator + * Copyright(c) 2012 Federico Romero + * Copyright(c) 2012-2014 Isaac Z. Schlueter + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Cached loaded submodules. + * @private + */ + +var modules = Object.create(null); + +/** + * Module exports. + * @public + */ + +module.exports = Negotiator; +module.exports.Negotiator = Negotiator; + +/** + * Create a Negotiator instance from a request. + * @param {object} request + * @public + */ + +function Negotiator(request) { + if (!(this instanceof Negotiator)) { + return new Negotiator(request); + } + + this.request = request; +} + +Negotiator.prototype.charset = function charset(available) { + var set = this.charsets(available); + return set && set[0]; +}; + +Negotiator.prototype.charsets = function charsets(available) { + var preferredCharsets = loadModule('charset').preferredCharsets; + return preferredCharsets(this.request.headers['accept-charset'], available); +}; + +Negotiator.prototype.encoding = function encoding(available) { + var set = this.encodings(available); + return set && set[0]; +}; + +Negotiator.prototype.encodings = function encodings(available) { + var preferredEncodings = loadModule('encoding').preferredEncodings; + return preferredEncodings(this.request.headers['accept-encoding'], available); +}; + +Negotiator.prototype.language = function language(available) { + var set = this.languages(available); + return set && set[0]; +}; + +Negotiator.prototype.languages = function languages(available) { + var preferredLanguages = loadModule('language').preferredLanguages; + return preferredLanguages(this.request.headers['accept-language'], available); +}; + +Negotiator.prototype.mediaType = function mediaType(available) { + var set = this.mediaTypes(available); + return set && set[0]; +}; + +Negotiator.prototype.mediaTypes = function mediaTypes(available) { + var preferredMediaTypes = loadModule('mediaType').preferredMediaTypes; + return preferredMediaTypes(this.request.headers.accept, available); +}; + +// Backwards compatibility +Negotiator.prototype.preferredCharset = Negotiator.prototype.charset; +Negotiator.prototype.preferredCharsets = Negotiator.prototype.charsets; 
+Negotiator.prototype.preferredEncoding = Negotiator.prototype.encoding; +Negotiator.prototype.preferredEncodings = Negotiator.prototype.encodings; +Negotiator.prototype.preferredLanguage = Negotiator.prototype.language; +Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages; +Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType; +Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes; + +/** + * Load the given module. + * @private + */ + +function loadModule(moduleName) { + var module = modules[moduleName]; + + if (module !== undefined) { + return module; + } + + // This uses a switch for static require analysis + switch (moduleName) { + case 'charset': + module = require('./lib/charset'); + break; + case 'encoding': + module = require('./lib/encoding'); + break; + case 'language': + module = require('./lib/language'); + break; + case 'mediaType': + module = require('./lib/mediaType'); + break; + default: + throw new Error('Cannot find module \'' + moduleName + '\''); + } + + // Store to prevent invoking require() + modules[moduleName] = module; + + return module; +} diff --git a/node_modules/negotiator/lib/charset.js b/node_modules/negotiator/lib/charset.js new file mode 100644 index 0000000000000..cdd014803474a --- /dev/null +++ b/node_modules/negotiator/lib/charset.js @@ -0,0 +1,169 @@ +/** + * negotiator + * Copyright(c) 2012 Isaac Z. Schlueter + * Copyright(c) 2014 Federico Romero + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = preferredCharsets; +module.exports.preferredCharsets = preferredCharsets; + +/** + * Module variables. + * @private + */ + +var simpleCharsetRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/; + +/** + * Parse the Accept-Charset header. + * @private + */ + +function parseAcceptCharset(accept) { + var accepts = accept.split(','); + + for (var i = 0, j = 0; i < accepts.length; i++) { + var charset = parseCharset(accepts[i].trim(), i); + + if (charset) { + accepts[j++] = charset; + } + } + + // trim accepts + accepts.length = j; + + return accepts; +} + +/** + * Parse a charset from the Accept-Charset header. + * @private + */ + +function parseCharset(str, i) { + var match = simpleCharsetRegExp.exec(str); + if (!match) return null; + + var charset = match[1]; + var q = 1; + if (match[2]) { + var params = match[2].split(';') + for (var j = 0; j < params.length; j++) { + var p = params[j].trim().split('='); + if (p[0] === 'q') { + q = parseFloat(p[1]); + break; + } + } + } + + return { + charset: charset, + q: q, + i: i + }; +} + +/** + * Get the priority of a charset. + * @private + */ + +function getCharsetPriority(charset, accepted, index) { + var priority = {o: -1, q: 0, s: 0}; + + for (var i = 0; i < accepted.length; i++) { + var spec = specify(charset, accepted[i], index); + + if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) { + priority = spec; + } + } + + return priority; +} + +/** + * Get the specificity of the charset. + * @private + */ + +function specify(charset, spec, index) { + var s = 0; + if(spec.charset.toLowerCase() === charset.toLowerCase()){ + s |= 1; + } else if (spec.charset !== '*' ) { + return null + } + + return { + i: index, + o: spec.i, + q: spec.q, + s: s + } +} + +/** + * Get the preferred charsets from an Accept-Charset header. 
+ * @public + */ + +function preferredCharsets(accept, provided) { + // RFC 2616 sec 14.2: no header = * + var accepts = parseAcceptCharset(accept === undefined ? '*' : accept || ''); + + if (!provided) { + // sorted list of all charsets + return accepts + .filter(isQuality) + .sort(compareSpecs) + .map(getFullCharset); + } + + var priorities = provided.map(function getPriority(type, index) { + return getCharsetPriority(type, accepts, index); + }); + + // sorted list of accepted charsets + return priorities.filter(isQuality).sort(compareSpecs).map(function getCharset(priority) { + return provided[priorities.indexOf(priority)]; + }); +} + +/** + * Compare two specs. + * @private + */ + +function compareSpecs(a, b) { + return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0; +} + +/** + * Get full charset string. + * @private + */ + +function getFullCharset(spec) { + return spec.charset; +} + +/** + * Check if a spec has any quality. + * @private + */ + +function isQuality(spec) { + return spec.q > 0; +} diff --git a/node_modules/negotiator/lib/encoding.js b/node_modules/negotiator/lib/encoding.js new file mode 100644 index 0000000000000..8432cd77b8a96 --- /dev/null +++ b/node_modules/negotiator/lib/encoding.js @@ -0,0 +1,184 @@ +/** + * negotiator + * Copyright(c) 2012 Isaac Z. Schlueter + * Copyright(c) 2014 Federico Romero + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = preferredEncodings; +module.exports.preferredEncodings = preferredEncodings; + +/** + * Module variables. + * @private + */ + +var simpleEncodingRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/; + +/** + * Parse the Accept-Encoding header. + * @private + */ + +function parseAcceptEncoding(accept) { + var accepts = accept.split(','); + var hasIdentity = false; + var minQuality = 1; + + for (var i = 0, j = 0; i < accepts.length; i++) { + var encoding = parseEncoding(accepts[i].trim(), i); + + if (encoding) { + accepts[j++] = encoding; + hasIdentity = hasIdentity || specify('identity', encoding); + minQuality = Math.min(minQuality, encoding.q || 1); + } + } + + if (!hasIdentity) { + /* + * If identity doesn't explicitly appear in the accept-encoding header, + * it's added to the list of acceptable encoding with the lowest q + */ + accepts[j++] = { + encoding: 'identity', + q: minQuality, + i: i + }; + } + + // trim accepts + accepts.length = j; + + return accepts; +} + +/** + * Parse an encoding from the Accept-Encoding header. + * @private + */ + +function parseEncoding(str, i) { + var match = simpleEncodingRegExp.exec(str); + if (!match) return null; + + var encoding = match[1]; + var q = 1; + if (match[2]) { + var params = match[2].split(';'); + for (var j = 0; j < params.length; j++) { + var p = params[j].trim().split('='); + if (p[0] === 'q') { + q = parseFloat(p[1]); + break; + } + } + } + + return { + encoding: encoding, + q: q, + i: i + }; +} + +/** + * Get the priority of an encoding. + * @private + */ + +function getEncodingPriority(encoding, accepted, index) { + var priority = {o: -1, q: 0, s: 0}; + + for (var i = 0; i < accepted.length; i++) { + var spec = specify(encoding, accepted[i], index); + + if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) { + priority = spec; + } + } + + return priority; +} + +/** + * Get the specificity of the encoding. 
+ * @private + */ + +function specify(encoding, spec, index) { + var s = 0; + if(spec.encoding.toLowerCase() === encoding.toLowerCase()){ + s |= 1; + } else if (spec.encoding !== '*' ) { + return null + } + + return { + i: index, + o: spec.i, + q: spec.q, + s: s + } +}; + +/** + * Get the preferred encodings from an Accept-Encoding header. + * @public + */ + +function preferredEncodings(accept, provided) { + var accepts = parseAcceptEncoding(accept || ''); + + if (!provided) { + // sorted list of all encodings + return accepts + .filter(isQuality) + .sort(compareSpecs) + .map(getFullEncoding); + } + + var priorities = provided.map(function getPriority(type, index) { + return getEncodingPriority(type, accepts, index); + }); + + // sorted list of accepted encodings + return priorities.filter(isQuality).sort(compareSpecs).map(function getEncoding(priority) { + return provided[priorities.indexOf(priority)]; + }); +} + +/** + * Compare two specs. + * @private + */ + +function compareSpecs(a, b) { + return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0; +} + +/** + * Get full encoding string. + * @private + */ + +function getFullEncoding(spec) { + return spec.encoding; +} + +/** + * Check if a spec has any quality. + * @private + */ + +function isQuality(spec) { + return spec.q > 0; +} diff --git a/node_modules/negotiator/lib/language.js b/node_modules/negotiator/lib/language.js new file mode 100644 index 0000000000000..62f737f006021 --- /dev/null +++ b/node_modules/negotiator/lib/language.js @@ -0,0 +1,179 @@ +/** + * negotiator + * Copyright(c) 2012 Isaac Z. Schlueter + * Copyright(c) 2014 Federico Romero + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = preferredLanguages; +module.exports.preferredLanguages = preferredLanguages; + +/** + * Module variables. + * @private + */ + +var simpleLanguageRegExp = /^\s*([^\s\-;]+)(?:-([^\s;]+))?\s*(?:;(.*))?$/; + +/** + * Parse the Accept-Language header. + * @private + */ + +function parseAcceptLanguage(accept) { + var accepts = accept.split(','); + + for (var i = 0, j = 0; i < accepts.length; i++) { + var language = parseLanguage(accepts[i].trim(), i); + + if (language) { + accepts[j++] = language; + } + } + + // trim accepts + accepts.length = j; + + return accepts; +} + +/** + * Parse a language from the Accept-Language header. + * @private + */ + +function parseLanguage(str, i) { + var match = simpleLanguageRegExp.exec(str); + if (!match) return null; + + var prefix = match[1], + suffix = match[2], + full = prefix; + + if (suffix) full += "-" + suffix; + + var q = 1; + if (match[3]) { + var params = match[3].split(';') + for (var j = 0; j < params.length; j++) { + var p = params[j].split('='); + if (p[0] === 'q') q = parseFloat(p[1]); + } + } + + return { + prefix: prefix, + suffix: suffix, + q: q, + i: i, + full: full + }; +} + +/** + * Get the priority of a language. + * @private + */ + +function getLanguagePriority(language, accepted, index) { + var priority = {o: -1, q: 0, s: 0}; + + for (var i = 0; i < accepted.length; i++) { + var spec = specify(language, accepted[i], index); + + if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) { + priority = spec; + } + } + + return priority; +} + +/** + * Get the specificity of the language. 
+ * @private + */ + +function specify(language, spec, index) { + var p = parseLanguage(language) + if (!p) return null; + var s = 0; + if(spec.full.toLowerCase() === p.full.toLowerCase()){ + s |= 4; + } else if (spec.prefix.toLowerCase() === p.full.toLowerCase()) { + s |= 2; + } else if (spec.full.toLowerCase() === p.prefix.toLowerCase()) { + s |= 1; + } else if (spec.full !== '*' ) { + return null + } + + return { + i: index, + o: spec.i, + q: spec.q, + s: s + } +}; + +/** + * Get the preferred languages from an Accept-Language header. + * @public + */ + +function preferredLanguages(accept, provided) { + // RFC 2616 sec 14.4: no header = * + var accepts = parseAcceptLanguage(accept === undefined ? '*' : accept || ''); + + if (!provided) { + // sorted list of all languages + return accepts + .filter(isQuality) + .sort(compareSpecs) + .map(getFullLanguage); + } + + var priorities = provided.map(function getPriority(type, index) { + return getLanguagePriority(type, accepts, index); + }); + + // sorted list of accepted languages + return priorities.filter(isQuality).sort(compareSpecs).map(function getLanguage(priority) { + return provided[priorities.indexOf(priority)]; + }); +} + +/** + * Compare two specs. + * @private + */ + +function compareSpecs(a, b) { + return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0; +} + +/** + * Get full language string. + * @private + */ + +function getFullLanguage(spec) { + return spec.full; +} + +/** + * Check if a spec has any quality. + * @private + */ + +function isQuality(spec) { + return spec.q > 0; +} diff --git a/node_modules/negotiator/lib/mediaType.js b/node_modules/negotiator/lib/mediaType.js new file mode 100644 index 0000000000000..67309dd75f1b6 --- /dev/null +++ b/node_modules/negotiator/lib/mediaType.js @@ -0,0 +1,294 @@ +/** + * negotiator + * Copyright(c) 2012 Isaac Z. Schlueter + * Copyright(c) 2014 Federico Romero + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = preferredMediaTypes; +module.exports.preferredMediaTypes = preferredMediaTypes; + +/** + * Module variables. + * @private + */ + +var simpleMediaTypeRegExp = /^\s*([^\s\/;]+)\/([^;\s]+)\s*(?:;(.*))?$/; + +/** + * Parse the Accept header. + * @private + */ + +function parseAccept(accept) { + var accepts = splitMediaTypes(accept); + + for (var i = 0, j = 0; i < accepts.length; i++) { + var mediaType = parseMediaType(accepts[i].trim(), i); + + if (mediaType) { + accepts[j++] = mediaType; + } + } + + // trim accepts + accepts.length = j; + + return accepts; +} + +/** + * Parse a media type from the Accept header. + * @private + */ + +function parseMediaType(str, i) { + var match = simpleMediaTypeRegExp.exec(str); + if (!match) return null; + + var params = Object.create(null); + var q = 1; + var subtype = match[2]; + var type = match[1]; + + if (match[3]) { + var kvps = splitParameters(match[3]).map(splitKeyValuePair); + + for (var j = 0; j < kvps.length; j++) { + var pair = kvps[j]; + var key = pair[0].toLowerCase(); + var val = pair[1]; + + // get the value, unwrapping quotes + var value = val && val[0] === '"' && val[val.length - 1] === '"' + ? val.substr(1, val.length - 2) + : val; + + if (key === 'q') { + q = parseFloat(value); + break; + } + + // store parameter + params[key] = value; + } + } + + return { + type: type, + subtype: subtype, + params: params, + q: q, + i: i + }; +} + +/** + * Get the priority of a media type. 
+ * @private + */ + +function getMediaTypePriority(type, accepted, index) { + var priority = {o: -1, q: 0, s: 0}; + + for (var i = 0; i < accepted.length; i++) { + var spec = specify(type, accepted[i], index); + + if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) { + priority = spec; + } + } + + return priority; +} + +/** + * Get the specificity of the media type. + * @private + */ + +function specify(type, spec, index) { + var p = parseMediaType(type); + var s = 0; + + if (!p) { + return null; + } + + if(spec.type.toLowerCase() == p.type.toLowerCase()) { + s |= 4 + } else if(spec.type != '*') { + return null; + } + + if(spec.subtype.toLowerCase() == p.subtype.toLowerCase()) { + s |= 2 + } else if(spec.subtype != '*') { + return null; + } + + var keys = Object.keys(spec.params); + if (keys.length > 0) { + if (keys.every(function (k) { + return spec.params[k] == '*' || (spec.params[k] || '').toLowerCase() == (p.params[k] || '').toLowerCase(); + })) { + s |= 1 + } else { + return null + } + } + + return { + i: index, + o: spec.i, + q: spec.q, + s: s, + } +} + +/** + * Get the preferred media types from an Accept header. + * @public + */ + +function preferredMediaTypes(accept, provided) { + // RFC 2616 sec 14.2: no header = */* + var accepts = parseAccept(accept === undefined ? '*/*' : accept || ''); + + if (!provided) { + // sorted list of all types + return accepts + .filter(isQuality) + .sort(compareSpecs) + .map(getFullType); + } + + var priorities = provided.map(function getPriority(type, index) { + return getMediaTypePriority(type, accepts, index); + }); + + // sorted list of accepted types + return priorities.filter(isQuality).sort(compareSpecs).map(function getType(priority) { + return provided[priorities.indexOf(priority)]; + }); +} + +/** + * Compare two specs. + * @private + */ + +function compareSpecs(a, b) { + return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0; +} + +/** + * Get full type string. + * @private + */ + +function getFullType(spec) { + return spec.type + '/' + spec.subtype; +} + +/** + * Check if a spec has any quality. + * @private + */ + +function isQuality(spec) { + return spec.q > 0; +} + +/** + * Count the number of quotes in a string. + * @private + */ + +function quoteCount(string) { + var count = 0; + var index = 0; + + while ((index = string.indexOf('"', index)) !== -1) { + count++; + index++; + } + + return count; +} + +/** + * Split a key value pair. + * @private + */ + +function splitKeyValuePair(str) { + var index = str.indexOf('='); + var key; + var val; + + if (index === -1) { + key = str; + } else { + key = str.substr(0, index); + val = str.substr(index + 1); + } + + return [key, val]; +} + +/** + * Split an Accept header into media types. + * @private + */ + +function splitMediaTypes(accept) { + var accepts = accept.split(','); + + for (var i = 1, j = 0; i < accepts.length; i++) { + if (quoteCount(accepts[j]) % 2 == 0) { + accepts[++j] = accepts[i]; + } else { + accepts[j] += ',' + accepts[i]; + } + } + + // trim accepts + accepts.length = j + 1; + + return accepts; +} + +/** + * Split a string of parameters. 
+ * @private + */ + +function splitParameters(str) { + var parameters = str.split(';'); + + for (var i = 1, j = 0; i < parameters.length; i++) { + if (quoteCount(parameters[j]) % 2 == 0) { + parameters[++j] = parameters[i]; + } else { + parameters[j] += ';' + parameters[i]; + } + } + + // trim parameters + parameters.length = j + 1; + + for (var i = 0; i < parameters.length; i++) { + parameters[i] = parameters[i].trim(); + } + + return parameters; +} diff --git a/node_modules/negotiator/package.json b/node_modules/negotiator/package.json new file mode 100644 index 0000000000000..0c7ff3c2e6468 --- /dev/null +++ b/node_modules/negotiator/package.json @@ -0,0 +1,42 @@ +{ + "name": "negotiator", + "description": "HTTP content negotiation", + "version": "0.6.2", + "contributors": [ + "Douglas Christopher Wilson <doug@somethingdoug.com>", + "Federico Romero <federico.romero@outboxlabs.com>", + "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)" + ], + "license": "MIT", + "keywords": [ + "http", + "content negotiation", + "accept", + "accept-language", + "accept-encoding", + "accept-charset" + ], + "repository": "jshttp/negotiator", + "devDependencies": { + "eslint": "5.16.0", + "eslint-plugin-markdown": "1.0.0", + "mocha": "6.1.4", + "nyc": "14.0.0" + }, + "files": [ + "lib/", + "HISTORY.md", + "LICENSE", + "index.js", + "README.md" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "test-travis": "nyc --reporter=text npm test" + } +} diff --git a/node_modules/node-gyp/.github/ISSUE_TEMPLATE.md b/node_modules/node-gyp/.github/ISSUE_TEMPLATE.md deleted file mode 100644 index 485e26ecae876..0000000000000 --- a/node_modules/node-gyp/.github/ISSUE_TEMPLATE.md +++ /dev/null @@ -1,49 +0,0 @@ -<!-- -Thank you for reporting an issue! - -Remember, this issue tracker is for reporting issues ONLY with node-gyp. - -If you have an issue installing a specific module, please file an issue on -that module's issue tracker (`npm issues modulename`). Open issue here only if -you are sure this is an issue with node-gyp, not with the module you are -trying to build. - -Fill out the form below. We probably won't investigate an issue that does not -provide the basic information we require. - ---> - -* **Node Version**: <!-- `node -v` and `npm -v` --> -* **Platform**: <!-- `uname -a` (UNIX), or `systeminfo | findstr /B /C:"OS Name" /C:"OS Version" /C:"System Type"` (Windows) --> -* **Compiler**: <!-- `cc -v` (UNIX) or `msbuild /version & cl` (Windows) --> -* **Module**: <!-- what you tried to build/install --> - -<details><summary>Verbose output (from npm or node-gyp):</summary> - -``` -Paste your log here, between the backticks. It can be: - - npm --verbose output, - - or contents of npm-debug.log, - - or output of node-gyp rebuild --verbose. -Include the command you were trying to run. - -This should look like this: - ->npm --verbose -npm info it worked if it ends with ok -npm verb cli [ -npm verb cli 'C:\\...\\node\\13.9.0\\x64\\node.exe', -npm verb cli 'C:\\...\\node\\13.9.0\\x64\\node_modules\\npm\\bin\\npm-cli.js', -npm verb cli '--verbose' -npm verb cli ] -npm info using npm@6.13.7 -npm info using node@v13.9.0 - -Usage: npm <command> -(...) 
-``` - -</details> - -<!-- Any further details --> - diff --git a/node_modules/node-gyp/.github/PULL_REQUEST_TEMPLATE.md b/node_modules/node-gyp/.github/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index 10156d89af112..0000000000000 --- a/node_modules/node-gyp/.github/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,17 +0,0 @@ -<!-- -Thank you for your pull request. Please review the below requirements. - -Contributor guide: https://github.com/nodejs/node/blob/master/CONTRIBUTING.md ---> - -##### Checklist -<!-- Remove items that do not apply. For completed items, change [ ] to [x]. --> - -- [ ] `npm install && npm test` passes -- [ ] tests are included <!-- Bug fixes and new features should include tests --> -- [ ] documentation is changed or added -- [ ] commit message follows [commit guidelines](https://github.com/nodejs/node/blob/master/doc/guides/contributing/pull-requests.md#commit-message-guidelines) - -##### Description of change -<!-- Provide a description of the change --> - diff --git a/node_modules/node-gyp/.github/workflows/tests.yml b/node_modules/node-gyp/.github/workflows/tests.yml deleted file mode 100644 index 651b1a9f49ce3..0000000000000 --- a/node_modules/node-gyp/.github/workflows/tests.yml +++ /dev/null @@ -1,51 +0,0 @@ -# TODO: Line 47, enable pytest --doctest-modules - -name: Tests -on: [push, pull_request] -jobs: - Tests: - strategy: - fail-fast: false - max-parallel: 15 - matrix: - node: [10.x, 12.x, 14.x] - python: [3.6, 3.8, 3.9] - os: [macos-latest, ubuntu-latest, windows-latest] - runs-on: ${{ matrix.os }} - steps: - - name: Checkout Repository - uses: actions/checkout@v2 - - name: Use Node.js ${{ matrix.node }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node }} - - name: Use Python ${{ matrix.python }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python }} - env: - PYTHON_VERSION: ${{ matrix.python }} - - name: Install Dependencies - run: | - npm install --no-progress - pip install flake8 pytest - - name: Set Windows environment - if: matrix.os == 'windows-latest' - run: - echo '::set-env name=GYP_MSVS_VERSION::2015' - echo '::set-env name=GYP_MSVS_OVERRIDE_PATH::C:\\Dummy' - - name: Lint Python - if: matrix.os == 'ubuntu-latest' - run: | - # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - name: Run Python tests - run: | - python -m pytest - # - name: Run doctests with pytest - # run: python -m pytest --doctest-modules - - name: Run Node tests - run: | - npm test diff --git a/node_modules/node-gyp/CHANGELOG.md b/node_modules/node-gyp/CHANGELOG.md deleted file mode 100644 index 733a4b5dd6ad3..0000000000000 --- a/node_modules/node-gyp/CHANGELOG.md +++ /dev/null @@ -1,519 +0,0 @@ -v7.1.2 2020-10-17 -================= - -* [[`096e3aded5`](https://github.com/nodejs/node-gyp/commit/096e3aded5)] - **gyp**: update gyp to 0.6.2 (Myles Borins) [#2241](https://github.com/nodejs/node-gyp/pull/2241) -* [[`54f97cd243`](https://github.com/nodejs/node-gyp/commit/54f97cd243)] - **doc**: add cmd to reset `xcode-select` to initial state (Valera Rozuvan) [#2235](https://github.com/nodejs/node-gyp/pull/2235) - -v7.1.1 2020-10-15 -================= - -This release restores the location of shared library builds to the pre-v7 -location. 
In v7.0.0 until this release, shared library outputs were placed -in a lib.target subdirectory inside the build/{Release,Debug} directory for -builds using `make` (Linux, etc.). This is inconsistent with macOS (Xcode) -behavior and previous node-gyp behavior so has been reverted. -We consider this a bug-fix rather than semver-major change. - -* [[`18bf2d1d38`](https://github.com/nodejs/node-gyp/commit/18bf2d1d38)] - **deps**: update deps to match npm@7 (Rod Vagg) [#2240](https://github.com/nodejs/node-gyp/pull/2240) -* [[`ee6a837cb7`](https://github.com/nodejs/node-gyp/commit/ee6a837cb7)] - **gyp**: update gyp to 0.6.1 (Rod Vagg) [#2238](https://github.com/nodejs/node-gyp/pull/2238) -* [[`3e7f8ccafc`](https://github.com/nodejs/node-gyp/commit/3e7f8ccafc)] - **lib**: better log message when ps fails (Martin Midtgaard) [#2229](https://github.com/nodejs/node-gyp/pull/2229) -* [[`7fb314339f`](https://github.com/nodejs/node-gyp/commit/7fb314339f)] - **test**: GitHub Actions: Test on Python 3.9 (Christian Clauss) [#2230](https://github.com/nodejs/node-gyp/pull/2230) -* [[`754996b9ec`](https://github.com/nodejs/node-gyp/commit/754996b9ec)] - **doc**: replace status badges with new Actions badge (Rod Vagg) [#2218](https://github.com/nodejs/node-gyp/pull/2218) -* [[`2317dc400c`](https://github.com/nodejs/node-gyp/commit/2317dc400c)] - **ci**: switch to GitHub Actions (Shelley Vohr) [#2210](https://github.com/nodejs/node-gyp/pull/2210) -* [[`2cca9b74f7`](https://github.com/nodejs/node-gyp/commit/2cca9b74f7)] - **doc**: drop the --production flag for installing windows-build-tools (DeeDeeG) [#2206](https://github.com/nodejs/node-gyp/pull/2206) - -v7.1.0 2020-08-12 -================= - -* [[`aaf33c3029`](https://github.com/nodejs/node-gyp/commit/aaf33c3029)] - **build**: add update-gyp script (Samuel Attard) [#2167](https://github.com/nodejs/node-gyp/pull/2167) -* * [[`3baa4e4172`](https://github.com/nodejs/node-gyp/commit/3baa4e4172)] - **(SEMVER-MINOR)** **gyp**: update gyp to 0.4.0 (Samuel Attard) [#2165](https://github.com/nodejs/node-gyp/pull/2165) -* * [[`f461d56c53`](https://github.com/nodejs/node-gyp/commit/f461d56c53)] - **(SEMVER-MINOR)** **build**: support apple silicon (arm64 darwin) builds (Samuel Attard) [#2165](https://github.com/nodejs/node-gyp/pull/2165) -* * [[`ee6fa7d3bc`](https://github.com/nodejs/node-gyp/commit/ee6fa7d3bc)] - **docs**: note that node-gyp@7 should solve Catalina CLT issues (Rod Vagg) [#2156](https://github.com/nodejs/node-gyp/pull/2156) -* * [[`4fc8ff179d`](https://github.com/nodejs/node-gyp/commit/4fc8ff179d)] - **doc**: silence curl for macOS Catalina acid test (Chia Wei Ong) [#2150](https://github.com/nodejs/node-gyp/pull/2150) -* * [[`7857cb2eb1`](https://github.com/nodejs/node-gyp/commit/7857cb2eb1)] - **deps**: increase "engines" to "node" : "\>= 10.12.0" (DeeDeeG) [#2153](https://github.com/nodejs/node-gyp/pull/2153) - -v7.0.0 2020-06-03 -================= - -* [[`e18a61afc1`](https://github.com/nodejs/node-gyp/commit/e18a61afc1)] - **build**: shrink bloated addon binaries on windows (Shelley Vohr) [#2060](https://github.com/nodejs/node-gyp/pull/2060) -* [[`4937722cf5`](https://github.com/nodejs/node-gyp/commit/4937722cf5)] - **(SEMVER-MAJOR)** **deps**: replace mkdirp with {recursive} mkdir (Rod Vagg) [#2123](https://github.com/nodejs/node-gyp/pull/2123) -* [[`d45438a047`](https://github.com/nodejs/node-gyp/commit/d45438a047)] - **(SEMVER-MAJOR)** **deps**: update deps, match to npm@7 (Rod Vagg) [#2126](https://github.com/nodejs/node-gyp/pull/2126) -* 
[[`ba4f34b7d6`](https://github.com/nodejs/node-gyp/commit/ba4f34b7d6)] - **doc**: update catalina xcode clt download link (Dario Vladovic) [#2133](https://github.com/nodejs/node-gyp/pull/2133) -* [[`f7bfce96ed`](https://github.com/nodejs/node-gyp/commit/f7bfce96ed)] - **doc**: update acid test and introduce curl|bash test script (Dario Vladovic) [#2105](https://github.com/nodejs/node-gyp/pull/2105) -* [[`e529f3309d`](https://github.com/nodejs/node-gyp/commit/e529f3309d)] - **doc**: update README to reflect upgrade to gyp-next (Ujjwal Sharma) [#2092](https://github.com/nodejs/node-gyp/pull/2092) -* [[`9aed6286a3`](https://github.com/nodejs/node-gyp/commit/9aed6286a3)] - **doc**: give more attention to Catalina issues doc (Matheus Marchini) [#2134](https://github.com/nodejs/node-gyp/pull/2134) -* [[`963f2a7b48`](https://github.com/nodejs/node-gyp/commit/963f2a7b48)] - **doc**: improve cataline discoverability for search engines (Matheus Marchini) [#2135](https://github.com/nodejs/node-gyp/pull/2135) -* [[`7b75af349b`](https://github.com/nodejs/node-gyp/commit/7b75af349b)] - **doc**: add macOS Catalina software update info (Karl Horky) [#2078](https://github.com/nodejs/node-gyp/pull/2078) -* [[`4f23c7bee2`](https://github.com/nodejs/node-gyp/commit/4f23c7bee2)] - **doc**: update link to the code of conduct (#2073) (Michaël Zasso) [#2073](https://github.com/nodejs/node-gyp/pull/2073) -* [[`473cfa283f`](https://github.com/nodejs/node-gyp/commit/473cfa283f)] - **doc**: note in README that Python 3.8 is supported (#2072) (Michaël Zasso) [#2072](https://github.com/nodejs/node-gyp/pull/2072) -* [[`e7402b4a7c`](https://github.com/nodejs/node-gyp/commit/e7402b4a7c)] - **doc**: update catalina xcode cli tools download link (#2044) (Dario Vladović) [#2044](https://github.com/nodejs/node-gyp/pull/2044) -* [[`35de45984f`](https://github.com/nodejs/node-gyp/commit/35de45984f)] - **doc**: update catalina xcode cli tools download link; formatting (Jonathan Hult) [#2034](https://github.com/nodejs/node-gyp/pull/2034) -* [[`48642191f5`](https://github.com/nodejs/node-gyp/commit/48642191f5)] - **doc**: add download link for Command Line Tools for Xcode (Przemysław Bitkowski) [#2029](https://github.com/nodejs/node-gyp/pull/2029) -* [[`ae5b150051`](https://github.com/nodejs/node-gyp/commit/ae5b150051)] - **doc**: Catalina suggestion: remove /Library/Developer/CommandLineTools (Christian Clauss) [#2022](https://github.com/nodejs/node-gyp/pull/2022) -* [[`d1dea13fe4`](https://github.com/nodejs/node-gyp/commit/d1dea13fe4)] - **doc**: fix changelog 6.1.0 release year to be 2020 (Quentin Vernot) [#2021](https://github.com/nodejs/node-gyp/pull/2021) -* [[`6356117b08`](https://github.com/nodejs/node-gyp/commit/6356117b08)] - **doc, bin**: stop suggesting opening node-gyp issues (Bartosz Sosnowski) [#2096](https://github.com/nodejs/node-gyp/pull/2096) -* [[`a6b76a8b48`](https://github.com/nodejs/node-gyp/commit/a6b76a8b48)] - **gyp**: update gyp to 0.2.1 (Ujjwal Sharma) [#2092](https://github.com/nodejs/node-gyp/pull/2092) -* [[`ebc34ec823`](https://github.com/nodejs/node-gyp/commit/ebc34ec823)] - **gyp**: update gyp to 0.2.0 (Ujjwal Sharma) [#2092](https://github.com/nodejs/node-gyp/pull/2092) -* [[`972780bde7`](https://github.com/nodejs/node-gyp/commit/972780bde7)] - **(SEMVER-MAJOR)** **gyp**: sync code base with nodejs repo (#1975) (Michaël Zasso) [#1975](https://github.com/nodejs/node-gyp/pull/1975) -* [[`c255ffbf6a`](https://github.com/nodejs/node-gyp/commit/c255ffbf6a)] - **lib**: drop "-2" flag for "py.exe" 
launcher (DeeDeeG) [#2131](https://github.com/nodejs/node-gyp/pull/2131) -* [[`1f7e1e93b5`](https://github.com/nodejs/node-gyp/commit/1f7e1e93b5)] - **lib**: ignore VS instances that cause COMExceptions (Andrew Casey) [#2018](https://github.com/nodejs/node-gyp/pull/2018) -* [[`741ab096d5`](https://github.com/nodejs/node-gyp/commit/741ab096d5)] - **test**: remove support for EOL versions of Node.js (Shelley Vohr) -* [[`ca86ef2539`](https://github.com/nodejs/node-gyp/commit/ca86ef2539)] - **test**: bump actions/checkout from v1 to v2 (BSKY) [#2063](https://github.com/nodejs/node-gyp/pull/2063) - -v6.1.0 2020-01-08 -================= - -* [[`9a7dd16b76`](https://github.com/nodejs/node-gyp/commit/9a7dd16b76)] - **doc**: remove backticks from Python version list (Rod Vagg) [#2011](https://github.com/nodejs/node-gyp/pull/2011) -* [[`26cd6eaea6`](https://github.com/nodejs/node-gyp/commit/26cd6eaea6)] - **doc**: add GitHub Actions badge (#1994) (Rod Vagg) [#1994](https://github.com/nodejs/node-gyp/pull/1994) -* [[`312c12ef4f`](https://github.com/nodejs/node-gyp/commit/312c12ef4f)] - **doc**: update macOS\_Catalina.md (#1992) (James Home) [#1992](https://github.com/nodejs/node-gyp/pull/1992) -* [[`f7b6b6b77b`](https://github.com/nodejs/node-gyp/commit/f7b6b6b77b)] - **doc**: fix typo in README.md (#1985) (Suraneti Rodsuwan) [#1985](https://github.com/nodejs/node-gyp/pull/1985) -* [[`6b8f2652dd`](https://github.com/nodejs/node-gyp/commit/6b8f2652dd)] - **doc**: add travis badge (Rod Vagg) [#1971](https://github.com/nodejs/node-gyp/pull/1971) -* [[`20aa0b44f7`](https://github.com/nodejs/node-gyp/commit/20aa0b44f7)] - **doc**: macOS Catalina add two commands (Christian Clauss) [#1962](https://github.com/nodejs/node-gyp/pull/1962) -* [[`14f2a07a39`](https://github.com/nodejs/node-gyp/commit/14f2a07a39)] - **gyp**: list(dict) so we can del dict(key) while iterating (Christian Clauss) [#2009](https://github.com/nodejs/node-gyp/pull/2009) -* [[`f242ce4d2c`](https://github.com/nodejs/node-gyp/commit/f242ce4d2c)] - **lib**: compatibility with semver ≥ 7 (`new` for semver.Range) (Xavier Guimard) [#2006](https://github.com/nodejs/node-gyp/pull/2006) -* [[`3bcba2a01a`](https://github.com/nodejs/node-gyp/commit/3bcba2a01a)] - **(SEMVER-MINOR)** **lib**: noproxy support, match proxy detection to `request` (Matias Lopez) [#1978](https://github.com/nodejs/node-gyp/pull/1978) -* [[`470cc2178e`](https://github.com/nodejs/node-gyp/commit/470cc2178e)] - **test**: remove old docker test harness (#1993) (Rod Vagg) [#1993](https://github.com/nodejs/node-gyp/pull/1993) -* [[`31ecc8421d`](https://github.com/nodejs/node-gyp/commit/31ecc8421d)] - **test**: add Windows to GitHub Actions testing (#1996) (Christian Clauss) [#1996](https://github.com/nodejs/node-gyp/pull/1996) -* [[`5a729e86ee`](https://github.com/nodejs/node-gyp/commit/5a729e86ee)] - **test**: fix typo in header download test (#2001) (Richard Lau) [#2001](https://github.com/nodejs/node-gyp/pull/2001) -* [[`345c70e56d`](https://github.com/nodejs/node-gyp/commit/345c70e56d)] - **test**: direct python invocation & simpler pyenv (Matias Lopez) [#1979](https://github.com/nodejs/node-gyp/pull/1979) -* [[`d6a7e0e1fb`](https://github.com/nodejs/node-gyp/commit/d6a7e0e1fb)] - **test**: fix macOS Travis on Python 2.7 & 3.7 (Christian Clauss) [#1979](https://github.com/nodejs/node-gyp/pull/1979) -* [[`5a64e9bd32`](https://github.com/nodejs/node-gyp/commit/5a64e9bd32)] - **test**: initial Github Actions with Ubuntu & macOS (Christian Clauss) 
[#1985](https://github.com/nodejs/node-gyp/pull/1985) -* [[`04da736d38`](https://github.com/nodejs/node-gyp/commit/04da736d38)] - **test**: fix Python unittests (cclauss) [#1961](https://github.com/nodejs/node-gyp/pull/1961) -* [[`0670e5189d`](https://github.com/nodejs/node-gyp/commit/0670e5189d)] - **test**: add header download test (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796) -* [[`c506a6a150`](https://github.com/nodejs/node-gyp/commit/c506a6a150)] - **test**: configure proper devDir for invoking configure() (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796) - -v6.0.1 2019-11-01 -================= - -* [[`8ec2e681d5`](https://github.com/nodejs/node-gyp/commit/8ec2e681d5)] - **doc**: add macOS\_Catalina.md document (cclauss) [#1940](https://github.com/nodejs/node-gyp/pull/1940) -* [[`1b11be63cc`](https://github.com/nodejs/node-gyp/commit/1b11be63cc)] - **gyp**: python3 fixes: utf8 decode, use of 'None' in eval (Wilfried Goesgens) [#1925](https://github.com/nodejs/node-gyp/pull/1925) -* [[`c0282daa48`](https://github.com/nodejs/node-gyp/commit/c0282daa48)] - **gyp**: iteritems() -\> items() in compile\_commands\_json.py (cclauss) [#1947](https://github.com/nodejs/node-gyp/pull/1947) -* [[`d8e09a1b6a`](https://github.com/nodejs/node-gyp/commit/d8e09a1b6a)] - **gyp**: make cmake python3 compatible (gengjiawen) [#1944](https://github.com/nodejs/node-gyp/pull/1944) -* [[`9c0f3404f0`](https://github.com/nodejs/node-gyp/commit/9c0f3404f0)] - **gyp**: fix TypeError in XcodeVersion() (Christian Clauss) [#1939](https://github.com/nodejs/node-gyp/pull/1939) -* [[`bb2eb72a3f`](https://github.com/nodejs/node-gyp/commit/bb2eb72a3f)] - **gyp**: finish decode stdout on Python 3 (Christian Clauss) [#1937](https://github.com/nodejs/node-gyp/pull/1937) -* [[`f0693413d9`](https://github.com/nodejs/node-gyp/commit/f0693413d9)] - **src,win**: allow 403 errors for arm64 node.lib (Richard Lau) [#1934](https://github.com/nodejs/node-gyp/pull/1934) -* [[`c60c22de58`](https://github.com/nodejs/node-gyp/commit/c60c22de58)] - **deps**: update deps to roughly match current npm@6 (Rod Vagg) [#1920](https://github.com/nodejs/node-gyp/pull/1920) -* [[`b91718eefc`](https://github.com/nodejs/node-gyp/commit/b91718eefc)] - **test**: upgrade Linux Travis CI to Python 3.8 (Christian Clauss) [#1923](https://github.com/nodejs/node-gyp/pull/1923) -* [[`3538a317b6`](https://github.com/nodejs/node-gyp/commit/3538a317b6)] - **doc**: adjustments to the README.md for new users (Dan Pike) [#1919](https://github.com/nodejs/node-gyp/pull/1919) -* [[`4fff8458c0`](https://github.com/nodejs/node-gyp/commit/4fff8458c0)] - **travis**: ignore failed `brew upgrade npm`, update xcode (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932) -* [[`60e4488f08`](https://github.com/nodejs/node-gyp/commit/60e4488f08)] - **build**: avoid bare exceptions in xcode\_emulation.py (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932) -* [[`032db2a2d0`](https://github.com/nodejs/node-gyp/commit/032db2a2d0)] - **lib,install**: always download SHA sums on Windows (Sam Hughes) [#1926](https://github.com/nodejs/node-gyp/pull/1926) -* [[`5a83630c33`](https://github.com/nodejs/node-gyp/commit/5a83630c33)] - **travis**: add Windows + Python 3.8 to the mix (Rod Vagg) [#1921](https://github.com/nodejs/node-gyp/pull/1921) - -v6.0.0 2019-10-04 -================= - -* [[`dd0e97ef0b`](https://github.com/nodejs/node-gyp/commit/dd0e97ef0b)] - **(SEMVER-MAJOR)** **lib**: try to find `python` after 
`python3` (Sam Roberts) [#1907](https://github.com/nodejs/node-gyp/pull/1907) -* [[`f60ed47d14`](https://github.com/nodejs/node-gyp/commit/f60ed47d14)] - **travis**: add Python 3.5 and 3.6 tests on Linux (Christian Clauss) [#1903](https://github.com/nodejs/node-gyp/pull/1903) -* [[`c763ca1838`](https://github.com/nodejs/node-gyp/commit/c763ca1838)] - **(SEMVER-MAJOR)** **doc**: Declare that node-gyp is Python 3 compatible (cclauss) [#1811](https://github.com/nodejs/node-gyp/pull/1811) -* [[`3d1c60ab81`](https://github.com/nodejs/node-gyp/commit/3d1c60ab81)] - **(SEMVER-MAJOR)** **lib**: accept Python 3 by default (João Reis) [#1844](https://github.com/nodejs/node-gyp/pull/1844) -* [[`c6e3b65a23`](https://github.com/nodejs/node-gyp/commit/c6e3b65a23)] - **(SEMVER-MAJOR)** **lib**: raise the minimum Python version from 2.6 to 2.7 (cclauss) [#1818](https://github.com/nodejs/node-gyp/pull/1818) - -v5.1.1 2020-05-25 -================= - -* [[`bdd3a79abe`](https://github.com/nodejs/node-gyp/commit/bdd3a79abe)] - **build**: shrink bloated addon binaries on windows (Shelley Vohr) [#2060](https://github.com/nodejs/node-gyp/pull/2060) -* [[`1f2ba75bc0`](https://github.com/nodejs/node-gyp/commit/1f2ba75bc0)] - **doc**: add macOS Catalina software update info (Karl Horky) [#2078](https://github.com/nodejs/node-gyp/pull/2078) -* [[`c106d915f5`](https://github.com/nodejs/node-gyp/commit/c106d915f5)] - **doc**: update catalina xcode cli tools download link (#2044) (Dario Vladović) [#2044](https://github.com/nodejs/node-gyp/pull/2044) -* [[`9a6fea92e2`](https://github.com/nodejs/node-gyp/commit/9a6fea92e2)] - **doc**: update catalina xcode cli tools download link; formatting (Jonathan Hult) [#2034](https://github.com/nodejs/node-gyp/pull/2034) -* [[`59b0b1add8`](https://github.com/nodejs/node-gyp/commit/59b0b1add8)] - **doc**: add download link for Command Line Tools for Xcode (Przemysław Bitkowski) [#2029](https://github.com/nodejs/node-gyp/pull/2029) -* [[`bb8d0e7b10`](https://github.com/nodejs/node-gyp/commit/bb8d0e7b10)] - **doc**: Catalina suggestion: remove /Library/Developer/CommandLineTools (Christian Clauss) [#2022](https://github.com/nodejs/node-gyp/pull/2022) -* [[`fb2e80d4e3`](https://github.com/nodejs/node-gyp/commit/fb2e80d4e3)] - **doc**: update link to the code of conduct (#2073) (Michaël Zasso) [#2073](https://github.com/nodejs/node-gyp/pull/2073) -* [[`251d9c885c`](https://github.com/nodejs/node-gyp/commit/251d9c885c)] - **doc**: note in README that Python 3.8 is supported (#2072) (Michaël Zasso) [#2072](https://github.com/nodejs/node-gyp/pull/2072) -* [[`2b6fc3c8d6`](https://github.com/nodejs/node-gyp/commit/2b6fc3c8d6)] - **doc, bin**: stop suggesting opening node-gyp issues (Bartosz Sosnowski) [#2096](https://github.com/nodejs/node-gyp/pull/2096) -* [[`a876ae58ad`](https://github.com/nodejs/node-gyp/commit/a876ae58ad)] - **test**: bump actions/checkout from v1 to v2 (BSKY) [#2063](https://github.com/nodejs/node-gyp/pull/2063) - -v5.1.0 2020-02-05 -================= - -* [[`f37a8b40d0`](https://github.com/nodejs/node-gyp/commit/f37a8b40d0)] - **doc**: add GitHub Actions badge (#1994) (Rod Vagg) [#1994](https://github.com/nodejs/node-gyp/pull/1994) -* [[`cb3f6aae5e`](https://github.com/nodejs/node-gyp/commit/cb3f6aae5e)] - **doc**: update macOS\_Catalina.md (#1992) (James Home) [#1992](https://github.com/nodejs/node-gyp/pull/1992) -* [[`0607596a4c`](https://github.com/nodejs/node-gyp/commit/0607596a4c)] - **doc**: fix typo in README.md (#1985) (Suraneti Rodsuwan) 
[#1985](https://github.com/nodejs/node-gyp/pull/1985) -* [[`0d5a415a14`](https://github.com/nodejs/node-gyp/commit/0d5a415a14)] - **doc**: add travis badge (Rod Vagg) [#1971](https://github.com/nodejs/node-gyp/pull/1971) -* [[`103740cd95`](https://github.com/nodejs/node-gyp/commit/103740cd95)] - **gyp**: list(dict) so we can del dict(key) while iterating (Christian Clauss) [#2009](https://github.com/nodejs/node-gyp/pull/2009) -* [[`278dcddbdd`](https://github.com/nodejs/node-gyp/commit/278dcddbdd)] - **lib**: ignore VS instances that cause COMExceptions (Andrew Casey) [#2018](https://github.com/nodejs/node-gyp/pull/2018) -* [[`1694907bbf`](https://github.com/nodejs/node-gyp/commit/1694907bbf)] - **lib**: compatibility with semver ≥ 7 (`new` for semver.Range) (Xavier Guimard) [#2006](https://github.com/nodejs/node-gyp/pull/2006) -* [[`a3f1143514`](https://github.com/nodejs/node-gyp/commit/a3f1143514)] - **(SEMVER-MINOR)** **lib**: noproxy support, match proxy detection to `request` (Matias Lopez) [#1978](https://github.com/nodejs/node-gyp/pull/1978) -* [[`52365819c7`](https://github.com/nodejs/node-gyp/commit/52365819c7)] - **test**: remove old docker test harness (#1993) (Rod Vagg) [#1993](https://github.com/nodejs/node-gyp/pull/1993) -* [[`bc509c511d`](https://github.com/nodejs/node-gyp/commit/bc509c511d)] - **test**: add Windows to GitHub Actions testing (#1996) (Christian Clauss) [#1996](https://github.com/nodejs/node-gyp/pull/1996) -* [[`91ee26dd48`](https://github.com/nodejs/node-gyp/commit/91ee26dd48)] - **test**: fix typo in header download test (#2001) (Richard Lau) [#2001](https://github.com/nodejs/node-gyp/pull/2001) -* [[`0923f344c9`](https://github.com/nodejs/node-gyp/commit/0923f344c9)] - **test**: direct python invocation & simpler pyenv (Matias Lopez) [#1979](https://github.com/nodejs/node-gyp/pull/1979) -* [[`32c8744b34`](https://github.com/nodejs/node-gyp/commit/32c8744b34)] - **test**: fix macOS Travis on Python 2.7 & 3.7 (Christian Clauss) [#1979](https://github.com/nodejs/node-gyp/pull/1979) -* [[`fd4b1351e4`](https://github.com/nodejs/node-gyp/commit/fd4b1351e4)] - **test**: initial Github Actions with Ubuntu & macOS (Christian Clauss) [#1985](https://github.com/nodejs/node-gyp/pull/1985) - -v5.0.7 2019-12-16 -================= - -Republish of v5.0.6 with unnecessary tarball removed from pack file. 
- -v5.0.6 2019-12-16 -================= - -* [[`cdec00286f`](https://github.com/nodejs/node-gyp/commit/cdec00286f)] - **doc**: adjustments to the README.md for new users (Dan Pike) [#1919](https://github.com/nodejs/node-gyp/pull/1919) -* [[`b7c8233ef2`](https://github.com/nodejs/node-gyp/commit/b7c8233ef2)] - **test**: fix Python unittests (cclauss) [#1961](https://github.com/nodejs/node-gyp/pull/1961) -* [[`e12b00ab0a`](https://github.com/nodejs/node-gyp/commit/e12b00ab0a)] - **doc**: macOS Catalina add two commands (Christian Clauss) [#1962](https://github.com/nodejs/node-gyp/pull/1962) -* [[`70b9890c0d`](https://github.com/nodejs/node-gyp/commit/70b9890c0d)] - **test**: add header download test (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796) -* [[`4029fa8629`](https://github.com/nodejs/node-gyp/commit/4029fa8629)] - **test**: configure proper devDir for invoking configure() (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796) -* [[`fe8b02cc8b`](https://github.com/nodejs/node-gyp/commit/fe8b02cc8b)] - **doc**: add macOS\_Catalina.md document (cclauss) [#1940](https://github.com/nodejs/node-gyp/pull/1940) -* [[`8ea47ce365`](https://github.com/nodejs/node-gyp/commit/8ea47ce365)] - **gyp**: python3 fixes: utf8 decode, use of 'None' in eval (Wilfried Goesgens) [#1925](https://github.com/nodejs/node-gyp/pull/1925) -* [[`c7229716ba`](https://github.com/nodejs/node-gyp/commit/c7229716ba)] - **gyp**: iteritems() -\> items() in compile\_commands\_json.py (cclauss) [#1947](https://github.com/nodejs/node-gyp/pull/1947) -* [[`2a18b2a0f8`](https://github.com/nodejs/node-gyp/commit/2a18b2a0f8)] - **gyp**: make cmake python3 compatible (gengjiawen) [#1944](https://github.com/nodejs/node-gyp/pull/1944) -* [[`70f391e844`](https://github.com/nodejs/node-gyp/commit/70f391e844)] - **gyp**: fix TypeError in XcodeVersion() (Christian Clauss) [#1939](https://github.com/nodejs/node-gyp/pull/1939) -* [[`9f4f0fa34e`](https://github.com/nodejs/node-gyp/commit/9f4f0fa34e)] - **gyp**: finish decode stdout on Python 3 (Christian Clauss) [#1937](https://github.com/nodejs/node-gyp/pull/1937) -* [[`7cf507906d`](https://github.com/nodejs/node-gyp/commit/7cf507906d)] - **src,win**: allow 403 errors for arm64 node.lib (Richard Lau) [#1934](https://github.com/nodejs/node-gyp/pull/1934) -* [[`ad0d182c01`](https://github.com/nodejs/node-gyp/commit/ad0d182c01)] - **deps**: update deps to roughly match current npm@6 (Rod Vagg) [#1920](https://github.com/nodejs/node-gyp/pull/1920) -* [[`1553081ed6`](https://github.com/nodejs/node-gyp/commit/1553081ed6)] - **test**: upgrade Linux Travis CI to Python 3.8 (Christian Clauss) [#1923](https://github.com/nodejs/node-gyp/pull/1923) -* [[`0705cae9aa`](https://github.com/nodejs/node-gyp/commit/0705cae9aa)] - **travis**: ignore failed `brew upgrade npm`, update xcode (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932) -* [[`7bfdb6f5bf`](https://github.com/nodejs/node-gyp/commit/7bfdb6f5bf)] - **build**: avoid bare exceptions in xcode\_emulation.py (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932) -* [[`7edf7658fa`](https://github.com/nodejs/node-gyp/commit/7edf7658fa)] - **lib,install**: always download SHA sums on Windows (Sam Hughes) [#1926](https://github.com/nodejs/node-gyp/pull/1926) -* [[`69056d04fe`](https://github.com/nodejs/node-gyp/commit/69056d04fe)] - **travis**: add Windows + Python 3.8 to the mix (Rod Vagg) [#1921](https://github.com/nodejs/node-gyp/pull/1921) - -v5.0.5 2019-10-04 -================= - -* 
[[`3891391746`](https://github.com/nodejs/node-gyp/commit/3891391746)] - **doc**: reconcile README with Python 3 compat changes (Rod Vagg) [#1911](https://github.com/nodejs/node-gyp/pull/1911) -* [[`07f81f1920`](https://github.com/nodejs/node-gyp/commit/07f81f1920)] - **lib**: accept Python 3 after Python 2 (Sam Roberts) [#1910](https://github.com/nodejs/node-gyp/pull/1910) -* [[`04ce59f4a2`](https://github.com/nodejs/node-gyp/commit/04ce59f4a2)] - **doc**: clarify Python configuration, etc (Sam Roberts) [#1908](https://github.com/nodejs/node-gyp/pull/1908) -* [[`01c46ee3df`](https://github.com/nodejs/node-gyp/commit/01c46ee3df)] - **gyp**: add \_\_lt\_\_ to MSVSSolutionEntry (João Reis) [#1904](https://github.com/nodejs/node-gyp/pull/1904) -* [[`735d961b99`](https://github.com/nodejs/node-gyp/commit/735d961b99)] - **win**: support VS 2017 Desktop Express (João Reis) [#1902](https://github.com/nodejs/node-gyp/pull/1902) -* [[`3834156a92`](https://github.com/nodejs/node-gyp/commit/3834156a92)] - **test**: add Python 3.5 and 3.6 tests on Linux (cclauss) [#1909](https://github.com/nodejs/node-gyp/pull/1909) -* [[`1196e990d8`](https://github.com/nodejs/node-gyp/commit/1196e990d8)] - **src**: update to standard@14 (Rod Vagg) [#1899](https://github.com/nodejs/node-gyp/pull/1899) -* [[`53ee7dfe89`](https://github.com/nodejs/node-gyp/commit/53ee7dfe89)] - **gyp**: fix undefined name: cflags --\> ldflags (Christian Clauss) [#1901](https://github.com/nodejs/node-gyp/pull/1901) -* [[`5871dcf6c9`](https://github.com/nodejs/node-gyp/commit/5871dcf6c9)] - **src,win**: add support for fetching arm64 node.lib (Richard Townsend) [#1875](https://github.com/nodejs/node-gyp/pull/1875) - -v5.0.4 2019-09-27 -================= - -* [[`1236869ffc`](https://github.com/nodejs/node-gyp/commit/1236869ffc)] - **gyp**: modify XcodeVersion() to convert "4.2" to "0420" and "10.0" to "1000" (Christian Clauss) [#1895](https://github.com/nodejs/node-gyp/pull/1895) -* [[`36638afe48`](https://github.com/nodejs/node-gyp/commit/36638afe48)] - **gyp**: more decode stdout on Python 3 (cclauss) [#1894](https://github.com/nodejs/node-gyp/pull/1894) -* [[`f753c167c5`](https://github.com/nodejs/node-gyp/commit/f753c167c5)] - **gyp**: decode stdout on Python 3 (cclauss) [#1890](https://github.com/nodejs/node-gyp/pull/1890) -* [[`60a4083523`](https://github.com/nodejs/node-gyp/commit/60a4083523)] - **doc**: update xcode install instructions to match Node's BUILDING (Nhan Khong) [#1884](https://github.com/nodejs/node-gyp/pull/1884) -* [[`19dbc9ac32`](https://github.com/nodejs/node-gyp/commit/19dbc9ac32)] - **deps**: update tar to 4.4.12 (Matheus Marchini) [#1889](https://github.com/nodejs/node-gyp/pull/1889) -* [[`5f3ed92181`](https://github.com/nodejs/node-gyp/commit/5f3ed92181)] - **bin**: fix the usage instructions (Halit Ogunc) [#1888](https://github.com/nodejs/node-gyp/pull/1888) -* [[`aab118edf1`](https://github.com/nodejs/node-gyp/commit/aab118edf1)] - **lib**: adding keep-alive header to download requests (Milad Farazmand) [#1863](https://github.com/nodejs/node-gyp/pull/1863) -* [[`1186e89326`](https://github.com/nodejs/node-gyp/commit/1186e89326)] - **lib**: ignore non-critical os.userInfo() failures (Rod Vagg) [#1835](https://github.com/nodejs/node-gyp/pull/1835) -* [[`785e527c3d`](https://github.com/nodejs/node-gyp/commit/785e527c3d)] - **doc**: fix missing argument for setting python path (lagorsse) [#1802](https://github.com/nodejs/node-gyp/pull/1802) -* [[`a97615196c`](https://github.com/nodejs/node-gyp/commit/a97615196c)] 
- **gyp**: rm semicolons (Python != JavaScript) (MattIPv4) [#1858](https://github.com/nodejs/node-gyp/pull/1858) -* [[`06019bac24`](https://github.com/nodejs/node-gyp/commit/06019bac24)] - **gyp**: assorted typo fixes (XhmikosR) [#1853](https://github.com/nodejs/node-gyp/pull/1853) -* [[`3f4972c1ca`](https://github.com/nodejs/node-gyp/commit/3f4972c1ca)] - **gyp**: use "is" when comparing to None (Vladyslav Burzakovskyy) [#1860](https://github.com/nodejs/node-gyp/pull/1860) -* [[`1cb4708073`](https://github.com/nodejs/node-gyp/commit/1cb4708073)] - **src,win**: improve unmanaged handling (Peter Sabath) [#1852](https://github.com/nodejs/node-gyp/pull/1852) -* [[`5553cd910e`](https://github.com/nodejs/node-gyp/commit/5553cd910e)] - **gyp**: improve Windows+Cygwin compatibility (Jose Quijada) [#1817](https://github.com/nodejs/node-gyp/pull/1817) -* [[`8bcb1fbb43`](https://github.com/nodejs/node-gyp/commit/8bcb1fbb43)] - **gyp**: Python 3 Windows fixes (João Reis) [#1843](https://github.com/nodejs/node-gyp/pull/1843) -* [[`2e24d0a326`](https://github.com/nodejs/node-gyp/commit/2e24d0a326)] - **test**: accept Python 3 in test-find-python.js (João Reis) [#1843](https://github.com/nodejs/node-gyp/pull/1843) -* [[`1267b4dc1c`](https://github.com/nodejs/node-gyp/commit/1267b4dc1c)] - **build**: add test run Python 3.7 on macOS (Christian Clauss) [#1843](https://github.com/nodejs/node-gyp/pull/1843) -* [[`da1b031aa3`](https://github.com/nodejs/node-gyp/commit/da1b031aa3)] - **build**: import StringIO on Python 2 and Python 3 (Christian Clauss) [#1836](https://github.com/nodejs/node-gyp/pull/1836) -* [[`fa0ed4aa42`](https://github.com/nodejs/node-gyp/commit/fa0ed4aa42)] - **build**: more Python 3 compat, replace compile with ast (cclauss) [#1820](https://github.com/nodejs/node-gyp/pull/1820) -* [[`18d5c7c9d0`](https://github.com/nodejs/node-gyp/commit/18d5c7c9d0)] - **win,src**: update win\_delay\_load\_hook.cc to work with /clr (Ivan Petrovic) [#1819](https://github.com/nodejs/node-gyp/pull/1819) - -v5.0.3 2019-07-17 -================= - -* [[`66ad305775`](https://github.com/nodejs/node-gyp/commit/66ad305775)] - **python**: accept Python 3 conditionally (João Reis) [#1815](https://github.com/nodejs/node-gyp/pull/1815) -* [[`7e7fce3fed`](https://github.com/nodejs/node-gyp/commit/7e7fce3fed)] - **python**: move Python detection to its own file (João Reis) [#1815](https://github.com/nodejs/node-gyp/pull/1815) -* [[`e40c99e283`](https://github.com/nodejs/node-gyp/commit/e40c99e283)] - **src**: implement standard.js linting (Rod Vagg) [#1794](https://github.com/nodejs/node-gyp/pull/1794) -* [[`bb92c761a9`](https://github.com/nodejs/node-gyp/commit/bb92c761a9)] - **test**: add Node.js 6 on Windows to Travis CI (João Reis) [#1812](https://github.com/nodejs/node-gyp/pull/1812) -* [[`7fd924079f`](https://github.com/nodejs/node-gyp/commit/7fd924079f)] - **test**: increase tap timeout (João Reis) [#1812](https://github.com/nodejs/node-gyp/pull/1812) -* [[`7e8127068f`](https://github.com/nodejs/node-gyp/commit/7e8127068f)] - **test**: cover supported node versions with travis (Rod Vagg) [#1809](https://github.com/nodejs/node-gyp/pull/1809) -* [[`24109148df`](https://github.com/nodejs/node-gyp/commit/24109148df)] - **test**: downgrade to tap@^12 for continued Node 6 support (Rod Vagg) [#1808](https://github.com/nodejs/node-gyp/pull/1808) -* [[`656117cc4a`](https://github.com/nodejs/node-gyp/commit/656117cc4a)] - **win**: make VS path match case-insensitive (João Reis) 
[#1806](https://github.com/nodejs/node-gyp/pull/1806) - -v5.0.2 2019-06-27 -================= - -* [[`2761afbf73`](https://github.com/nodejs/node-gyp/commit/2761afbf73)] - **build,test**: add duplicate symbol test (Gabriel Schulhof) [#1689](https://github.com/nodejs/node-gyp/pull/1689) -* [[`82f129d6de`](https://github.com/nodejs/node-gyp/commit/82f129d6de)] - **gyp**: replace optparse to argparse (KiYugadgeter) [#1591](https://github.com/nodejs/node-gyp/pull/1591) -* [[`afaaa29c61`](https://github.com/nodejs/node-gyp/commit/afaaa29c61)] - **gyp**: remove from \_\_future\_\_ import with\_statement (cclauss) [#1799](https://github.com/nodejs/node-gyp/pull/1799) -* [[`a991f633d6`](https://github.com/nodejs/node-gyp/commit/a991f633d6)] - **gyp**: fix the remaining Python 3 issues (cclauss) [#1793](https://github.com/nodejs/node-gyp/pull/1793) -* [[`f952b08f84`](https://github.com/nodejs/node-gyp/commit/f952b08f84)] - **gyp**: move from \_\_future\_\_ import to the top of the file (cclauss) [#1789](https://github.com/nodejs/node-gyp/pull/1789) -* [[`4f4a677dfa`](https://github.com/nodejs/node-gyp/commit/4f4a677dfa)] - **gyp**: use different default compiler for z/OS (Shuowang (Wayne) Zhang) [#1768](https://github.com/nodejs/node-gyp/pull/1768) -* [[`03683f09d6`](https://github.com/nodejs/node-gyp/commit/03683f09d6)] - **lib**: code de-duplication (Pavel Medvedev) [#965](https://github.com/nodejs/node-gyp/pull/965) -* [[`611bc3c89f`](https://github.com/nodejs/node-gyp/commit/611bc3c89f)] - **lib**: add .json suffix for explicit require (Rod Vagg) [#1787](https://github.com/nodejs/node-gyp/pull/1787) -* [[`d3478d7b0b`](https://github.com/nodejs/node-gyp/commit/d3478d7b0b)] - **meta**: add to .gitignore (Refael Ackermann) [#1573](https://github.com/nodejs/node-gyp/pull/1573) -* [[`7a9a038e9e`](https://github.com/nodejs/node-gyp/commit/7a9a038e9e)] - **test**: add parallel test runs on macOS and Windows (cclauss) [#1800](https://github.com/nodejs/node-gyp/pull/1800) -* [[`7dd7f2b2a2`](https://github.com/nodejs/node-gyp/commit/7dd7f2b2a2)] - **test**: fix Python syntax error in test-adding.js (cclauss) [#1793](https://github.com/nodejs/node-gyp/pull/1793) -* [[`395f843de0`](https://github.com/nodejs/node-gyp/commit/395f843de0)] - **test**: replace self-signed cert with 'localhost' (Rod Vagg) [#1795](https://github.com/nodejs/node-gyp/pull/1795) -* [[`a52c6eb9e8`](https://github.com/nodejs/node-gyp/commit/a52c6eb9e8)] - **test**: migrate from tape to tap (Rod Vagg) [#1795](https://github.com/nodejs/node-gyp/pull/1795) -* [[`ec2eb44a30`](https://github.com/nodejs/node-gyp/commit/ec2eb44a30)] - **test**: use Nan in duplicate\_symbols (Gabriel Schulhof) [#1689](https://github.com/nodejs/node-gyp/pull/1689) -* [[`1597c84aad`](https://github.com/nodejs/node-gyp/commit/1597c84aad)] - **test**: use Travis CI to run tests on every pull request (cclauss) [#1752](https://github.com/nodejs/node-gyp/pull/1752) -* [[`dd9bf929ac`](https://github.com/nodejs/node-gyp/commit/dd9bf929ac)] - **zos**: update compiler options (Shuowang (Wayne) Zhang) [#1768](https://github.com/nodejs/node-gyp/pull/1768) - -v5.0.1 2019-06-20 -================= - -* [[`e3861722ed`](https://github.com/nodejs/node-gyp/commit/e3861722ed)] - **doc**: document --jobs max (David Sanders) [#1770](https://github.com/nodejs/node-gyp/pull/1770) -* [[`1cfdb28886`](https://github.com/nodejs/node-gyp/commit/1cfdb28886)] - **lib**: reintroduce support for iojs file naming for releases \>= 1 && \< 4 (Samuel Attard) 
[#1777](https://github.com/nodejs/node-gyp/pull/1777) - -v5.0.0 2019-06-13 -================= - -* [[`8a83972743`](https://github.com/nodejs/node-gyp/commit/8a83972743)] - **(SEMVER-MAJOR)** **bin**: follow XDG OS conventions for storing data (Selwyn) [#1570](https://github.com/nodejs/node-gyp/pull/1570) -* [[`9e46872ea3`](https://github.com/nodejs/node-gyp/commit/9e46872ea3)] - **bin,lib**: remove extra comments/lines/spaces (Jon Moss) [#1508](https://github.com/nodejs/node-gyp/pull/1508) -* [[`8098ebdeb4`](https://github.com/nodejs/node-gyp/commit/8098ebdeb4)] - **deps**: replace `osenv` dependency with native `os` (Selwyn) -* [[`f83b457e03`](https://github.com/nodejs/node-gyp/commit/f83b457e03)] - **deps**: bump request to 2.8.7, fixes heok/hawk issues (Rohit Hazra) [#1492](https://github.com/nodejs/node-gyp/pull/1492) -* [[`323cee7323`](https://github.com/nodejs/node-gyp/commit/323cee7323)] - **deps**: pin `request` version range (Refael Ackermann) [#1300](https://github.com/nodejs/node-gyp/pull/1300) -* [[`c515912d08`](https://github.com/nodejs/node-gyp/commit/c515912d08)] - **doc**: improve issue template (Bartosz Sosnowski) [#1618](https://github.com/nodejs/node-gyp/pull/1618) -* [[`cca2d66727`](https://github.com/nodejs/node-gyp/commit/cca2d66727)] - **doc**: python info needs own header (Taylor D. Lee) [#1245](https://github.com/nodejs/node-gyp/pull/1245) -* [[`3e64c780f5`](https://github.com/nodejs/node-gyp/commit/3e64c780f5)] - **doc**: lint README.md (Jon Moss) [#1498](https://github.com/nodejs/node-gyp/pull/1498) -* [[`a20faedc91`](https://github.com/nodejs/node-gyp/commit/a20faedc91)] - **(SEMVER-MAJOR)** **gyp**: enable MARMASM items only on new VS versions (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) -* [[`721eb691cf`](https://github.com/nodejs/node-gyp/commit/721eb691cf)] - **gyp**: teach MSVS generator about MARMASM Items (Jon Kunkee) [#1679](https://github.com/nodejs/node-gyp/pull/1679) -* [[`91744bfecc`](https://github.com/nodejs/node-gyp/commit/91744bfecc)] - **gyp**: add support for Windows on Arm (Richard Townsend) [#1739](https://github.com/nodejs/node-gyp/pull/1739) -* [[`a6e0a6c7ed`](https://github.com/nodejs/node-gyp/commit/a6e0a6c7ed)] - **gyp**: move compile\_commands\_json (Paul Maréchal) [#1661](https://github.com/nodejs/node-gyp/pull/1661) -* [[`92e8b52cee`](https://github.com/nodejs/node-gyp/commit/92e8b52cee)] - **gyp**: fix target --\> self.target (cclauss) -* [[`febdfa2137`](https://github.com/nodejs/node-gyp/commit/febdfa2137)] - **gyp**: fix sntex error (cclauss) [#1333](https://github.com/nodejs/node-gyp/pull/1333) -* [[`588d333c14`](https://github.com/nodejs/node-gyp/commit/588d333c14)] - **gyp**: \_winreg module was renamed to winreg in Python 3. (Craig Rodrigues) -* [[`98226d198c`](https://github.com/nodejs/node-gyp/commit/98226d198c)] - **gyp**: replace basestring with str, but only on Python 3. (Craig Rodrigues) -* [[`7535e4478e`](https://github.com/nodejs/node-gyp/commit/7535e4478e)] - **gyp**: replace deprecated functions (Craig Rodrigues) -* [[`2040cd21cc`](https://github.com/nodejs/node-gyp/commit/2040cd21cc)] - **gyp**: use print as a function, as specified in PEP 3105. 
(Craig Rodrigues) -* [[`abef93ded5`](https://github.com/nodejs/node-gyp/commit/abef93ded5)] - **gyp**: get ready for python 3 (cclauss) -* [[`43031fadcb`](https://github.com/nodejs/node-gyp/commit/43031fadcb)] - **python**: clean-up detection (João Reis) [#1582](https://github.com/nodejs/node-gyp/pull/1582) -* [[`49ab79d221`](https://github.com/nodejs/node-gyp/commit/49ab79d221)] - **python**: more informative error (Refael Ackermann) [#1269](https://github.com/nodejs/node-gyp/pull/1269) -* [[`997bc3c748`](https://github.com/nodejs/node-gyp/commit/997bc3c748)] - **readme**: add ARM64 info to MSVC setup instructions (Jon Kunkee) [#1655](https://github.com/nodejs/node-gyp/pull/1655) -* [[`788e767179`](https://github.com/nodejs/node-gyp/commit/788e767179)] - **test**: remove unused variable (João Reis) -* [[`6f5a408934`](https://github.com/nodejs/node-gyp/commit/6f5a408934)] - **tools**: fix usage of inherited -fPIC and -fPIE (Jens) [#1340](https://github.com/nodejs/node-gyp/pull/1340) -* [[`0efb8fb34b`](https://github.com/nodejs/node-gyp/commit/0efb8fb34b)] - **(SEMVER-MAJOR)** **win**: support running in VS Command Prompt (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) -* [[`360ddbdf3a`](https://github.com/nodejs/node-gyp/commit/360ddbdf3a)] - **(SEMVER-MAJOR)** **win**: add support for Visual Studio 2019 (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) -* [[`8f43f68275`](https://github.com/nodejs/node-gyp/commit/8f43f68275)] - **(SEMVER-MAJOR)** **win**: detect all VS versions in node-gyp (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) -* [[`7fe4095974`](https://github.com/nodejs/node-gyp/commit/7fe4095974)] - **(SEMVER-MAJOR)** **win**: generic Visual Studio 2017 detection (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) -* [[`7a71d68bce`](https://github.com/nodejs/node-gyp/commit/7a71d68bce)] - **win**: use msbuild from the configure stage (Bartosz Sosnowski) [#1654](https://github.com/nodejs/node-gyp/pull/1654) -* [[`d3b21220a0`](https://github.com/nodejs/node-gyp/commit/d3b21220a0)] - **win**: fix delay-load hook for electron 4 (Andy Dill) -* [[`81f3a92338`](https://github.com/nodejs/node-gyp/commit/81f3a92338)] - Update list of Node.js versions to test against. (Ben Noordhuis) [#1670](https://github.com/nodejs/node-gyp/pull/1670) -* [[`4748f6ab75`](https://github.com/nodejs/node-gyp/commit/4748f6ab75)] - Remove deprecated compatibility code. 
(Ben Noordhuis) [#1670](https://github.com/nodejs/node-gyp/pull/1670) -* [[`45e3221fd4`](https://github.com/nodejs/node-gyp/commit/45e3221fd4)] - Remove an outdated workaround for Python 2.4 (cclauss) [#1650](https://github.com/nodejs/node-gyp/pull/1650) -* [[`721dc7d314`](https://github.com/nodejs/node-gyp/commit/721dc7d314)] - Add ARM64 to MSBuild /Platform logic (Jon Kunkee) [#1655](https://github.com/nodejs/node-gyp/pull/1655) -* [[`a5b7410497`](https://github.com/nodejs/node-gyp/commit/a5b7410497)] - Add ESLint no-unused-vars rule (Jon Moss) [#1497](https://github.com/nodejs/node-gyp/pull/1497) - -v4.0.0 2019-04-24 -================= - -* [[`ceed5cbe10`](https://github.com/nodejs/node-gyp/commit/ceed5cbe10)] - **deps**: updated tar package version to 4.4.8 (Pobegaylo Maksim) [#1713](https://github.com/nodejs/node-gyp/pull/1713) -* [[`374519e066`](https://github.com/nodejs/node-gyp/commit/374519e066)] - **(SEMVER-MAJOR)** Upgrade to tar v3 (isaacs) [#1212](https://github.com/nodejs/node-gyp/pull/1212) -* [[`e6699d13cd`](https://github.com/nodejs/node-gyp/commit/e6699d13cd)] - **test**: fix addon test for Node.js 12 and V8 7.4 (Richard Lau) [#1705](https://github.com/nodejs/node-gyp/pull/1705) -* [[`0c6bf530a0`](https://github.com/nodejs/node-gyp/commit/0c6bf530a0)] - **lib**: use print() for python version detection (GreenAddress) [#1534](https://github.com/nodejs/node-gyp/pull/1534) - -v3.8.0 2018-08-09 -================= - -* [[`c5929cb4fe`](https://github.com/nodejs/node-gyp/commit/c5929cb4fe)] - **doc**: update Xcode preferences tab name. (Ivan Daniluk) [#1330](https://github.com/nodejs/node-gyp/pull/1330) -* [[`8b488da8b9`](https://github.com/nodejs/node-gyp/commit/8b488da8b9)] - **doc**: update link to commit guidelines (Jonas Hermsmeier) [#1456](https://github.com/nodejs/node-gyp/pull/1456) -* [[`b4fe8c16f9`](https://github.com/nodejs/node-gyp/commit/b4fe8c16f9)] - **doc**: fix visual studio links (Bartosz Sosnowski) [#1490](https://github.com/nodejs/node-gyp/pull/1490) -* [[`536759c7e9`](https://github.com/nodejs/node-gyp/commit/536759c7e9)] - **configure**: use sys.version\_info to get python version (Yang Guo) [#1504](https://github.com/nodejs/node-gyp/pull/1504) -* [[`94c39c604e`](https://github.com/nodejs/node-gyp/commit/94c39c604e)] - **gyp**: fix ninja build failure (GYP patch) (Daniel Bevenius) [nodejs/node#12484](https://github.com/nodejs/node/pull/12484) -* [[`e8ea74e0fa`](https://github.com/nodejs/node-gyp/commit/e8ea74e0fa)] - **tools**: patch gyp to avoid xcrun errors (Ujjwal Sharma) [nodejs/node#21520](https://github.com/nodejs/node/pull/21520) -* [[`ea9aff44f2`](https://github.com/nodejs/node-gyp/commit/ea9aff44f2)] - **tools**: fix "the the" typos in comments (Masashi Hirano) [nodejs/node#20716](https://github.com/nodejs/node/pull/20716) -* [[`207e5aa4fd`](https://github.com/nodejs/node-gyp/commit/207e5aa4fd)] - **gyp**: implement LD/LDXX for ninja and FIPS (Sam Roberts) -* [[`b416c5f4b7`](https://github.com/nodejs/node-gyp/commit/b416c5f4b7)] - **gyp**: enable cctest to use objects (gyp part) (Daniel Bevenius) [nodejs/node#12450](https://github.com/nodejs/node/pull/12450) -* [[`40692d016b`](https://github.com/nodejs/node-gyp/commit/40692d016b)] - **gyp**: add compile\_commands.json gyp generator (Ben Noordhuis) [nodejs/node#12450](https://github.com/nodejs/node/pull/12450) -* [[`fc3c4e2b10`](https://github.com/nodejs/node-gyp/commit/fc3c4e2b10)] - **gyp**: float gyp patch for long filenames (Anna Henningsen) 
[nodejs/node#7963](https://github.com/nodejs/node/pull/7963) -* [[`8aedbfdef6`](https://github.com/nodejs/node-gyp/commit/8aedbfdef6)] - **gyp**: backport GYP fix to fix AIX shared suffix (Stewart Addison) -* [[`6cd84b84fc`](https://github.com/nodejs/node-gyp/commit/6cd84b84fc)] - **test**: formatting and minor fixes for execFileSync replacement (Rod Vagg) [#1521](https://github.com/nodejs/node-gyp/pull/1521) -* [[`60e421363f`](https://github.com/nodejs/node-gyp/commit/60e421363f)] - **test**: added test/processExecSync.js for when execFileSync is not available. (Rohit Hazra) [#1492](https://github.com/nodejs/node-gyp/pull/1492) -* [[`969447c5bd`](https://github.com/nodejs/node-gyp/commit/969447c5bd)] - **deps**: bump request to 2.8.7, fixes heok/hawk issues (Rohit Hazra) [#1492](https://github.com/nodejs/node-gyp/pull/1492) -* [[`340403ccfe`](https://github.com/nodejs/node-gyp/commit/340403ccfe)] - **win**: improve parsing of SDK version (Alessandro Vergani) [#1516](https://github.com/nodejs/node-gyp/pull/1516) - -v3.7.0 2018-06-08 -================= - -* [[`84cea7b30d`](https://github.com/nodejs/node-gyp/commit/84cea7b30d)] - Remove unused gyp test scripts. (Ben Noordhuis) [#1458](https://github.com/nodejs/node-gyp/pull/1458) -* [[`0540e4ec63`](https://github.com/nodejs/node-gyp/commit/0540e4ec63)] - **gyp**: escape spaces in filenames in make generator (Jeff Senn) [#1436](https://github.com/nodejs/node-gyp/pull/1436) -* [[`88fc6fa0ec`](https://github.com/nodejs/node-gyp/commit/88fc6fa0ec)] - Drop dependency on minimatch. (Brian Woodward) [#1158](https://github.com/nodejs/node-gyp/pull/1158) -* [[`1e203c5148`](https://github.com/nodejs/node-gyp/commit/1e203c5148)] - Fix include path when pointing to Node.js source (Richard Lau) [#1055](https://github.com/nodejs/node-gyp/pull/1055) -* [[`53d8cb967c`](https://github.com/nodejs/node-gyp/commit/53d8cb967c)] - Prefix build targets with /t: on Windows (Natalie Wolfe) [#1164](https://github.com/nodejs/node-gyp/pull/1164) -* [[`53a5f8ff38`](https://github.com/nodejs/node-gyp/commit/53a5f8ff38)] - **gyp**: add support for .mm files to msvs generator (Julien Racle) [#1167](https://github.com/nodejs/node-gyp/pull/1167) -* [[`dd8561e528`](https://github.com/nodejs/node-gyp/commit/dd8561e528)] - **zos**: don't use universal-new-lines mode (John Barboza) [#1451](https://github.com/nodejs/node-gyp/pull/1451) -* [[`e5a69010ed`](https://github.com/nodejs/node-gyp/commit/e5a69010ed)] - **zos**: add search locations for libnode.x (John Barboza) [#1451](https://github.com/nodejs/node-gyp/pull/1451) -* [[`79febace53`](https://github.com/nodejs/node-gyp/commit/79febace53)] - **doc**: update macOS information in README (Josh Parnham) [#1323](https://github.com/nodejs/node-gyp/pull/1323) -* [[`9425448945`](https://github.com/nodejs/node-gyp/commit/9425448945)] - **gyp**: don't print xcodebuild not found errors (Gibson Fahnestock) [#1370](https://github.com/nodejs/node-gyp/pull/1370) -* [[`6f1286f5b2`](https://github.com/nodejs/node-gyp/commit/6f1286f5b2)] - Fix infinite install loop. (Ben Noordhuis) [#1384](https://github.com/nodejs/node-gyp/pull/1384) -* [[`2580b9139e`](https://github.com/nodejs/node-gyp/commit/2580b9139e)] - Update `--nodedir` description in README. 
(Ben Noordhuis) [#1372](https://github.com/nodejs/node-gyp/pull/1372) -* [[`a61360391a`](https://github.com/nodejs/node-gyp/commit/a61360391a)] - Update README with another way to install on windows (JeffAtDeere) [#1352](https://github.com/nodejs/node-gyp/pull/1352) -* [[`47496bf6dc`](https://github.com/nodejs/node-gyp/commit/47496bf6dc)] - Fix IndexError when parsing GYP files. (Ben Noordhuis) [#1267](https://github.com/nodejs/node-gyp/pull/1267) -* [[`b2024dee7b`](https://github.com/nodejs/node-gyp/commit/b2024dee7b)] - **zos**: support platform (John Barboza) [#1276](https://github.com/nodejs/node-gyp/pull/1276) -* [[`90d86512f4`](https://github.com/nodejs/node-gyp/commit/90d86512f4)] - **win**: run PS with `-NoProfile` (Refael Ackermann) [#1292](https://github.com/nodejs/node-gyp/pull/1292) -* [[`2da5f86ef7`](https://github.com/nodejs/node-gyp/commit/2da5f86ef7)] - **doc**: add github PR and Issue templates (Gibson Fahnestock) [#1228](https://github.com/nodejs/node-gyp/pull/1228) -* [[`a46a770d68`](https://github.com/nodejs/node-gyp/commit/a46a770d68)] - **doc**: update proposed DCO and CoC (Mikeal Rogers) [#1229](https://github.com/nodejs/node-gyp/pull/1229) -* [[`7e803d58e0`](https://github.com/nodejs/node-gyp/commit/7e803d58e0)] - **doc**: headerify the Install instructions (Nick Schonning) [#1225](https://github.com/nodejs/node-gyp/pull/1225) -* [[`f27599193a`](https://github.com/nodejs/node-gyp/commit/f27599193a)] - **gyp**: update xml string encoding conversion (Liu Chao) [#1203](https://github.com/nodejs/node-gyp/pull/1203) -* [[`0a07e481f7`](https://github.com/nodejs/node-gyp/commit/0a07e481f7)] - **configure**: don't set ensure if tarball is set (Gibson Fahnestock) [#1220](https://github.com/nodejs/node-gyp/pull/1220) - -v3.6.3 2018-06-08 -================= - -* [[`90cd2e8da9`](https://github.com/nodejs/node-gyp/commit/90cd2e8da9)] - **gyp**: fix regex to match multi-digit versions (Jonas Hermsmeier) [#1455](https://github.com/nodejs/node-gyp/pull/1455) -* [[`7900122337`](https://github.com/nodejs/node-gyp/commit/7900122337)] - deps: pin `request` version range (Refael Ackerman) [#1300](https://github.com/nodejs/node-gyp/pull/1300) - -v3.6.2 2017-06-01 -================= - -* [[`72afdd62cd`](https://github.com/nodejs/node-gyp/commit/72afdd62cd)] - **build**: rename copyNodeLib() to doBuild() (Liu Chao) [#1206](https://github.com/nodejs/node-gyp/pull/1206) -* [[`bad903ac70`](https://github.com/nodejs/node-gyp/commit/bad903ac70)] - **win**: more robust parsing of SDK version (Refael Ackermann) [#1198](https://github.com/nodejs/node-gyp/pull/1198) -* [[`241752f381`](https://github.com/nodejs/node-gyp/commit/241752f381)] - Log dist-url. (Ben Noordhuis) [#1170](https://github.com/nodejs/node-gyp/pull/1170) -* [[`386746c7d1`](https://github.com/nodejs/node-gyp/commit/386746c7d1)] - **configure**: use full path in node_lib_file GYP var (Pavel Medvedev) [#964](https://github.com/nodejs/node-gyp/pull/964) -* [[`0913b2dd99`](https://github.com/nodejs/node-gyp/commit/0913b2dd99)] - **build, win**: use target_arch to link with node.lib (Pavel Medvedev) [#964](https://github.com/nodejs/node-gyp/pull/964) -* [[`c307b302f7`](https://github.com/nodejs/node-gyp/commit/c307b302f7)] - **doc**: blorb about setting `npm_config_OPTION_NAME` (Refael Ackermann) [#1185](https://github.com/nodejs/node-gyp/pull/1185) - -v3.6.1 2017-04-30 -================= - -* [[`49801716c2`](https://github.com/nodejs/node-gyp/commit/49801716c2)] - **test**: fix test-find-python on v0.10.x buildbot. 
(Ben Noordhuis) [#1172](https://github.com/nodejs/node-gyp/pull/1172) -* [[`a83a3801fc`](https://github.com/nodejs/node-gyp/commit/a83a3801fc)] - **test**: fix test/test-configure-python on AIX (Richard Lau) [#1131](https://github.com/nodejs/node-gyp/pull/1131) -* [[`8a767145c9`](https://github.com/nodejs/node-gyp/commit/8a767145c9)] - **gyp**: Revert quote_cmd workaround (Kunal Pathak) [#1153](https://github.com/nodejs/node-gyp/pull/1153) -* [[`c09cf7671e`](https://github.com/nodejs/node-gyp/commit/c09cf7671e)] - **doc**: add a note for using `configure` on Windows (Vse Mozhet Byt) [#1152](https://github.com/nodejs/node-gyp/pull/1152) -* [[`da9cb5f411`](https://github.com/nodejs/node-gyp/commit/da9cb5f411)] - Delete superfluous .patch files. (Ben Noordhuis) [#1122](https://github.com/nodejs/node-gyp/pull/1122) - -v3.6.0 2017-03-16 -================= - -* [[`ae141e1906`](https://github.com/nodejs/node-gyp/commit/ae141e1906)] - **win**: find and setup for VS2017 (Refael Ackermann) [#1130](https://github.com/nodejs/node-gyp/pull/1130) -* [[`ec5fc36a80`](https://github.com/nodejs/node-gyp/commit/ec5fc36a80)] - Add support to build node.js with chakracore for ARM. (Kunal Pathak) [#873](https://github.com/nodejs/node-gyp/pull/873) -* [[`a04ea3051a`](https://github.com/nodejs/node-gyp/commit/a04ea3051a)] - Add support to build node.js with chakracore. (Kunal Pathak) [#873](https://github.com/nodejs/node-gyp/pull/873) -* [[`93d7fa83c8`](https://github.com/nodejs/node-gyp/commit/93d7fa83c8)] - Upgrade semver dependency. (Ben Noordhuis) [#1107](https://github.com/nodejs/node-gyp/pull/1107) -* [[`ff9a6fadfd`](https://github.com/nodejs/node-gyp/commit/ff9a6fadfd)] - Update link of gyp as Google code is shutting down (Peter Dave Hello) [#1061](https://github.com/nodejs/node-gyp/pull/1061) - - -v3.5.0 2017-01-10 -================= - -* [[`762d19a39e`](https://github.com/nodejs/node-gyp/commit/762d19a39e)] - \[doc\] merge History.md and CHANGELOG.md (Rod Vagg) -* [[`80fc5c3d31`](https://github.com/nodejs/node-gyp/commit/80fc5c3d31)] - Fix deprecated dependency warning (Simone Primarosa) [#1069](https://github.com/nodejs/node-gyp/pull/1069) -* [[`05c44944fd`](https://github.com/nodejs/node-gyp/commit/05c44944fd)] - Open the build file with universal-newlines mode (Guy Margalit) [#1053](https://github.com/nodejs/node-gyp/pull/1053) -* [[`37ae7be114`](https://github.com/nodejs/node-gyp/commit/37ae7be114)] - Try python launcher when stock python is python 3. (Ben Noordhuis) [#992](https://github.com/nodejs/node-gyp/pull/992) -* [[`e3778d9907`](https://github.com/nodejs/node-gyp/commit/e3778d9907)] - Add lots of findPython() tests. 
(Ben Noordhuis) [#992](https://github.com/nodejs/node-gyp/pull/992) -* [[`afc766adf6`](https://github.com/nodejs/node-gyp/commit/afc766adf6)] - Unset executable bit for .bat files (Pavel Medvedev) [#969](https://github.com/nodejs/node-gyp/pull/969) -* [[`ddac348991`](https://github.com/nodejs/node-gyp/commit/ddac348991)] - Use push on PYTHONPATH and add tests (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990) -* [[`b182a19042`](https://github.com/nodejs/node-gyp/commit/b182a19042)] - ***Revert*** "add "path-array" dep" (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990) -* [[`7c08b85c5a`](https://github.com/nodejs/node-gyp/commit/7c08b85c5a)] - ***Revert*** "**configure**: use "path-array" for PYTHONPATH" (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990) -* [[`9c8d275526`](https://github.com/nodejs/node-gyp/commit/9c8d275526)] - Add --devdir flag. (Ben Noordhuis) [#916](https://github.com/nodejs/node-gyp/pull/916) -* [[`f6eab1f9e4`](https://github.com/nodejs/node-gyp/commit/f6eab1f9e4)] - **doc**: add windows-build-tools to readme (Felix Rieseberg) [#970](https://github.com/nodejs/node-gyp/pull/970) - -v3.4.0 2016-06-28 -================= - -* [[`ce5fd04e94`](https://github.com/nodejs/node-gyp/commit/ce5fd04e94)] - **deps**: update minimatch version (delphiactual) [#961](https://github.com/nodejs/node-gyp/pull/961) -* [[`77383ddd85`](https://github.com/nodejs/node-gyp/commit/77383ddd85)] - Replace fs.accessSync call to fs.statSync (Richard Lau) [#955](https://github.com/nodejs/node-gyp/pull/955) -* [[`0dba4bda57`](https://github.com/nodejs/node-gyp/commit/0dba4bda57)] - **test**: add simple addon test (Richard Lau) [#955](https://github.com/nodejs/node-gyp/pull/955) -* [[`c4344b3889`](https://github.com/nodejs/node-gyp/commit/c4344b3889)] - **doc**: add --target option to README (Gibson Fahnestock) [#958](https://github.com/nodejs/node-gyp/pull/958) -* [[`cc778e9215`](https://github.com/nodejs/node-gyp/commit/cc778e9215)] - Override BUILDING_UV_SHARED, BUILDING_V8_SHARED. (Ben Noordhuis) [#915](https://github.com/nodejs/node-gyp/pull/915) -* [[`af35b2ad32`](https://github.com/nodejs/node-gyp/commit/af35b2ad32)] - Move VC++ Build Tools to Build Tools landing page. (Andrew Pardoe) [#953](https://github.com/nodejs/node-gyp/pull/953) -* [[`f31482e226`](https://github.com/nodejs/node-gyp/commit/f31482e226)] - **win**: work around __pfnDliNotifyHook2 type change (Alexis Campailla) [#952](https://github.com/nodejs/node-gyp/pull/952) -* [[`3df8222fa5`](https://github.com/nodejs/node-gyp/commit/3df8222fa5)] - Allow for npmlog@3.x (Rebecca Turner) [#950](https://github.com/nodejs/node-gyp/pull/950) -* [[`a4fa07b390`](https://github.com/nodejs/node-gyp/commit/a4fa07b390)] - More verbose error on locating msbuild.exe failure. (Mateusz Jaworski) [#930](https://github.com/nodejs/node-gyp/pull/930) -* [[`4ee31329e0`](https://github.com/nodejs/node-gyp/commit/4ee31329e0)] - **doc**: add command options to README.md (Gibson Fahnestock) [#937](https://github.com/nodejs/node-gyp/pull/937) -* [[`c8c7ca86b9`](https://github.com/nodejs/node-gyp/commit/c8c7ca86b9)] - Add --silent option for zero output. (Gibson Fahnestock) [#937](https://github.com/nodejs/node-gyp/pull/937) -* [[`ac29d23a7c`](https://github.com/nodejs/node-gyp/commit/ac29d23a7c)] - Upgrade to glob@7.0.3. 
(Ben Noordhuis) [#943](https://github.com/nodejs/node-gyp/pull/943) -* [[`15fd56be3d`](https://github.com/nodejs/node-gyp/commit/15fd56be3d)] - Enable V8 deprecation warnings for native modules (Matt Loring) [#920](https://github.com/nodejs/node-gyp/pull/920) -* [[`7f1c1b960c`](https://github.com/nodejs/node-gyp/commit/7f1c1b960c)] - **gyp**: improvements for android generator (Robert Chiras) [#935](https://github.com/nodejs/node-gyp/pull/935) -* [[`088082766c`](https://github.com/nodejs/node-gyp/commit/088082766c)] - Update Windows install instructions (Sara Itani) [#867](https://github.com/nodejs/node-gyp/pull/867) -* [[`625c1515f9`](https://github.com/nodejs/node-gyp/commit/625c1515f9)] - **gyp**: inherit CC/CXX for CC/CXX.host (Johan Bergström) [#908](https://github.com/nodejs/node-gyp/pull/908) -* [[`3bcb1720e4`](https://github.com/nodejs/node-gyp/commit/3bcb1720e4)] - Add support for the Python launcher on Windows (Patrick Westerhoff) [#894](https://github.com/nodejs/node-gyp/pull/894 - -v3.3.1 2016-03-04 -================= - -* [[`a981ef847a`](https://github.com/nodejs/node-gyp/commit/a981ef847a)] - **gyp**: fix android generator (Robert Chiras) [#889](https://github.com/nodejs/node-gyp/pull/889) - -v3.3.0 2016-02-16 -================= - -* [[`818d854a4d`](https://github.com/nodejs/node-gyp/commit/818d854a4d)] - Introduce NODEJS_ORG_MIRROR and IOJS_ORG_MIRROR (Rod Vagg) [#878](https://github.com/nodejs/node-gyp/pull/878) -* [[`d1e4cc4b62`](https://github.com/nodejs/node-gyp/commit/d1e4cc4b62)] - **(SEMVER-MINOR)** Download headers tarball for ~0.12.10 || ~0.10.42 (Rod Vagg) [#877](https://github.com/nodejs/node-gyp/pull/877) -* [[`6e28ad1bea`](https://github.com/nodejs/node-gyp/commit/6e28ad1bea)] - Allow for npmlog@2.x (Rebecca Turner) [#861](https://github.com/nodejs/node-gyp/pull/861) -* [[`07371e5812`](https://github.com/nodejs/node-gyp/commit/07371e5812)] - Use -fPIC for NetBSD. (Marcin Cieślak) [#856](https://github.com/nodejs/node-gyp/pull/856) -* [[`8c4b0ffa50`](https://github.com/nodejs/node-gyp/commit/8c4b0ffa50)] - **(SEMVER-MINOR)** Add --cafile command line option. (Ben Noordhuis) [#837](https://github.com/nodejs/node-gyp/pull/837) -* [[`b3ad43498e`](https://github.com/nodejs/node-gyp/commit/b3ad43498e)] - **(SEMVER-MINOR)** Make download() function testable. (Ben Noordhuis) [#837](https://github.com/nodejs/node-gyp/pull/837) - -v3.2.1 2015-12-03 -================= - -* [[`ab89b477c4`](https://github.com/nodejs/node-gyp/commit/ab89b477c4)] - Upgrade gyp to b3cef02. (Ben Noordhuis) [#831](https://github.com/nodejs/node-gyp/pull/831) -* [[`90078ecb17`](https://github.com/nodejs/node-gyp/commit/90078ecb17)] - Define WIN32_LEAN_AND_MEAN conditionally. (Ben Noordhuis) [#824](https://github.com/nodejs/node-gyp/pull/824) - -v3.2.0 2015-11-25 -================= - -* [[`268f1ca4c7`](https://github.com/nodejs/node-gyp/commit/268f1ca4c7)] - Use result of `which` when searching for python. (Refael Ackermann) [#668](https://github.com/nodejs/node-gyp/pull/668) -* [[`817ed9bd78`](https://github.com/nodejs/node-gyp/commit/817ed9bd78)] - Add test for python executable search logic. (Ben Noordhuis) [#756](https://github.com/nodejs/node-gyp/pull/756) -* [[`0e2dfda1f3`](https://github.com/nodejs/node-gyp/commit/0e2dfda1f3)] - Fix test/test-options when run through `npm test`. 
(Ben Noordhuis) [#755](https://github.com/nodejs/node-gyp/pull/755) -* [[`9bfa0876b4`](https://github.com/nodejs/node-gyp/commit/9bfa0876b4)] - Add support for AIX (Michael Dawson) [#753](https://github.com/nodejs/node-gyp/pull/753) -* [[`a8d441a0a2`](https://github.com/nodejs/node-gyp/commit/a8d441a0a2)] - Update README for Windows 10 support. (Jason Williams) [#766](https://github.com/nodejs/node-gyp/pull/766) -* [[`d1d6015276`](https://github.com/nodejs/node-gyp/commit/d1d6015276)] - Update broken links and switch to HTTPS. (andrew morton) - -v3.1.0 2015-11-14 -================= - -* [[`9049241f91`](https://github.com/nodejs/node-gyp/commit/9049241f91)] - **gyp**: don't use links at all, just copy the files instead (Nathan Zadoks) -* [[`8ef90348d1`](https://github.com/nodejs/node-gyp/commit/8ef90348d1)] - **gyp**: apply https://codereview.chromium.org/11361103/ (Nathan Rajlich) -* [[`a2ed0df84e`](https://github.com/nodejs/node-gyp/commit/a2ed0df84e)] - **gyp**: always install into $PRODUCT_DIR (Nathan Rajlich) -* [[`cc8b2fa83e`](https://github.com/nodejs/node-gyp/commit/cc8b2fa83e)] - Update gyp to b3cef02. (Imran Iqbal) [#781](https://github.com/nodejs/node-gyp/pull/781) -* [[`f5d86eb84e`](https://github.com/nodejs/node-gyp/commit/f5d86eb84e)] - Update to tar@2.0.0. (Edgar Muentes) [#797](https://github.com/nodejs/node-gyp/pull/797) -* [[`2ac7de02c4`](https://github.com/nodejs/node-gyp/commit/2ac7de02c4)] - Fix infinite loop with zero-length options. (Ben Noordhuis) [#745](https://github.com/nodejs/node-gyp/pull/745) -* [[`101bed639b`](https://github.com/nodejs/node-gyp/commit/101bed639b)] - This platform value came from debian package, and now the value (Jérémy Lal) [#738](https://github.com/nodejs/node-gyp/pull/738) - -v3.0.3 2015-09-14 -================= - -* [[`ad827cda30`](https://github.com/nodejs/node-gyp/commit/ad827cda30)] - tarballUrl global and && when checking for iojs (Lars-Magnus Skog) [#729](https://github.com/nodejs/node-gyp/pull/729) - -v3.0.2 2015-09-12 -================= - -* [[`6e8c3bf3c6`](https://github.com/nodejs/node-gyp/commit/6e8c3bf3c6)] - add back support for passing additional cmdline args (Rod Vagg) [#723](https://github.com/nodejs/node-gyp/pull/723) -* [[`ff82f2f3b9`](https://github.com/nodejs/node-gyp/commit/ff82f2f3b9)] - fixed broken link in docs to Visual Studio 2013 download (simon-p-r) [#722](https://github.com/nodejs/node-gyp/pull/722) - -v3.0.1 2015-09-08 -================= - -* [[`846337e36b`](https://github.com/nodejs/node-gyp/commit/846337e36b)] - normalise versions for target == this comparison (Rod Vagg) [#716](https://github.com/nodejs/node-gyp/pull/716) - -v3.0.0 2015-09-08 -================= - -* [[`9720d0373c`](https://github.com/nodejs/node-gyp/commit/9720d0373c)] - remove node_modules from tree (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711) -* [[`6dcf220db7`](https://github.com/nodejs/node-gyp/commit/6dcf220db7)] - test version major directly, don't use semver.satisfies() (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711) -* [[`938dd18d1c`](https://github.com/nodejs/node-gyp/commit/938dd18d1c)] - refactor for clarity, fix dist-url, add env var dist-url functionality (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711) -* [[`9e9df66a06`](https://github.com/nodejs/node-gyp/commit/9e9df66a06)] - use process.release, make aware of io.js & node v4 differences (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711) -* [[`1ea7ed01f4`](https://github.com/nodejs/node-gyp/commit/1ea7ed01f4)] - 
**deps**: update graceful-fs dependency to the latest (Sakthipriyan Vairamani) [#714](https://github.com/nodejs/node-gyp/pull/714) -* [[`0fbc387b35`](https://github.com/nodejs/node-gyp/commit/0fbc387b35)] - Update repository URLs. (Ben Noordhuis) [#715](https://github.com/nodejs/node-gyp/pull/715) -* [[`bbedb8868b`](https://github.com/nodejs/node-gyp/commit/bbedb8868b)] - **(SEMVER-MAJOR)** **win**: enable delay-load hook by default (Jeremiah Senkpiel) [#708](https://github.com/nodejs/node-gyp/pull/708) -* [[`85ed107565`](https://github.com/nodejs/node-gyp/commit/85ed107565)] - Merge pull request #664 from othiym23/othiym23/allow-semver-5 (Nathan Rajlich) -* [[`0c720d234c`](https://github.com/nodejs/node-gyp/commit/0c720d234c)] - allow semver@5 (Forrest L Norvell) - -2.0.2 / 2015-07-14 -================== - - * Use HTTPS for dist url (#656, @SonicHedgehog) - * Merge pull request #648 from nevosegal/master - * Merge pull request #650 from magic890/patch-1 - * Updated Installation section on README - * Updated link to gyp user documentation - * Fix download error message spelling (#643, @tomxtobin) - * Merge pull request #637 from lygstate/master - * Set NODE_GYP_DIR for addon.gypi to setting absolute path for - src/win_delay_load_hook.c, and fixes of the long relative path issue on Win32. - Fixes #636 (#637, @lygstate). - -2.0.1 / 2015-05-28 -================== - - * configure: try/catch the semver range.test() call - * README: update for visual studio 2013 (#510, @samccone) - -2.0.0 / 2015-05-24 -================== - - * configure: check for python2 executable by default, fallback to python - * configure: don't clobber existing $PYTHONPATH - * configure: use "path-array" for PYTHONPATH - * gyp: fix for non-acsii userprofile name on Windows - * gyp: always install into $PRODUCT_DIR - * gyp: apply https://codereview.chromium.org/11361103/ - * gyp: don't use links at all, just copy the files instead - * gyp: update gyp to e1c8fcf7 - * Updated README.md with updated Windows build info - * Show URL when a download fails - * package: add a "license" field - * move HMODULE m declaration to top - * Only add "-undefined dynamic_lookup" to loadable_module targets - * win: optionally allow node.exe/iojs.exe to be renamed - * Avoid downloading shasums if using tarPath - * Add target name preprocessor define: `NODE_GYP_MODULE_NAME` - * Show better error message in case of bad network settings diff --git a/node_modules/node-gyp/README.md b/node_modules/node-gyp/README.md deleted file mode 100644 index e06b01a739794..0000000000000 --- a/node_modules/node-gyp/README.md +++ /dev/null @@ -1,242 +0,0 @@ -# `node-gyp` - Node.js native addon build tool - -[![Build Status](https://github.com/nodejs/node-gyp/workflows/Tests/badge.svg?branch=master)](https://github.com/nodejs/node-gyp/actions?query=workflow%3ATests+branch%3Amaster) - -`node-gyp` is a cross-platform command-line tool written in Node.js for -compiling native addon modules for Node.js. It contains a vendored copy of the -[gyp-next](https://github.com/nodejs/gyp-next) project that was previously used -by the Chromium team, extended to support the development of Node.js native addons. - -Note that `node-gyp` is _not_ used to build Node.js itself. - -Multiple target versions of Node.js are supported (i.e. `0.8`, ..., `4`, `5`, `6`, -etc.), regardless of what version of Node.js is actually installed on your system -(`node-gyp` downloads the necessary development files or headers for the target version). 
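To illustrate that last point, the `--target` and `--arch` options documented in the Command Options table later in this README let a build run against the headers of a Node.js release other than the one executing `node-gyp`. A minimal sketch, assuming a checked-out addon directory; the version and architecture values are placeholders, not recommendations:

``` bash
# Rebuild the addon in the current directory against the headers for a
# different Node.js release than the one running node-gyp itself.
# (14.17.0 and x64 are illustrative values only.)
$ node-gyp rebuild --target=14.17.0 --arch=x64
```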
- -## Features - - * The same build commands work on any of the supported platforms - * Supports the targeting of different versions of Node.js - -## Installation - -You can install `node-gyp` using `npm`: - -``` bash -$ npm install -g node-gyp -``` - -Depending on your operating system, you will need to install: - -### On Unix - - * Python v2.7, v3.5, v3.6, v3.7, or v3.8 - * `make` - * A proper C/C++ compiler toolchain, like [GCC](https://gcc.gnu.org) - -### On macOS - -**ATTENTION**: If your Mac has been _upgraded_ to macOS Catalina (10.15), please read [macOS_Catalina.md](macOS_Catalina.md). - - * Python v2.7, v3.5, v3.6, v3.7, or v3.8 - * [Xcode](https://developer.apple.com/xcode/download/) - * You also need to install the `XCode Command Line Tools` by running `xcode-select --install`. Alternatively, if you already have the full Xcode installed, you can find them under the menu `Xcode -> Open Developer Tool -> More Developer Tools...`. This step will install `clang`, `clang++`, and `make`. - -### On Windows - -Install the current version of Python from the [Microsoft Store package](https://docs.python.org/3/using/windows.html#the-microsoft-store-package). - -#### Option 1 - -Install all the required tools and configurations using Microsoft's [windows-build-tools](https://github.com/felixrieseberg/windows-build-tools) using `npm install --global windows-build-tools` from an elevated PowerShell or CMD.exe (run as Administrator). - -#### Option 2 - -Install tools and configuration manually: - * Install Visual C++ Build Environment: [Visual Studio Build Tools](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=BuildTools) - (using "Visual C++ build tools" workload) or [Visual Studio 2017 Community](https://visualstudio.microsoft.com/pl/thank-you-downloading-visual-studio/?sku=Community) - (using the "Desktop development with C++" workload) - * Launch cmd, `npm config set msvs_version 2017` - - If the above steps didn't work for you, please visit [Microsoft's Node.js Guidelines for Windows](https://github.com/Microsoft/nodejs-guidelines/blob/master/windows-environment.md#compiling-native-addon-modules) for additional tips. - - To target native ARM64 Node.js on Windows 10 on ARM, add the components "Visual C++ compilers and libraries for ARM64" and "Visual C++ ATL for ARM64". - -### Configuring Python Dependency - -`node-gyp` requires that you have installed a compatible version of Python, one of: v2.7, v3.5, v3.6, -v3.7, or v3.8. If you have multiple Python versions installed, you can identify which Python -version `node-gyp` should use in one of the following ways: - -1. by setting the `--python` command-line option, e.g.: - -``` bash -$ node-gyp <command> --python /path/to/executable/python -``` - -2. If `node-gyp` is called by way of `npm`, *and* you have multiple versions of -Python installed, then you can set `npm`'s 'python' config key to the appropriate -value: - -``` bash -$ npm config set python /path/to/executable/python -``` - -3. If the `PYTHON` environment variable is set to the path of a Python executable, -then that version will be used, if it is a compatible version. - -4. If the `NODE_GYP_FORCE_PYTHON` environment variable is set to the path of a -Python executable, it will be used instead of any of the other configured or -builtin Python search paths. If it's not a compatible version, no further -searching will be done. 
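Options 1 and 2 above have inline examples; options 3 and 4 do not, so here is a minimal sketch of both. The interpreter path shown is a placeholder for wherever a compatible Python actually lives on your system:

``` bash
# Option 3: let node-gyp pick up a compatible interpreter via PYTHON
$ export PYTHON=/usr/local/bin/python3.8

# Option 4: force this exact interpreter, skipping all other search paths
$ export NODE_GYP_FORCE_PYTHON=/usr/local/bin/python3.8

$ node-gyp configure
```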
- -## How to Use - -To compile your native addon, first go to its root directory: - -``` bash -$ cd my_node_addon -``` - -The next step is to generate the appropriate project build files for the current -platform. Use `configure` for that: - -``` bash -$ node-gyp configure -``` - -Auto-detection fails for Visual C++ Build Tools 2015, so `--msvs_version=2015` -needs to be added (not needed when run by npm as configured above): -``` bash -$ node-gyp configure --msvs_version=2015 -``` - -__Note__: The `configure` step looks for a `binding.gyp` file in the current -directory to process. See below for instructions on creating a `binding.gyp` file. - -Now you will have either a `Makefile` (on Unix platforms) or a `vcxproj` file -(on Windows) in the `build/` directory. Next, invoke the `build` command: - -``` bash -$ node-gyp build -``` - -Now you have your compiled `.node` bindings file! The compiled bindings end up -in `build/Debug/` or `build/Release/`, depending on the build mode. At this point, -you can require the `.node` file with Node.js and run your tests! - -__Note:__ To create a _Debug_ build of the bindings file, pass the `--debug` (or -`-d`) switch when running either the `configure`, `build` or `rebuild` commands. - -## The `binding.gyp` file - -A `binding.gyp` file describes the configuration to build your module, in a -JSON-like format. This file gets placed in the root of your package, alongside -`package.json`. - -A barebones `gyp` file appropriate for building a Node.js addon could look like: - -```python -{ - "targets": [ - { - "target_name": "binding", - "sources": [ "src/binding.cc" ] - } - ] -} -``` - -## Further reading - -Some additional resources for Node.js native addons and writing `gyp` configuration files: - - * ["Going Native" a nodeschool.io tutorial](http://nodeschool.io/#goingnative) - * ["Hello World" node addon example](https://github.com/nodejs/node/tree/master/test/addons/hello-world) - * [gyp user documentation](https://gyp.gsrc.io/docs/UserDocumentation.md) - * [gyp input format reference](https://gyp.gsrc.io/docs/InputFormatReference.md) - * [*"binding.gyp" files out in the wild* wiki page](https://github.com/nodejs/node-gyp/wiki/%22binding.gyp%22-files-out-in-the-wild) - -## Commands - -`node-gyp` responds to the following commands: - -| **Command** | **Description** -|:--------------|:--------------------------------------------------------------- -| `help` | Shows the help dialog -| `build` | Invokes `make`/`msbuild.exe` and builds the native addon -| `clean` | Removes the `build` directory if it exists -| `configure` | Generates project build files for the current platform -| `rebuild` | Runs `clean`, `configure` and `build` all in a row -| `install` | Installs Node.js header files for the given version -| `list` | Lists the currently installed Node.js header versions -| `remove` | Removes the Node.js header files for the given version - - -## Command Options - -`node-gyp` accepts the following command options: - -| **Command** | **Description** -|:----------------------------------|:------------------------------------------ -| `-j n`, `--jobs n` | Run `make` in parallel. 
The value `max` will use all available CPU cores -| `--target=v6.2.1` | Node.js version to build for (default is `process.version`) -| `--silly`, `--loglevel=silly` | Log all progress to console -| `--verbose`, `--loglevel=verbose` | Log most progress to console -| `--silent`, `--loglevel=silent` | Don't log anything to console -| `debug`, `--debug` | Make Debug build (default is `Release`) -| `--release`, `--no-debug` | Make Release build -| `-C $dir`, `--directory=$dir` | Run command in different directory -| `--make=$make` | Override `make` command (e.g. `gmake`) -| `--thin=yes` | Enable thin static libraries -| `--arch=$arch` | Set target architecture (e.g. ia32) -| `--tarball=$path` | Get headers from a local tarball -| `--devdir=$path` | SDK download directory (default is OS cache directory) -| `--ensure` | Don't reinstall headers if already present -| `--dist-url=$url` | Download header tarball from custom URL -| `--proxy=$url` | Set HTTP(S) proxy for downloading header tarball -| `--noproxy=$urls` | Set urls to ignore proxies when downloading header tarball -| `--cafile=$cafile` | Override default CA chain (to download tarball) -| `--nodedir=$path` | Set the path to the node source code -| `--python=$path` | Set path to the Python binary -| `--msvs_version=$version` | Set Visual Studio version (Windows only) -| `--solution=$solution` | Set Visual Studio Solution version (Windows only) - -## Configuration - -### Environment variables - -Use the form `npm_config_OPTION_NAME` for any of the command options listed -above (dashes in option names should be replaced by underscores). - -For example, to set `devdir` equal to `/tmp/.gyp`, you would: - -Run this on Unix: - -```bash -$ export npm_config_devdir=/tmp/.gyp -``` - -Or this on Windows: - -```console -> set npm_config_devdir=c:\temp\.gyp -``` - -### `npm` configuration - -Use the form `OPTION_NAME` for any of the command options listed above. - -For example, to set `devdir` equal to `/tmp/.gyp`, you would run: - -```bash -$ npm config set [--global] devdir /tmp/.gyp -``` - -**Note:** Configuration set via `npm` will only be used when `node-gyp` -is run via `npm`, not when `node-gyp` is run directly. - -## License - -`node-gyp` is available under the MIT license. See the [LICENSE -file](LICENSE) for details. diff --git a/node_modules/node-gyp/gyp/.github/workflows/Python_tests.yml b/node_modules/node-gyp/gyp/.github/workflows/Python_tests.yml deleted file mode 100644 index 128654f3121d7..0000000000000 --- a/node_modules/node-gyp/gyp/.github/workflows/Python_tests.yml +++ /dev/null @@ -1,31 +0,0 @@ -# TODO: Enable os: windows-latest -# TODO: Enable python-version: 3.5 -# TODO: Enable pytest --doctest-modules - -name: Python_tests -on: [push, pull_request] -jobs: - Python_tests: - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - max-parallel: 15 - matrix: - os: [macos-latest, ubuntu-latest] # , windows-latest] - python-version: [2.7, 3.6, 3.7, 3.8, 3.9] - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements_dev.txt - - name: Lint with flake8 - run: flake8 . 
--count --show-source --statistics - - name: Test with pytest - run: pytest - # - name: Run doctests with pytest - # run: pytest --doctest-modules diff --git a/node_modules/node-gyp/gyp/.github/workflows/node-gyp.yml b/node_modules/node-gyp/gyp/.github/workflows/node-gyp.yml deleted file mode 100644 index 78fe502bda062..0000000000000 --- a/node_modules/node-gyp/gyp/.github/workflows/node-gyp.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: node-gyp integration - -on: [push, pull_request] - -jobs: - test: - strategy: - fail-fast: false - matrix: - os: [macos-latest, ubuntu-latest, windows-latest] - runs-on: ${{ matrix.os }} - steps: - - name: Clone gyp-next - uses: actions/checkout@v2 - with: - path: gyp-next - - name: Clone nodejs/node-gyp - uses: actions/checkout@v2 - with: - repository: nodejs/node-gyp - path: node-gyp - - uses: actions/setup-node@v1 - with: - node-version: 14.x - - uses: actions/setup-python@v2 - with: - python-version: 3.9 - - name: Install dependencies - run: | - cd node-gyp - npm install --no-progress - - name: Replace gyp in node-gyp - shell: bash - run: | - rm -rf node-gyp/gyp - cp -r gyp-next node-gyp/gyp - - name: Run tests - run: | - cd node-gyp - npm test diff --git a/node_modules/node-gyp/gyp/.github/workflows/nodejs-windows.yml b/node_modules/node-gyp/gyp/.github/workflows/nodejs-windows.yml deleted file mode 100644 index fffe96e33b815..0000000000000 --- a/node_modules/node-gyp/gyp/.github/workflows/nodejs-windows.yml +++ /dev/null @@ -1,27 +0,0 @@ -name: Node.js Windows integration - -on: [push, pull_request] - -jobs: - build-windows: - runs-on: windows-latest - steps: - - name: Clone gyp-next - uses: actions/checkout@v2 - with: - path: gyp-next - - name: Clone nodejs/node - uses: actions/checkout@v2 - with: - repository: nodejs/node - path: node - - name: Install deps - run: choco install nasm - - name: Replace gyp in Node.js - run: | - rm -Recurse node/tools/gyp - cp -Recurse gyp-next node/tools/gyp - - name: Build Node.js - run: | - cd node - ./vcbuild.bat diff --git a/node_modules/node-gyp/gyp/.github/workflows/release-please.yml b/node_modules/node-gyp/gyp/.github/workflows/release-please.yml deleted file mode 100644 index a414c10e15636..0000000000000 --- a/node_modules/node-gyp/gyp/.github/workflows/release-please.yml +++ /dev/null @@ -1,16 +0,0 @@ -on: - push: - branches: - - master - -name: release-please -jobs: - release-please: - runs-on: ubuntu-latest - steps: - - uses: GoogleCloudPlatform/release-please-action@v2.5.6 - with: - token: ${{ secrets.GITHUB_TOKEN }} - release-type: python - package-name: gyp-next - bump-minor-pre-major: Yes diff --git a/node_modules/node-gyp/gyp/CHANGELOG.md b/node_modules/node-gyp/gyp/CHANGELOG.md deleted file mode 100644 index 53c922b6c903f..0000000000000 --- a/node_modules/node-gyp/gyp/CHANGELOG.md +++ /dev/null @@ -1,70 +0,0 @@ -# Changelog - -### [0.6.2](https://www.github.com/nodejs/gyp-next/compare/v0.6.1...v0.6.2) (2020-10-16) - - -### Bug Fixes - -* do not rewrite absolute paths to avoid long paths ([#74](https://www.github.com/nodejs/gyp-next/issues/74)) ([c2ccc1a](https://www.github.com/nodejs/gyp-next/commit/c2ccc1a81f7f94433a94f4d01a2e820db4c4331a)) -* only include MARMASM when toolset is target ([5a2794a](https://www.github.com/nodejs/gyp-next/commit/5a2794aefb58f0c00404ff042b61740bc8b8d5cd)) - -### [0.6.1](https://github.com/nodejs/gyp-next/compare/v0.6.0...v0.6.1) (2020-10-14) - - -### Bug Fixes - -* Correctly rename object files for absolute paths in MSVS generator. 
- -## [0.6.0](https://github.com/nodejs/gyp-next/compare/v0.5.0...v0.6.0) (2020-10-13) - - -### Features - -* The Makefile generator will now output shared libraries directly to the product directory on all platforms (previously only macOS). - -## [0.5.0](https://github.com/nodejs/gyp-next/compare/v0.4.0...v0.5.0) (2020-09-30) - - -### Features - -* Extended compile_commands_json generator to consider more file extensions than just `c` and `cc`. `cpp` and `cxx` are now supported. -* Source files with duplicate basenames are now supported. - -### Removed - -* The `--no-duplicate-basename-check` option was removed. -* The `msvs_enable_marmasm` configuration option was removed in favor of auto-inclusion of the "marmasm" sections for Windows on ARM. - -## [0.4.0](https://github.com/nodejs/gyp-next/compare/v0.3.0...v0.4.0) (2020-07-14) - - -### Features - -* Added support for passing arbitrary architectures to Xcode builds, enables `arm64` builds. - -### Bug Fixes - -* Fixed a bug on Solaris where copying archives failed. - -## [0.3.0](https://github.com/nodejs/gyp-next/compare/v0.2.1...v0.3.0) (2020-06-06) - - -### Features - -* Added support for MSVC cross-compilation. This allows compilation on x64 for a Windows ARM target. - -### Bug Fixes - -* Fixed XCode CLT version detection on macOS Catalina. - -### [0.2.1](https://github.com/nodejs/gyp-next/compare/v0.2.0...v0.2.1) (2020-05-05) - - -### Bug Fixes - -* Relicensed to Node.js contributors. -* Fixed Windows bug introduced in v0.2.0. - -## [0.2.0](https://github.com/nodejs/gyp-next/releases/tag/v0.2.0) (2020-04-06) - -This is the first release of this project, based on https://chromium.googlesource.com/external/gyp with changes made over the years in Node.js and node-gyp. diff --git a/node_modules/node-gyp/gyp/README.md b/node_modules/node-gyp/gyp/README.md deleted file mode 100644 index 9ffc2b21e81b8..0000000000000 --- a/node_modules/node-gyp/gyp/README.md +++ /dev/null @@ -1,7 +0,0 @@ -GYP can Generate Your Projects. -=================================== - -Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can check out ```md-pages``` branch to read those documents offline. - -__gyp-next__ is [released](https://github.com/nodejs/gyp-next/releases) to the [__Python Packaging Index__](https://pypi.org/project/gyp-next) (PyPI) and can be installed with the command: -* `python3 -m pip install gyp-next` diff --git a/node_modules/node-gyp/gyp/tools/README b/node_modules/node-gyp/gyp/tools/README deleted file mode 100644 index 712e4efbb7a06..0000000000000 --- a/node_modules/node-gyp/gyp/tools/README +++ /dev/null @@ -1,15 +0,0 @@ -pretty_vcproj: - Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2] - - They key/value pair are used to resolve vsprops name. - - For example, if I want to diff the base.vcproj project: - - pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > orignal.txt - pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt - - And you can use your favorite diff tool to see the changes. - - Note: In the case of base.vcproj, the original vcproj is one level up the generated one. - I suggest you do a search and replace for '"..\' and replace it with '"' in original.txt - before you perform the diff. 
\ No newline at end of file diff --git a/node_modules/node-gyp/gyp/tools/Xcode/README b/node_modules/node-gyp/gyp/tools/Xcode/README deleted file mode 100644 index 2492a2c2f8f17..0000000000000 --- a/node_modules/node-gyp/gyp/tools/Xcode/README +++ /dev/null @@ -1,5 +0,0 @@ -Specifications contains syntax formatters for Xcode 3. These do not appear to be supported yet on Xcode 4. To use these with Xcode 3 please install both the gyp.pbfilespec and gyp.xclangspec files in - -~/Library/Application Support/Developer/Shared/Xcode/Specifications/ - -and restart Xcode. \ No newline at end of file diff --git a/node_modules/node-gyp/gyp/tools/emacs/README b/node_modules/node-gyp/gyp/tools/emacs/README deleted file mode 100644 index eeef39f41b7db..0000000000000 --- a/node_modules/node-gyp/gyp/tools/emacs/README +++ /dev/null @@ -1,12 +0,0 @@ -How to install gyp-mode for emacs: - -Add the following to your ~/.emacs (replace ... with the path to your gyp -checkout). - -(setq load-path (cons ".../tools/emacs" load-path)) -(require 'gyp) - -Restart emacs (or eval-region the added lines) and you should be all set. - -Please note that ert is required for running the tests, which is included in -Emacs 24, or available separately from https://github.com/ohler/ert diff --git a/node_modules/gauge/node_modules/aproba/LICENSE b/node_modules/node-gyp/node_modules/aproba/LICENSE similarity index 100% rename from node_modules/gauge/node_modules/aproba/LICENSE rename to node_modules/node-gyp/node_modules/aproba/LICENSE diff --git a/node_modules/gauge/node_modules/aproba/index.js b/node_modules/node-gyp/node_modules/aproba/index.js similarity index 100% rename from node_modules/gauge/node_modules/aproba/index.js rename to node_modules/node-gyp/node_modules/aproba/index.js diff --git a/node_modules/gauge/node_modules/aproba/package.json b/node_modules/node-gyp/node_modules/aproba/package.json similarity index 100% rename from node_modules/gauge/node_modules/aproba/package.json rename to node_modules/node-gyp/node_modules/aproba/package.json diff --git a/node_modules/node-gyp/node_modules/gauge/LICENSE b/node_modules/node-gyp/node_modules/gauge/LICENSE new file mode 100644 index 0000000000000..e756052969b78 --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2014, Rebecca Turner <me@re-becca.org> + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/node-gyp/node_modules/gauge/base-theme.js b/node_modules/node-gyp/node_modules/gauge/base-theme.js new file mode 100644 index 0000000000000..0b67638e0211d --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/base-theme.js @@ -0,0 +1,14 @@ +'use strict' +var spin = require('./spin.js') +var progressBar = require('./progress-bar.js') + +module.exports = { + activityIndicator: function (values, theme, width) { + if (values.spun == null) return + return spin(theme, values.spun) + }, + progressbar: function (values, theme, width) { + if (values.completed == null) return + return progressBar(theme, width, values.completed) + } +} diff --git a/node_modules/node-gyp/node_modules/gauge/error.js b/node_modules/node-gyp/node_modules/gauge/error.js new file mode 100644 index 0000000000000..d9914ba5335d2 --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/error.js @@ -0,0 +1,24 @@ +'use strict' +var util = require('util') + +var User = exports.User = function User (msg) { + var err = new Error(msg) + Error.captureStackTrace(err, User) + err.code = 'EGAUGE' + return err +} + +exports.MissingTemplateValue = function MissingTemplateValue (item, values) { + var err = new User(util.format('Missing template value "%s"', item.type)) + Error.captureStackTrace(err, MissingTemplateValue) + err.template = item + err.values = values + return err +} + +exports.Internal = function Internal (msg) { + var err = new Error(msg) + Error.captureStackTrace(err, Internal) + err.code = 'EGAUGEINTERNAL' + return err +} diff --git a/node_modules/node-gyp/node_modules/gauge/has-color.js b/node_modules/node-gyp/node_modules/gauge/has-color.js new file mode 100644 index 0000000000000..e283a256f26b7 --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/has-color.js @@ -0,0 +1,12 @@ +'use strict' + +module.exports = isWin32() || isColorTerm() + +function isWin32 () { + return process.platform === 'win32' +} + +function isColorTerm () { + var termHasColor = /^screen|^xterm|^vt100|color|ansi|cygwin|linux/i + return !!process.env.COLORTERM || termHasColor.test(process.env.TERM) +} diff --git a/node_modules/node-gyp/node_modules/gauge/index.js b/node_modules/node-gyp/node_modules/gauge/index.js new file mode 100644 index 0000000000000..c55324008cbfa --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/index.js @@ -0,0 +1,233 @@ +'use strict' +var Plumbing = require('./plumbing.js') +var hasUnicode = require('has-unicode') +var hasColor = require('./has-color.js') +var onExit = require('signal-exit') +var defaultThemes = require('./themes') +var setInterval = require('./set-interval.js') +var process = require('./process.js') +var setImmediate = require('./set-immediate') + +module.exports = Gauge + +function callWith (obj, method) { + return function () { + return method.call(obj) + } +} + +function Gauge (arg1, arg2) { + var options, writeTo + if (arg1 && arg1.write) { + writeTo = arg1 + options = arg2 || {} + } else if (arg2 && arg2.write) { + writeTo = arg2 + options = arg1 || {} + } else { + writeTo = process.stderr + options = arg1 || arg2 || {} + } + + this._status = { + spun: 0, + section: '', + subsection: '' + } + this._paused = false // are we paused for back pressure? + this._disabled = true // are all progress bar updates disabled? + this._showing = false // do we WANT the progress bar on screen + this._onScreen = false // IS the progress bar on screen + this._needsRedraw = false // should we print something at next tick? 
+ this._hideCursor = options.hideCursor == null ? true : options.hideCursor + this._fixedFramerate = options.fixedFramerate == null + ? !(/^v0\.8\./.test(process.version)) + : options.fixedFramerate + this._lastUpdateAt = null + this._updateInterval = options.updateInterval == null ? 50 : options.updateInterval + + this._themes = options.themes || defaultThemes + this._theme = options.theme + var theme = this._computeTheme(options.theme) + var template = options.template || [ + {type: 'progressbar', length: 20}, + {type: 'activityIndicator', kerning: 1, length: 1}, + {type: 'section', kerning: 1, default: ''}, + {type: 'subsection', kerning: 1, default: ''} + ] + this.setWriteTo(writeTo, options.tty) + var PlumbingClass = options.Plumbing || Plumbing + this._gauge = new PlumbingClass(theme, template, this.getWidth()) + + this._$$doRedraw = callWith(this, this._doRedraw) + this._$$handleSizeChange = callWith(this, this._handleSizeChange) + + this._cleanupOnExit = options.cleanupOnExit == null || options.cleanupOnExit + this._removeOnExit = null + + if (options.enabled || (options.enabled == null && this._tty && this._tty.isTTY)) { + this.enable() + } else { + this.disable() + } +} +Gauge.prototype = {} + +Gauge.prototype.isEnabled = function () { + return !this._disabled +} + +Gauge.prototype.setTemplate = function (template) { + this._gauge.setTemplate(template) + if (this._showing) this._requestRedraw() +} + +Gauge.prototype._computeTheme = function (theme) { + if (!theme) theme = {} + if (typeof theme === 'string') { + theme = this._themes.getTheme(theme) + } else if (theme && (Object.keys(theme).length === 0 || theme.hasUnicode != null || theme.hasColor != null)) { + var useUnicode = theme.hasUnicode == null ? hasUnicode() : theme.hasUnicode + var useColor = theme.hasColor == null ? 
hasColor : theme.hasColor + theme = this._themes.getDefault({hasUnicode: useUnicode, hasColor: useColor, platform: theme.platform}) + } + return theme +} + +Gauge.prototype.setThemeset = function (themes) { + this._themes = themes + this.setTheme(this._theme) +} + +Gauge.prototype.setTheme = function (theme) { + this._gauge.setTheme(this._computeTheme(theme)) + if (this._showing) this._requestRedraw() + this._theme = theme +} + +Gauge.prototype._requestRedraw = function () { + this._needsRedraw = true + if (!this._fixedFramerate) this._doRedraw() +} + +Gauge.prototype.getWidth = function () { + return ((this._tty && this._tty.columns) || 80) - 1 +} + +Gauge.prototype.setWriteTo = function (writeTo, tty) { + var enabled = !this._disabled + if (enabled) this.disable() + this._writeTo = writeTo + this._tty = tty || + (writeTo === process.stderr && process.stdout.isTTY && process.stdout) || + (writeTo.isTTY && writeTo) || + this._tty + if (this._gauge) this._gauge.setWidth(this.getWidth()) + if (enabled) this.enable() +} + +Gauge.prototype.enable = function () { + if (!this._disabled) return + this._disabled = false + if (this._tty) this._enableEvents() + if (this._showing) this.show() +} + +Gauge.prototype.disable = function () { + if (this._disabled) return + if (this._showing) { + this._lastUpdateAt = null + this._showing = false + this._doRedraw() + this._showing = true + } + this._disabled = true + if (this._tty) this._disableEvents() +} + +Gauge.prototype._enableEvents = function () { + if (this._cleanupOnExit) { + this._removeOnExit = onExit(callWith(this, this.disable)) + } + this._tty.on('resize', this._$$handleSizeChange) + if (this._fixedFramerate) { + this.redrawTracker = setInterval(this._$$doRedraw, this._updateInterval) + if (this.redrawTracker.unref) this.redrawTracker.unref() + } +} + +Gauge.prototype._disableEvents = function () { + this._tty.removeListener('resize', this._$$handleSizeChange) + if (this._fixedFramerate) clearInterval(this.redrawTracker) + if (this._removeOnExit) this._removeOnExit() +} + +Gauge.prototype.hide = function (cb) { + if (this._disabled) return cb && process.nextTick(cb) + if (!this._showing) return cb && process.nextTick(cb) + this._showing = false + this._doRedraw() + cb && setImmediate(cb) +} + +Gauge.prototype.show = function (section, completed) { + this._showing = true + if (typeof section === 'string') { + this._status.section = section + } else if (typeof section === 'object') { + var sectionKeys = Object.keys(section) + for (var ii = 0; ii < sectionKeys.length; ++ii) { + var key = sectionKeys[ii] + this._status[key] = section[key] + } + } + if (completed != null) this._status.completed = completed + if (this._disabled) return + this._requestRedraw() +} + +Gauge.prototype.pulse = function (subsection) { + this._status.subsection = subsection || '' + this._status.spun ++ + if (this._disabled) return + if (!this._showing) return + this._requestRedraw() +} + +Gauge.prototype._handleSizeChange = function () { + this._gauge.setWidth(this._tty.columns - 1) + this._requestRedraw() +} + +Gauge.prototype._doRedraw = function () { + if (this._disabled || this._paused) return + if (!this._fixedFramerate) { + var now = Date.now() + if (this._lastUpdateAt && now - this._lastUpdateAt < this._updateInterval) return + this._lastUpdateAt = now + } + if (!this._showing && this._onScreen) { + this._onScreen = false + var result = this._gauge.hide() + if (this._hideCursor) { + result += this._gauge.showCursor() + } + return this._writeTo.write(result) + } + if 
(!this._showing && !this._onScreen) return + if (this._showing && !this._onScreen) { + this._onScreen = true + this._needsRedraw = true + if (this._hideCursor) { + this._writeTo.write(this._gauge.hideCursor()) + } + } + if (!this._needsRedraw) return + if (!this._writeTo.write(this._gauge.show(this._status))) { + this._paused = true + this._writeTo.on('drain', callWith(this, function () { + this._paused = false + this._doRedraw() + })) + } +} diff --git a/node_modules/node-gyp/node_modules/gauge/package.json b/node_modules/node-gyp/node_modules/gauge/package.json new file mode 100644 index 0000000000000..4882cff8390d8 --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/package.json @@ -0,0 +1,63 @@ +{ + "name": "gauge", + "version": "2.7.4", + "description": "A terminal based horizontal guage", + "main": "index.js", + "scripts": { + "test": "standard && tap test/*.js --coverage", + "prepublish": "rm -f *~" + }, + "repository": { + "type": "git", + "url": "https://github.com/iarna/gauge" + }, + "keywords": [ + "progressbar", + "progress", + "gauge" + ], + "author": "Rebecca Turner <me@re-becca.org>", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/gauge/issues" + }, + "homepage": "https://github.com/iarna/gauge", + "dependencies": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + }, + "devDependencies": { + "readable-stream": "^2.0.6", + "require-inject": "^1.4.0", + "standard": "^7.1.2", + "tap": "^5.7.2", + "through2": "^2.0.0" + }, + "files": [ + "base-theme.js", + "CHANGELOG.md", + "error.js", + "has-color.js", + "index.js", + "LICENSE", + "package.json", + "plumbing.js", + "process.js", + "progress-bar.js", + "README.md", + "render-template.js", + "set-immediate.js", + "set-interval.js", + "spin.js", + "template-item.js", + "theme-set.js", + "themes.js", + "wide-truncate.js" + ] +} diff --git a/node_modules/node-gyp/node_modules/gauge/plumbing.js b/node_modules/node-gyp/node_modules/gauge/plumbing.js new file mode 100644 index 0000000000000..1afb4af6d5017 --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/plumbing.js @@ -0,0 +1,48 @@ +'use strict' +var consoleControl = require('console-control-strings') +var renderTemplate = require('./render-template.js') +var validate = require('aproba') + +var Plumbing = module.exports = function (theme, template, width) { + if (!width) width = 80 + validate('OAN', [theme, template, width]) + this.showing = false + this.theme = theme + this.width = width + this.template = template +} +Plumbing.prototype = {} + +Plumbing.prototype.setTheme = function (theme) { + validate('O', [theme]) + this.theme = theme +} + +Plumbing.prototype.setTemplate = function (template) { + validate('A', [template]) + this.template = template +} + +Plumbing.prototype.setWidth = function (width) { + validate('N', [width]) + this.width = width +} + +Plumbing.prototype.hide = function () { + return consoleControl.gotoSOL() + consoleControl.eraseLine() +} + +Plumbing.prototype.hideCursor = consoleControl.hideCursor + +Plumbing.prototype.showCursor = consoleControl.showCursor + +Plumbing.prototype.show = function (status) { + var values = Object.create(this.theme) + for (var key in status) { + values[key] = status[key] + } + + return renderTemplate(this.width, this.template, values).trim() + + consoleControl.color('reset') + + consoleControl.eraseLine() + 
consoleControl.gotoSOL() +} diff --git a/node_modules/node-gyp/node_modules/gauge/process.js b/node_modules/node-gyp/node_modules/gauge/process.js new file mode 100644 index 0000000000000..05e85694d755b --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/process.js @@ -0,0 +1,3 @@ +'use strict' +// this exists so we can replace it during testing +module.exports = process diff --git a/node_modules/node-gyp/node_modules/gauge/progress-bar.js b/node_modules/node-gyp/node_modules/gauge/progress-bar.js new file mode 100644 index 0000000000000..7f8dd68be24cf --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/progress-bar.js @@ -0,0 +1,35 @@ +'use strict' +var validate = require('aproba') +var renderTemplate = require('./render-template.js') +var wideTruncate = require('./wide-truncate') +var stringWidth = require('string-width') + +module.exports = function (theme, width, completed) { + validate('ONN', [theme, width, completed]) + if (completed < 0) completed = 0 + if (completed > 1) completed = 1 + if (width <= 0) return '' + var sofar = Math.round(width * completed) + var rest = width - sofar + var template = [ + {type: 'complete', value: repeat(theme.complete, sofar), length: sofar}, + {type: 'remaining', value: repeat(theme.remaining, rest), length: rest} + ] + return renderTemplate(width, template, theme) +} + +// lodash's way of repeating +function repeat (string, width) { + var result = '' + var n = width + do { + if (n % 2) { + result += string + } + n = Math.floor(n / 2) + /*eslint no-self-assign: 0*/ + string += string + } while (n && stringWidth(result) < width) + + return wideTruncate(result, width) +} diff --git a/node_modules/node-gyp/node_modules/gauge/render-template.js b/node_modules/node-gyp/node_modules/gauge/render-template.js new file mode 100644 index 0000000000000..3261bfbe6f4be --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/render-template.js @@ -0,0 +1,181 @@ +'use strict' +var align = require('wide-align') +var validate = require('aproba') +var objectAssign = require('object-assign') +var wideTruncate = require('./wide-truncate') +var error = require('./error') +var TemplateItem = require('./template-item') + +function renderValueWithValues (values) { + return function (item) { + return renderValue(item, values) + } +} + +var renderTemplate = module.exports = function (width, template, values) { + var items = prepareItems(width, template, values) + var rendered = items.map(renderValueWithValues(values)).join('') + return align.left(wideTruncate(rendered, width), width) +} + +function preType (item) { + var cappedTypeName = item.type[0].toUpperCase() + item.type.slice(1) + return 'pre' + cappedTypeName +} + +function postType (item) { + var cappedTypeName = item.type[0].toUpperCase() + item.type.slice(1) + return 'post' + cappedTypeName +} + +function hasPreOrPost (item, values) { + if (!item.type) return + return values[preType(item)] || values[postType(item)] +} + +function generatePreAndPost (baseItem, parentValues) { + var item = objectAssign({}, baseItem) + var values = Object.create(parentValues) + var template = [] + var pre = preType(item) + var post = postType(item) + if (values[pre]) { + template.push({value: values[pre]}) + values[pre] = null + } + item.minLength = null + item.length = null + item.maxLength = null + template.push(item) + values[item.type] = values[item.type] + if (values[post]) { + template.push({value: values[post]}) + values[post] = null + } + return function ($1, $2, length) { + return renderTemplate(length, 
template, values) + } +} + +function prepareItems (width, template, values) { + function cloneAndObjectify (item, index, arr) { + var cloned = new TemplateItem(item, width) + var type = cloned.type + if (cloned.value == null) { + if (!(type in values)) { + if (cloned.default == null) { + throw new error.MissingTemplateValue(cloned, values) + } else { + cloned.value = cloned.default + } + } else { + cloned.value = values[type] + } + } + if (cloned.value == null || cloned.value === '') return null + cloned.index = index + cloned.first = index === 0 + cloned.last = index === arr.length - 1 + if (hasPreOrPost(cloned, values)) cloned.value = generatePreAndPost(cloned, values) + return cloned + } + + var output = template.map(cloneAndObjectify).filter(function (item) { return item != null }) + + var outputLength = 0 + var remainingSpace = width + var variableCount = output.length + + function consumeSpace (length) { + if (length > remainingSpace) length = remainingSpace + outputLength += length + remainingSpace -= length + } + + function finishSizing (item, length) { + if (item.finished) throw new error.Internal('Tried to finish template item that was already finished') + if (length === Infinity) throw new error.Internal('Length of template item cannot be infinity') + if (length != null) item.length = length + item.minLength = null + item.maxLength = null + --variableCount + item.finished = true + if (item.length == null) item.length = item.getBaseLength() + if (item.length == null) throw new error.Internal('Finished template items must have a length') + consumeSpace(item.getLength()) + } + + output.forEach(function (item) { + if (!item.kerning) return + var prevPadRight = item.first ? 0 : output[item.index - 1].padRight + if (!item.first && prevPadRight < item.kerning) item.padLeft = item.kerning - prevPadRight + if (!item.last) item.padRight = item.kerning + }) + + // Finish any that have a fixed (literal or intuited) length + output.forEach(function (item) { + if (item.getBaseLength() == null) return + finishSizing(item) + }) + + var resized = 0 + var resizing + var hunkSize + do { + resizing = false + hunkSize = Math.round(remainingSpace / variableCount) + output.forEach(function (item) { + if (item.finished) return + if (!item.maxLength) return + if (item.getMaxLength() < hunkSize) { + finishSizing(item, item.maxLength) + resizing = true + } + }) + } while (resizing && resized++ < output.length) + if (resizing) throw new error.Internal('Resize loop iterated too many times while determining maxLength') + + resized = 0 + do { + resizing = false + hunkSize = Math.round(remainingSpace / variableCount) + output.forEach(function (item) { + if (item.finished) return + if (!item.minLength) return + if (item.getMinLength() >= hunkSize) { + finishSizing(item, item.minLength) + resizing = true + } + }) + } while (resizing && resized++ < output.length) + if (resizing) throw new error.Internal('Resize loop iterated too many times while determining minLength') + + hunkSize = Math.round(remainingSpace / variableCount) + output.forEach(function (item) { + if (item.finished) return + finishSizing(item, hunkSize) + }) + + return output +} + +function renderFunction (item, values, length) { + validate('OON', arguments) + if (item.type) { + return item.value(values, values[item.type + 'Theme'] || {}, length) + } else { + return item.value(values, {}, length) + } +} + +function renderValue (item, values) { + var length = item.getBaseLength() + var value = typeof item.value === 'function' ? 
renderFunction(item, values, length) : item.value + if (value == null || value === '') return '' + var alignWith = align[item.align] || align.left + var leftPadding = item.padLeft ? align.left('', item.padLeft) : '' + var rightPadding = item.padRight ? align.right('', item.padRight) : '' + var truncated = wideTruncate(String(value), length) + var aligned = alignWith(truncated, length) + return leftPadding + aligned + rightPadding +} diff --git a/node_modules/node-gyp/node_modules/gauge/set-immediate.js b/node_modules/node-gyp/node_modules/gauge/set-immediate.js new file mode 100644 index 0000000000000..6650a485c4993 --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/set-immediate.js @@ -0,0 +1,7 @@ +'use strict' +var process = require('./process') +try { + module.exports = setImmediate +} catch (ex) { + module.exports = process.nextTick +} diff --git a/node_modules/node-gyp/node_modules/gauge/set-interval.js b/node_modules/node-gyp/node_modules/gauge/set-interval.js new file mode 100644 index 0000000000000..576198793c550 --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/set-interval.js @@ -0,0 +1,3 @@ +'use strict' +// this exists so we can replace it during testing +module.exports = setInterval diff --git a/node_modules/node-gyp/node_modules/gauge/spin.js b/node_modules/node-gyp/node_modules/gauge/spin.js new file mode 100644 index 0000000000000..34142ee31acc7 --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/spin.js @@ -0,0 +1,5 @@ +'use strict' + +module.exports = function spin (spinstr, spun) { + return spinstr[spun % spinstr.length] +} diff --git a/node_modules/node-gyp/node_modules/gauge/template-item.js b/node_modules/node-gyp/node_modules/gauge/template-item.js new file mode 100644 index 0000000000000..e46f447c941d3 --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/template-item.js @@ -0,0 +1,73 @@ +'use strict' +var stringWidth = require('string-width') + +module.exports = TemplateItem + +function isPercent (num) { + if (typeof num !== 'string') return false + return num.slice(-1) === '%' +} + +function percent (num) { + return Number(num.slice(0, -1)) / 100 +} + +function TemplateItem (values, outputLength) { + this.overallOutputLength = outputLength + this.finished = false + this.type = null + this.value = null + this.length = null + this.maxLength = null + this.minLength = null + this.kerning = null + this.align = 'left' + this.padLeft = 0 + this.padRight = 0 + this.index = null + this.first = null + this.last = null + if (typeof values === 'string') { + this.value = values + } else { + for (var prop in values) this[prop] = values[prop] + } + // Realize percents + if (isPercent(this.length)) { + this.length = Math.round(this.overallOutputLength * percent(this.length)) + } + if (isPercent(this.minLength)) { + this.minLength = Math.round(this.overallOutputLength * percent(this.minLength)) + } + if (isPercent(this.maxLength)) { + this.maxLength = Math.round(this.overallOutputLength * percent(this.maxLength)) + } + return this +} + +TemplateItem.prototype = {} + +TemplateItem.prototype.getBaseLength = function () { + var length = this.length + if (length == null && typeof this.value === 'string' && this.maxLength == null && this.minLength == null) { + length = stringWidth(this.value) + } + return length +} + +TemplateItem.prototype.getLength = function () { + var length = this.getBaseLength() + if (length == null) return null + return length + this.padLeft + this.padRight +} + +TemplateItem.prototype.getMaxLength = function () { + if 
(this.maxLength == null) return null + return this.maxLength + this.padLeft + this.padRight +} + +TemplateItem.prototype.getMinLength = function () { + if (this.minLength == null) return null + return this.minLength + this.padLeft + this.padRight +} + diff --git a/node_modules/node-gyp/node_modules/gauge/theme-set.js b/node_modules/node-gyp/node_modules/gauge/theme-set.js new file mode 100644 index 0000000000000..68971d5d231b0 --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/theme-set.js @@ -0,0 +1,115 @@ +'use strict' +var objectAssign = require('object-assign') + +module.exports = function () { + return ThemeSetProto.newThemeSet() +} + +var ThemeSetProto = {} + +ThemeSetProto.baseTheme = require('./base-theme.js') + +ThemeSetProto.newTheme = function (parent, theme) { + if (!theme) { + theme = parent + parent = this.baseTheme + } + return objectAssign({}, parent, theme) +} + +ThemeSetProto.getThemeNames = function () { + return Object.keys(this.themes) +} + +ThemeSetProto.addTheme = function (name, parent, theme) { + this.themes[name] = this.newTheme(parent, theme) +} + +ThemeSetProto.addToAllThemes = function (theme) { + var themes = this.themes + Object.keys(themes).forEach(function (name) { + objectAssign(themes[name], theme) + }) + objectAssign(this.baseTheme, theme) +} + +ThemeSetProto.getTheme = function (name) { + if (!this.themes[name]) throw this.newMissingThemeError(name) + return this.themes[name] +} + +ThemeSetProto.setDefault = function (opts, name) { + if (name == null) { + name = opts + opts = {} + } + var platform = opts.platform == null ? 'fallback' : opts.platform + var hasUnicode = !!opts.hasUnicode + var hasColor = !!opts.hasColor + if (!this.defaults[platform]) this.defaults[platform] = {true: {}, false: {}} + this.defaults[platform][hasUnicode][hasColor] = name +} + +ThemeSetProto.getDefault = function (opts) { + if (!opts) opts = {} + var platformName = opts.platform || process.platform + var platform = this.defaults[platformName] || this.defaults.fallback + var hasUnicode = !!opts.hasUnicode + var hasColor = !!opts.hasColor + if (!platform) throw this.newMissingDefaultThemeError(platformName, hasUnicode, hasColor) + if (!platform[hasUnicode][hasColor]) { + if (hasUnicode && hasColor && platform[!hasUnicode][hasColor]) { + hasUnicode = false + } else if (hasUnicode && hasColor && platform[hasUnicode][!hasColor]) { + hasColor = false + } else if (hasUnicode && hasColor && platform[!hasUnicode][!hasColor]) { + hasUnicode = false + hasColor = false + } else if (hasUnicode && !hasColor && platform[!hasUnicode][hasColor]) { + hasUnicode = false + } else if (!hasUnicode && hasColor && platform[hasUnicode][!hasColor]) { + hasColor = false + } else if (platform === this.defaults.fallback) { + throw this.newMissingDefaultThemeError(platformName, hasUnicode, hasColor) + } + } + if (platform[hasUnicode][hasColor]) { + return this.getTheme(platform[hasUnicode][hasColor]) + } else { + return this.getDefault(objectAssign({}, opts, {platform: 'fallback'})) + } +} + +ThemeSetProto.newMissingThemeError = function newMissingThemeError (name) { + var err = new Error('Could not find a gauge theme named "' + name + '"') + Error.captureStackTrace.call(err, newMissingThemeError) + err.theme = name + err.code = 'EMISSINGTHEME' + return err +} + +ThemeSetProto.newMissingDefaultThemeError = function newMissingDefaultThemeError (platformName, hasUnicode, hasColor) { + var err = new Error( + 'Could not find a gauge theme for your platform/unicode/color use combo:\n' + + ' platform 
= ' + platformName + '\n' + + ' hasUnicode = ' + hasUnicode + '\n' + + ' hasColor = ' + hasColor) + Error.captureStackTrace.call(err, newMissingDefaultThemeError) + err.platform = platformName + err.hasUnicode = hasUnicode + err.hasColor = hasColor + err.code = 'EMISSINGTHEME' + return err +} + +ThemeSetProto.newThemeSet = function () { + var themeset = function (opts) { + return themeset.getDefault(opts) + } + return objectAssign(themeset, ThemeSetProto, { + themes: objectAssign({}, this.themes), + baseTheme: objectAssign({}, this.baseTheme), + defaults: JSON.parse(JSON.stringify(this.defaults || {})) + }) +} + diff --git a/node_modules/node-gyp/node_modules/gauge/themes.js b/node_modules/node-gyp/node_modules/gauge/themes.js new file mode 100644 index 0000000000000..eb5a4f5b5e103 --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/themes.js @@ -0,0 +1,54 @@ +'use strict' +var consoleControl = require('console-control-strings') +var ThemeSet = require('./theme-set.js') + +var themes = module.exports = new ThemeSet() + +themes.addTheme('ASCII', { + preProgressbar: '[', + postProgressbar: ']', + progressbarTheme: { + complete: '#', + remaining: '.' + }, + activityIndicatorTheme: '-\\|/', + preSubsection: '>' +}) + +themes.addTheme('colorASCII', themes.getTheme('ASCII'), { + progressbarTheme: { + preComplete: consoleControl.color('inverse'), + complete: ' ', + postComplete: consoleControl.color('stopInverse'), + preRemaining: consoleControl.color('brightBlack'), + remaining: '.', + postRemaining: consoleControl.color('reset') + } +}) + +themes.addTheme('brailleSpinner', { + preProgressbar: '⸨', + postProgressbar: '⸩', + progressbarTheme: { + complete: '░', + remaining: '⠂' + }, + activityIndicatorTheme: '⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏', + preSubsection: '>' +}) + +themes.addTheme('colorBrailleSpinner', themes.getTheme('brailleSpinner'), { + progressbarTheme: { + preComplete: consoleControl.color('inverse'), + complete: ' ', + postComplete: consoleControl.color('stopInverse'), + preRemaining: consoleControl.color('brightBlack'), + remaining: '░', + postRemaining: consoleControl.color('reset') + } +}) + +themes.setDefault({}, 'ASCII') +themes.setDefault({hasColor: true}, 'colorASCII') +themes.setDefault({platform: 'darwin', hasUnicode: true}, 'brailleSpinner') +themes.setDefault({platform: 'darwin', hasUnicode: true, hasColor: true}, 'colorBrailleSpinner') diff --git a/node_modules/node-gyp/node_modules/gauge/wide-truncate.js b/node_modules/node-gyp/node_modules/gauge/wide-truncate.js new file mode 100644 index 0000000000000..c531bc491fbb5 --- /dev/null +++ b/node_modules/node-gyp/node_modules/gauge/wide-truncate.js @@ -0,0 +1,25 @@ +'use strict' +var stringWidth = require('string-width') +var stripAnsi = require('strip-ansi') + +module.exports = wideTruncate + +function wideTruncate (str, target) { + if (stringWidth(str) === 0) return str + if (target <= 0) return '' + if (stringWidth(str) <= target) return str + + // We compute the number of bytes of ansi sequences here and add + // that to our initial truncation to ensure that we don't slice one + // that we want to keep in half. + var noAnsi = stripAnsi(str) + var ansiSize = str.length + noAnsi.length + var truncated = str.slice(0, target + ansiSize) + + // we have to shrink the result to account for our ansi sequence buffer + // (if an ansi sequence was truncated) and double width characters. 
+ while (stringWidth(truncated) > target) { + truncated = truncated.slice(0, -1) + } + return truncated +} diff --git a/node_modules/gauge/node_modules/is-fullwidth-code-point/index.js b/node_modules/node-gyp/node_modules/is-fullwidth-code-point/index.js similarity index 100% rename from node_modules/gauge/node_modules/is-fullwidth-code-point/index.js rename to node_modules/node-gyp/node_modules/is-fullwidth-code-point/index.js diff --git a/node_modules/gauge/node_modules/is-fullwidth-code-point/license b/node_modules/node-gyp/node_modules/is-fullwidth-code-point/license similarity index 100% rename from node_modules/gauge/node_modules/is-fullwidth-code-point/license rename to node_modules/node-gyp/node_modules/is-fullwidth-code-point/license diff --git a/node_modules/gauge/node_modules/is-fullwidth-code-point/package.json b/node_modules/node-gyp/node_modules/is-fullwidth-code-point/package.json similarity index 100% rename from node_modules/gauge/node_modules/is-fullwidth-code-point/package.json rename to node_modules/node-gyp/node_modules/is-fullwidth-code-point/package.json diff --git a/node_modules/node-gyp/node_modules/npmlog/LICENSE b/node_modules/node-gyp/node_modules/npmlog/LICENSE new file mode 100644 index 0000000000000..19129e315fe59 --- /dev/null +++ b/node_modules/node-gyp/node_modules/npmlog/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/npmlog/log.js b/node_modules/node-gyp/node_modules/npmlog/log.js new file mode 100644 index 0000000000000..341f3313ab354 --- /dev/null +++ b/node_modules/node-gyp/node_modules/npmlog/log.js @@ -0,0 +1,309 @@ +'use strict' +var Progress = require('are-we-there-yet') +var Gauge = require('gauge') +var EE = require('events').EventEmitter +var log = exports = module.exports = new EE() +var util = require('util') + +var setBlocking = require('set-blocking') +var consoleControl = require('console-control-strings') + +setBlocking(true) +var stream = process.stderr +Object.defineProperty(log, 'stream', { + set: function (newStream) { + stream = newStream + if (this.gauge) this.gauge.setWriteTo(stream, stream) + }, + get: function () { + return stream + } +}) + +// by default, decide based on tty-ness. +var colorEnabled +log.useColor = function () { + return colorEnabled != null ? 
colorEnabled : stream.isTTY +} + +log.enableColor = function () { + colorEnabled = true + this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) +} +log.disableColor = function () { + colorEnabled = false + this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) +} + +// default level +log.level = 'info' + +log.gauge = new Gauge(stream, { + enabled: false, // no progress bars unless asked + theme: {hasColor: log.useColor()}, + template: [ + {type: 'progressbar', length: 20}, + {type: 'activityIndicator', kerning: 1, length: 1}, + {type: 'section', default: ''}, + ':', + {type: 'logline', kerning: 1, default: ''} + ] +}) + +log.tracker = new Progress.TrackerGroup() + +// we track this separately as we may need to temporarily disable the +// display of the status bar for our own loggy purposes. +log.progressEnabled = log.gauge.isEnabled() + +var unicodeEnabled + +log.enableUnicode = function () { + unicodeEnabled = true + this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) +} + +log.disableUnicode = function () { + unicodeEnabled = false + this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) +} + +log.setGaugeThemeset = function (themes) { + this.gauge.setThemeset(themes) +} + +log.setGaugeTemplate = function (template) { + this.gauge.setTemplate(template) +} + +log.enableProgress = function () { + if (this.progressEnabled) return + this.progressEnabled = true + this.tracker.on('change', this.showProgress) + if (this._pause) return + this.gauge.enable() +} + +log.disableProgress = function () { + if (!this.progressEnabled) return + this.progressEnabled = false + this.tracker.removeListener('change', this.showProgress) + this.gauge.disable() +} + +var trackerConstructors = ['newGroup', 'newItem', 'newStream'] + +var mixinLog = function (tracker) { + // mixin the public methods from log into the tracker + // (except: conflicts and one's we handle specially) + Object.keys(log).forEach(function (P) { + if (P[0] === '_') return + if (trackerConstructors.filter(function (C) { return C === P }).length) return + if (tracker[P]) return + if (typeof log[P] !== 'function') return + var func = log[P] + tracker[P] = function () { + return func.apply(log, arguments) + } + }) + // if the new tracker is a group, make sure any subtrackers get + // mixed in too + if (tracker instanceof Progress.TrackerGroup) { + trackerConstructors.forEach(function (C) { + var func = tracker[C] + tracker[C] = function () { return mixinLog(func.apply(tracker, arguments)) } + }) + } + return tracker +} + +// Add tracker constructors to the top level log object +trackerConstructors.forEach(function (C) { + log[C] = function () { return mixinLog(this.tracker[C].apply(this.tracker, arguments)) } +}) + +log.clearProgress = function (cb) { + if (!this.progressEnabled) return cb && process.nextTick(cb) + this.gauge.hide(cb) +} + +log.showProgress = function (name, completed) { + if (!this.progressEnabled) return + var values = {} + if (name) values.section = name + var last = log.record[log.record.length - 1] + if (last) { + values.subsection = last.prefix + var disp = log.disp[last.level] || last.level + var logline = this._format(disp, log.style[last.level]) + if (last.prefix) logline += ' ' + this._format(last.prefix, this.prefixStyle) + logline += ' ' + last.message.split(/\r?\n/)[0] + values.logline = logline + } + values.completed = completed || this.tracker.completed() + this.gauge.show(values) +}.bind(log) // bind for use in tracker's 
on-change listener + +// temporarily stop emitting, but don't drop +log.pause = function () { + this._paused = true + if (this.progressEnabled) this.gauge.disable() +} + +log.resume = function () { + if (!this._paused) return + this._paused = false + + var b = this._buffer + this._buffer = [] + b.forEach(function (m) { + this.emitLog(m) + }, this) + if (this.progressEnabled) this.gauge.enable() +} + +log._buffer = [] + +var id = 0 +log.record = [] +log.maxRecordSize = 10000 +log.log = function (lvl, prefix, message) { + var l = this.levels[lvl] + if (l === undefined) { + return this.emit('error', new Error(util.format( + 'Undefined log level: %j', lvl))) + } + + var a = new Array(arguments.length - 2) + var stack = null + for (var i = 2; i < arguments.length; i++) { + var arg = a[i - 2] = arguments[i] + + // resolve stack traces to a plain string. + if (typeof arg === 'object' && arg && + (arg instanceof Error) && arg.stack) { + + Object.defineProperty(arg, 'stack', { + value: stack = arg.stack + '', + enumerable: true, + writable: true + }) + } + } + if (stack) a.unshift(stack + '\n') + message = util.format.apply(util, a) + + var m = { id: id++, + level: lvl, + prefix: String(prefix || ''), + message: message, + messageRaw: a } + + this.emit('log', m) + this.emit('log.' + lvl, m) + if (m.prefix) this.emit(m.prefix, m) + + this.record.push(m) + var mrs = this.maxRecordSize + var n = this.record.length - mrs + if (n > mrs / 10) { + var newSize = Math.floor(mrs * 0.9) + this.record = this.record.slice(-1 * newSize) + } + + this.emitLog(m) +}.bind(log) + +log.emitLog = function (m) { + if (this._paused) { + this._buffer.push(m) + return + } + if (this.progressEnabled) this.gauge.pulse(m.prefix) + var l = this.levels[m.level] + if (l === undefined) return + if (l < this.levels[this.level]) return + if (l > 0 && !isFinite(l)) return + + // If 'disp' is null or undefined, use the lvl as a default + // Allows: '', 0 as valid disp + var disp = log.disp[m.level] != null ? 
log.disp[m.level] : m.level + this.clearProgress() + m.message.split(/\r?\n/).forEach(function (line) { + if (this.heading) { + this.write(this.heading, this.headingStyle) + this.write(' ') + } + this.write(disp, log.style[m.level]) + var p = m.prefix || '' + if (p) this.write(' ') + this.write(p, this.prefixStyle) + this.write(' ' + line + '\n') + }, this) + this.showProgress() +} + +log._format = function (msg, style) { + if (!stream) return + + var output = '' + if (this.useColor()) { + style = style || {} + var settings = [] + if (style.fg) settings.push(style.fg) + if (style.bg) settings.push('bg' + style.bg[0].toUpperCase() + style.bg.slice(1)) + if (style.bold) settings.push('bold') + if (style.underline) settings.push('underline') + if (style.inverse) settings.push('inverse') + if (settings.length) output += consoleControl.color(settings) + if (style.beep) output += consoleControl.beep() + } + output += msg + if (this.useColor()) { + output += consoleControl.color('reset') + } + return output +} + +log.write = function (msg, style) { + if (!stream) return + + stream.write(this._format(msg, style)) +} + +log.addLevel = function (lvl, n, style, disp) { + // If 'disp' is null or undefined, use the lvl as a default + if (disp == null) disp = lvl + this.levels[lvl] = n + this.style[lvl] = style + if (!this[lvl]) { + this[lvl] = function () { + var a = new Array(arguments.length + 1) + a[0] = lvl + for (var i = 0; i < arguments.length; i++) { + a[i + 1] = arguments[i] + } + return this.log.apply(this, a) + }.bind(this) + } + this.disp[lvl] = disp +} + +log.prefixStyle = { fg: 'magenta' } +log.headingStyle = { fg: 'white', bg: 'black' } + +log.style = {} +log.levels = {} +log.disp = {} +log.addLevel('silly', -Infinity, { inverse: true }, 'sill') +log.addLevel('verbose', 1000, { fg: 'blue', bg: 'black' }, 'verb') +log.addLevel('info', 2000, { fg: 'green' }) +log.addLevel('timing', 2500, { fg: 'green', bg: 'black' }) +log.addLevel('http', 3000, { fg: 'green', bg: 'black' }) +log.addLevel('notice', 3500, { fg: 'blue', bg: 'black' }) +log.addLevel('warn', 4000, { fg: 'black', bg: 'yellow' }, 'WARN') +log.addLevel('error', 5000, { fg: 'red', bg: 'black' }, 'ERR!') +log.addLevel('silent', Infinity) + +// allow 'error' prefix +log.on('error', function () {}) diff --git a/node_modules/node-gyp/node_modules/npmlog/package.json b/node_modules/node-gyp/node_modules/npmlog/package.json new file mode 100644 index 0000000000000..7220f8e72a3c7 --- /dev/null +++ b/node_modules/node-gyp/node_modules/npmlog/package.json @@ -0,0 +1,28 @@ +{ + "author": "Isaac Z. 
Schlueter <i@izs.me> (http://blog.izs.me/)", + "name": "npmlog", + "description": "logger for npm", + "version": "4.1.2", + "repository": { + "type": "git", + "url": "https://github.com/npm/npmlog.git" + }, + "main": "log.js", + "files": [ + "log.js" + ], + "scripts": { + "test": "standard && tap test/*.js" + }, + "dependencies": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + }, + "devDependencies": { + "standard": "~7.1.2", + "tap": "~5.7.3" + }, + "license": "ISC" +} diff --git a/node_modules/gauge/node_modules/string-width/index.js b/node_modules/node-gyp/node_modules/string-width/index.js similarity index 100% rename from node_modules/gauge/node_modules/string-width/index.js rename to node_modules/node-gyp/node_modules/string-width/index.js diff --git a/node_modules/gauge/node_modules/string-width/license b/node_modules/node-gyp/node_modules/string-width/license similarity index 100% rename from node_modules/gauge/node_modules/string-width/license rename to node_modules/node-gyp/node_modules/string-width/license diff --git a/node_modules/gauge/node_modules/string-width/package.json b/node_modules/node-gyp/node_modules/string-width/package.json similarity index 100% rename from node_modules/gauge/node_modules/string-width/package.json rename to node_modules/node-gyp/node_modules/string-width/package.json diff --git a/node_modules/nopt/CHANGELOG.md b/node_modules/nopt/CHANGELOG.md deleted file mode 100644 index 82a09fb4bf809..0000000000000 --- a/node_modules/nopt/CHANGELOG.md +++ /dev/null @@ -1,58 +0,0 @@ -### v4.0.1 (2016-12-14) - -#### WHOOPS - -* [`fb9b1ce`](https://github.com/npm/nopt/commit/fb9b1ce57b3c69b4f7819015be87719204f77ef6) - Merged so many patches at once that the code fencing - ([@adius](https://github.com/adius)) added got broken. Sorry, - ([@adius](https://github.com/adius))! - ([@othiym23](https://github.com/othiym23)) - -### v4.0.0 (2016-12-13) - -#### BREAKING CHANGES - -* [`651d447`](https://github.com/npm/nopt/commit/651d4473946096d341a480bbe56793de3fc706aa) - When parsing String-typed arguments, if the next value is `""`, don't simply - swallow it. ([@samjonester](https://github.com/samjonester)) - -#### PERFORMANCE TWEAKS - -* [`3370ce8`](https://github.com/npm/nopt/commit/3370ce87a7618ba228883861db84ddbcdff252a9) - Simplify initialization. ([@elidoran](https://github.com/elidoran)) -* [`356e58e`](https://github.com/npm/nopt/commit/356e58e3b3b431a4b1af7fd7bdee44c2c0526a09) - Store `Array.isArray(types[arg])` for reuse. - ([@elidoran](https://github.com/elidoran)) -* [`0d95e90`](https://github.com/npm/nopt/commit/0d95e90515844f266015b56d2c80b94e5d14a07e) - Interpret single-item type arrays as a single type. - ([@samjonester](https://github.com/samjonester)) -* [`07c69d3`](https://github.com/npm/nopt/commit/07c69d38b5186450941fbb505550becb78a0e925) - Simplify key-value extraction. ([@elidoran](https://github.com/elidoran)) -* [`39b6e5c`](https://github.com/npm/nopt/commit/39b6e5c65ac47f60cd43a1fbeece5cd4c834c254) - Only call `Date.parse(val)` once. ([@elidoran](https://github.com/elidoran)) -* [`934943d`](https://github.com/npm/nopt/commit/934943dffecb55123a2b15959fe2a359319a5dbd) - Use `osenv.home()` to find a user's home directory instead of assuming it's - always `$HOME`. ([@othiym23](https://github.com/othiym23)) - -#### TEST & CI IMPROVEMENTS - -* [`326ffff`](https://github.com/npm/nopt/commit/326ffff7f78a00bcd316adecf69075f8a8093619) - Fix `/tmp` test to work on Windows. 
- ([@elidoran](https://github.com/elidoran)) -* [`c89d31a`](https://github.com/npm/nopt/commit/c89d31a49d14f2238bc6672db08da697bbc57f1b) - Only run Windows tests on Windows, only run Unix tests on a Unix. - ([@elidoran](https://github.com/elidoran)) -* [`affd3d1`](https://github.com/npm/nopt/commit/affd3d1d0addffa93006397b2013b18447339366) - Refresh Travis to run the tests against the currently-supported batch of npm - versions. ([@helio](https://github.com/helio)-frota) -* [`55f9449`](https://github.com/npm/nopt/commit/55f94497d163ed4d16dd55fd6c4fb95cc440e66d) - `tap@8.0.1` ([@othiym23](https://github.com/othiym23)) - -#### DOC TWEAKS - -* [`5271229`](https://github.com/npm/nopt/commit/5271229ee7c810217dd51616c086f5d9ab224581) - Use JavaScript code block for syntax highlighting. - ([@adius](https://github.com/adius)) -* [`c0d156f`](https://github.com/npm/nopt/commit/c0d156f229f9994c5dfcec4a8886eceff7a07682) - The code sample in the README had `many2: [ oneThing ]`, and now it has - `many2: [ two, things ]`. ([@silkentrance](https://github.com/silkentrance)) diff --git a/node_modules/nopt/README.md b/node_modules/nopt/README.md deleted file mode 100644 index a99531c04655f..0000000000000 --- a/node_modules/nopt/README.md +++ /dev/null @@ -1,213 +0,0 @@ -If you want to write an option parser, and have it be good, there are -two ways to do it. The Right Way, and the Wrong Way. - -The Wrong Way is to sit down and write an option parser. We've all done -that. - -The Right Way is to write some complex configurable program with so many -options that you hit the limit of your frustration just trying to -manage them all, and defer it with duct-tape solutions until you see -exactly to the core of the problem, and finally snap and write an -awesome option parser. - -If you want to write an option parser, don't write an option parser. -Write a package manager, or a source control system, or a service -restarter, or an operating system. You probably won't end up with a -good one of those, but if you don't give up, and you are relentless and -diligent enough in your procrastination, you may just end up with a very -nice option parser. - -## USAGE - -```javascript -// my-program.js -var nopt = require("nopt") - , Stream = require("stream").Stream - , path = require("path") - , knownOpts = { "foo" : [String, null] - , "bar" : [Stream, Number] - , "baz" : path - , "bloo" : [ "big", "medium", "small" ] - , "flag" : Boolean - , "pick" : Boolean - , "many1" : [String, Array] - , "many2" : [path, Array] - } - , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] - , "b7" : ["--bar", "7"] - , "m" : ["--bloo", "medium"] - , "p" : ["--pick"] - , "f" : ["--flag"] - } - // everything is optional. - // knownOpts and shorthands default to {} - // arg list defaults to process.argv - // slice defaults to 2 - , parsed = nopt(knownOpts, shortHands, process.argv, 2) -console.log(parsed) -``` - -This would give you support for any of the following: - -```console -$ node my-program.js --foo "blerp" --no-flag -{ "foo" : "blerp", "flag" : false } - -$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag -{ bar: 7, foo: "Mr. Hand", flag: true } - -$ node my-program.js --foo "blerp" -f -----p -{ foo: "blerp", flag: true, pick: true } - -$ node my-program.js -fp --foofoo -{ foo: "Mr. Foo", flag: true, pick: true } - -$ node my-program.js --foofoo -- -fp # -- stops the flag parsing. -{ foo: "Mr. Foo", argv: { remain: ["-fp"] } } - -$ node my-program.js --blatzk -fp # unknown opts are ok. 
-{ blatzk: true, flag: true, pick: true } - -$ node my-program.js --blatzk=1000 -fp # but you need to use = if they have a value -{ blatzk: 1000, flag: true, pick: true } - -$ node my-program.js --no-blatzk -fp # unless they start with "no-" -{ blatzk: false, flag: true, pick: true } - -$ node my-program.js --baz b/a/z # known paths are resolved. -{ baz: "/Users/isaacs/b/a/z" } - -# if Array is one of the types, then it can take many -# values, and will always be an array. The other types provided -# specify what types are allowed in the list. - -$ node my-program.js --many1 5 --many1 null --many1 foo -{ many1: ["5", "null", "foo"] } - -$ node my-program.js --many2 foo --many2 bar -{ many2: ["/path/to/foo", "path/to/bar"] } -``` - -Read the tests at the bottom of `lib/nopt.js` for more examples of -what this puppy can do. - -## Types - -The following types are supported, and defined on `nopt.typeDefs` - -* String: A normal string. No parsing is done. -* path: A file system path. Gets resolved against cwd if not absolute. -* url: A url. If it doesn't parse, it isn't accepted. -* Number: Must be numeric. -* Date: Must parse as a date. If it does, and `Date` is one of the options, - then it will return a Date object, not a string. -* Boolean: Must be either `true` or `false`. If an option is a boolean, - then it does not need a value, and its presence will imply `true` as - the value. To negate boolean flags, do `--no-whatever` or `--whatever - false` -* NaN: Means that the option is strictly not allowed. Any value will - fail. -* Stream: An object matching the "Stream" class in node. Valuable - for use when validating programmatically. (npm uses this to let you - supply any WriteStream on the `outfd` and `logfd` config options.) -* Array: If `Array` is specified as one of the types, then the value - will be parsed as a list of options. This means that multiple values - can be specified, and that the value will always be an array. - -If a type is an array of values not on this list, then those are -considered valid values. For instance, in the example above, the -`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`, -and any other value will be rejected. - -When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be -interpreted as their JavaScript equivalents. - -You can also mix types and values, or multiple types, in a list. For -instance `{ blah: [Number, null] }` would allow a value to be set to -either a Number or null. When types are ordered, this implies a -preference, and the first type that can be used to properly interpret -the value will be used. - -To define a new type, add it to `nopt.typeDefs`. Each item in that -hash is an object with a `type` member and a `validate` method. The -`type` member is an object that matches what goes in the type list. The -`validate` method is a function that gets called with `validate(data, -key, val)`. Validate methods should assign `data[key]` to the valid -value of `val` if it can be handled properly, or return boolean -`false` if it cannot. - -You can also call `nopt.clean(data, types, typeDefs)` to clean up a -config object and remove its invalid properties. - -## Error Handling - -By default, nopt outputs a warning to standard error when invalid values for -known options are found. You can change this behavior by assigning a method -to `nopt.invalidHandler`. This method will be called with -the offending `nopt.invalidHandler(key, val, types)`. 
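Editorial aside (not part of the diff): the nopt README removed above describes custom `nopt.typeDefs` entries and the `invalidHandler` hook in prose only. A minimal sketch of both, following that description — the `CommaList` type and the handler body are hypothetical, added purely for illustration:

```js
// Hypothetical custom type registered on nopt.typeDefs, as the README describes:
// each entry carries a `type` marker and a `validate(data, key, val)` method.
var nopt = require('nopt')

function CommaList () {}
nopt.typeDefs.CommaList = {
  type: CommaList,
  validate: function (data, key, val) {
    data[key] = String(val).split(',').filter(Boolean)
    return true // return false to reject the value
  }
}

// Optional: replace the default warning for invalid values
// (signature per the README above).
nopt.invalidHandler = function (key, val, types) {
  console.error('ignoring invalid value %j for --%s', val, key)
}

var parsed = nopt({ tags: CommaList, port: Number }, {},
  ['node', 'prog', '--tags', 'a,b,c', '--port', 'not-a-number'], 2)
console.log(parsed.tags) // ['a', 'b', 'c']; the invalid --port value is dropped
```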
- -If no `nopt.invalidHandler` is assigned, then it will console.error -its whining. If it is assigned to boolean `false` then the warning is -suppressed. - -## Abbreviations - -Yes, they are supported. If you define options like this: - -```javascript -{ "foolhardyelephants" : Boolean -, "pileofmonkeys" : Boolean } -``` - -Then this will work: - -```bash -node program.js --foolhar --pil -node program.js --no-f --pileofmon -# etc. -``` - -## Shorthands - -Shorthands are a hash of shorter option names to a snippet of args that -they expand to. - -If multiple one-character shorthands are all combined, and the -combination does not unambiguously match any other option or shorthand, -then they will be broken up into their constituent parts. For example: - -```json -{ "s" : ["--loglevel", "silent"] -, "g" : "--global" -, "f" : "--force" -, "p" : "--parseable" -, "l" : "--long" -} -``` - -```bash -npm ls -sgflp -# just like doing this: -npm ls --loglevel silent --global --force --long --parseable -``` - -## The Rest of the args - -The config object returned by nopt is given a special member called -`argv`, which is an object with the following fields: - -* `remain`: The remaining args after all the parsing has occurred. -* `original`: The args as they originally appeared. -* `cooked`: The args after flags and shorthands are expanded. - -## Slicing - -Node programs are called with more or less the exact argv as it appears -in C land, after the v8 and node-specific options have been plucked off. -As such, `argv[0]` is always `node` and `argv[1]` is always the -JavaScript program being run. - -That's usually not very useful to you. So they're sliced off by -default. If you want them, then you can pass in `0` as the last -argument, or any other number that you'd like to slice off the start of -the list. diff --git a/node_modules/normalize-package-data/README.md b/node_modules/normalize-package-data/README.md deleted file mode 100644 index d2bd7bc7ff606..0000000000000 --- a/node_modules/normalize-package-data/README.md +++ /dev/null @@ -1,106 +0,0 @@ -# normalize-package-data [![Build Status](https://travis-ci.org/npm/normalize-package-data.png?branch=master)](https://travis-ci.org/npm/normalize-package-data) - -normalize-package-data exports a function that normalizes package metadata. This data is typically found in a package.json file, but in principle could come from any source - for example the npm registry. - -normalize-package-data is used by [read-package-json](https://npmjs.org/package/read-package-json) to normalize the data it reads from a package.json file. In turn, read-package-json is used by [npm](https://npmjs.org/package/npm) and various npm-related tools. - -## Installation - -``` -npm install normalize-package-data -``` - -## Usage - -Basic usage is really simple. You call the function that normalize-package-data exports. Let's call it `normalizeData`. - -```javascript -normalizeData = require('normalize-package-data') -packageData = require("./package.json") -normalizeData(packageData) -// packageData is now normalized -``` - -#### Strict mode - -You may activate strict validation by passing true as the second argument. - -```javascript -normalizeData = require('normalize-package-data') -packageData = require("./package.json") -normalizeData(packageData, true) -// packageData is now normalized -``` - -If strict mode is activated, only Semver 2.0 version strings are accepted. Otherwise, Semver 1.0 strings are accepted as well. 
Packages must have a name, and the name field must not have contain leading or trailing whitespace. - -#### Warnings - -Optionally, you may pass a "warning" function. It gets called whenever the `normalizeData` function encounters something that doesn't look right. It indicates less than perfect input data. - -```javascript -normalizeData = require('normalize-package-data') -packageData = require("./package.json") -warnFn = function(msg) { console.error(msg) } -normalizeData(packageData, warnFn) -// packageData is now normalized. Any number of warnings may have been logged. -``` - -You may combine strict validation with warnings by passing `true` as the second argument, and `warnFn` as third. - -When `private` field is set to `true`, warnings will be suppressed. - -### Potential exceptions - -If the supplied data has an invalid name or version vield, `normalizeData` will throw an error. Depending on where you call `normalizeData`, you may want to catch these errors so can pass them to a callback. - -## What normalization (currently) entails - -* The value of `name` field gets trimmed (unless in strict mode). -* The value of the `version` field gets cleaned by `semver.clean`. See [documentation for the semver module](https://github.com/isaacs/node-semver). -* If `name` and/or `version` fields are missing, they are set to empty strings. -* If `files` field is not an array, it will be removed. -* If `bin` field is a string, then `bin` field will become an object with `name` set to the value of the `name` field, and `bin` set to the original string value. -* If `man` field is a string, it will become an array with the original string as its sole member. -* If `keywords` field is string, it is considered to be a list of keywords separated by one or more white-space characters. It gets converted to an array by splitting on `\s+`. -* All people fields (`author`, `maintainers`, `contributors`) get converted into objects with name, email and url properties. -* If `bundledDependencies` field (a typo) exists and `bundleDependencies` field does not, `bundledDependencies` will get renamed to `bundleDependencies`. -* If the value of any of the dependencies fields (`dependencies`, `devDependencies`, `optionalDependencies`) is a string, it gets converted into an object with familiar `name=>value` pairs. -* The values in `optionalDependencies` get added to `dependencies`. The `optionalDependencies` array is left untouched. -* As of v2: Dependencies that point at known hosted git providers (currently: github, bitbucket, gitlab) will have their URLs canonicalized, but protocols will be preserved. -* As of v2: Dependencies that use shortcuts for hosted git providers (`org/proj`, `github:org/proj`, `bitbucket:org/proj`, `gitlab:org/proj`, `gist:docid`) will have the shortcut left in place. (In the case of github, the `org/proj` form will be expanded to `github:org/proj`.) THIS MARKS A BREAKING CHANGE FROM V1, where the shorcut was previously expanded to a URL. -* If `description` field does not exist, but `readme` field does, then (more or less) the first paragraph of text that's found in the readme is taken as value for `description`. -* If `repository` field is a string, it will become an object with `url` set to the original string value, and `type` set to `"git"`. 
-* If `repository.url` is not a valid url, but in the style of "[owner-name]/[repo-name]", `repository.url` will be set to git+https://github.com/[owner-name]/[repo-name].git -* If `bugs` field is a string, the value of `bugs` field is changed into an object with `url` set to the original string value. -* If `bugs` field does not exist, but `repository` field points to a repository hosted on GitHub, the value of the `bugs` field gets set to an url in the form of https://github.com/[owner-name]/[repo-name]/issues . If the repository field points to a GitHub Gist repo url, the associated http url is chosen. -* If `bugs` field is an object, the resulting value only has email and url properties. If email and url properties are not strings, they are ignored. If no valid values for either email or url is found, bugs field will be removed. -* If `homepage` field is not a string, it will be removed. -* If the url in the `homepage` field does not specify a protocol, then http is assumed. For example, `myproject.org` will be changed to `http://myproject.org`. -* If `homepage` field does not exist, but `repository` field points to a repository hosted on GitHub, the value of the `homepage` field gets set to an url in the form of https://github.com/[owner-name]/[repo-name]#readme . If the repository field points to a GitHub Gist repo url, the associated http url is chosen. - -### Rules for name field - -If `name` field is given, the value of the name field must be a string. The string may not: - -* start with a period. -* contain the following characters: `/@\s+%` -* contain any characters that would need to be encoded for use in urls. -* resemble the word `node_modules` or `favicon.ico` (case doesn't matter). - -### Rules for version field - -If `version` field is given, the value of the version field must be a valid *semver* string, as determined by the `semver.valid` method. See [documentation for the semver module](https://github.com/isaacs/node-semver). - -### Rules for license field - -The `license` field should be a valid *SPDX license expression* or one of the special values allowed by [validate-npm-package-license](https://npmjs.com/package/validate-npm-package-license). See [documentation for the license field in package.json](https://docs.npmjs.com/files/package.json#license). - -## Credits - -This package contains code based on read-package-json written by Isaac Z. Schlueter. Used with permisson. - -## License - -normalize-package-data is released under the [BSD 2-Clause License](http://opensource.org/licenses/MIT). 
-Copyright (c) 2013 Meryn Stol diff --git a/node_modules/normalize-package-data/package.json b/node_modules/normalize-package-data/package.json index 8df2f8fcac21e..09b184c19d563 100644 --- a/node_modules/normalize-package-data/package.json +++ b/node_modules/normalize-package-data/package.json @@ -1,6 +1,6 @@ { "name": "normalize-package-data", - "version": "3.0.0", + "version": "3.0.2", "author": "Meryn Stol <merynstol@gmail.com>", "description": "Normalizes data that can be found in package.json files.", "license": "BSD-2-Clause", @@ -10,16 +10,19 @@ }, "main": "lib/normalize.js", "scripts": { + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preversion": "npm test", "test": "tap test/*.js" }, "dependencies": { - "hosted-git-info": "^3.0.6", - "resolve": "^1.17.0", - "semver": "^7.3.2", + "hosted-git-info": "^4.0.1", + "resolve": "^1.20.0", + "semver": "^7.3.4", "validate-npm-package-license": "^3.0.1" }, "devDependencies": { - "tap": "^14.10.8" + "tap": "^14.11.0" }, "files": [ "lib/*.js", diff --git a/node_modules/npm-audit-report/CHANGELOG.md b/node_modules/npm-audit-report/CHANGELOG.md deleted file mode 100644 index 58819a43b4d11..0000000000000 --- a/node_modules/npm-audit-report/CHANGELOG.md +++ /dev/null @@ -1,81 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -<a name="1.3.3"></a> -## [1.3.3](https://github.com/npm/npm-audit-report/compare/v1.3.2...v1.3.3) (2020-03-26) - - - -<a name="1.3.2"></a> -## [1.3.2](https://github.com/npm/npm-audit-report/compare/v1.3.1...v1.3.2) (2018-12-18) - - -### Bug Fixes - -* **parseable:** add support for critical vulns and more resolves on update/install action ([#28](https://github.com/npm/npm-audit-report/issues/28)) ([5e27893](https://github.com/npm/npm-audit-report/commit/5e27893)) -* **security:** audit fix ([ff9faf3](https://github.com/npm/npm-audit-report/commit/ff9faf3)) -* **urls:** Replace hardcoded URL to advisory with a URL from audit response ([#34](https://github.com/npm/npm-audit-report/issues/34)) ([e2fe95b](https://github.com/npm/npm-audit-report/commit/e2fe95b)) - - - -<a name="1.3.1"></a> -## [1.3.1](https://github.com/npm/npm-audit-report/compare/v1.3.0...v1.3.1) (2018-07-10) - - - -<a name="1.3.0"></a> -# [1.3.0](https://github.com/npm/npm-audit-report/compare/v1.2.1...v1.3.0) (2018-07-09) - - -### Bug Fixes - -* **deps:** remove object.values dependency ([2c5374a](https://github.com/npm/npm-audit-report/commit/2c5374a)) -* **detail:** Fix info-level severity ([#18](https://github.com/npm/npm-audit-report/issues/18)) ([807db5a](https://github.com/npm/npm-audit-report/commit/807db5a)) -* **tests:** a test should not cause side-effects in other tests ([#23](https://github.com/npm/npm-audit-report/issues/23)) ([a94449f](https://github.com/npm/npm-audit-report/commit/a94449f)) - - -### Features - -* **output:** add `parseable` tabular output format support ([#21](https://github.com/npm/npm-audit-report/issues/21)) ([1c9aaf4](https://github.com/npm/npm-audit-report/commit/1c9aaf4)) - - - -<a name="1.2.1"></a> -## [1.2.1](https://github.com/npm/npm-audit-report/compare/v1.2.0...v1.2.1) (2018-05-17) - - -### Bug Fixes - -* **detail:** count id+path instead of just id ([99880fd](https://github.com/npm/npm-audit-report/commit/99880fd)) - - - -<a name="1.2.0"></a> -# [1.2.0](https://github.com/npm/npm-audit-report/compare/v1.1.0...v1.2.0) 
(2018-05-16) - - -### Bug Fixes - -* **full-report:** Fix install flag for devDependencies ([#14](https://github.com/npm/npm-audit-report/issues/14)) ([30e5f30](https://github.com/npm/npm-audit-report/commit/30e5f30)) - - -### Features - -* **detail:** consistified full report with install report ([#15](https://github.com/npm/npm-audit-report/issues/15)) ([6df6810](https://github.com/npm/npm-audit-report/commit/6df6810)) -* **install:** include `npm audit` recommendation too ([32fb153](https://github.com/npm/npm-audit-report/commit/32fb153)) - - - -<a name="1.1.0"></a> -# [1.1.0](https://github.com/npm/npm-audit-report/compare/v1.0.9...v1.1.0) (2018-05-10) - - -### Bug Fixes - -* **install:** not enough data for this conditional ([6ddc30c](https://github.com/npm/npm-audit-report/commit/6ddc30c)) - - -### Features - -* **report:** compress and reformat human-readable install report ([74d5203](https://github.com/npm/npm-audit-report/commit/74d5203)) diff --git a/node_modules/npm-audit-report/README.md b/node_modules/npm-audit-report/README.md deleted file mode 100644 index 6eb2a3dfe56c1..0000000000000 --- a/node_modules/npm-audit-report/README.md +++ /dev/null @@ -1,65 +0,0 @@ -# npm audit security report - -Given a response from the npm security api, render it into a variety of security reports - -The response is an object that contains an output string (the report) and a suggested exitCode. -``` -{ - report: 'string that contains the security report', - exit: 1 -} -``` - - -## Basic usage example - -This is intended to be used along with -[`@npmcli/arborist`](http://npm.im/@npmcli/arborist)'s `AuditReport` class. - -``` -'use strict' -const Report = require('npm-audit-report') -const options = { - reporter: 'json' -} - -const arb = new Arborist({ path: '/path/to/project' }) -arb.audit().then(report => { - const result = new Report(report, options) - console.log(result.output) - process.exitCode = result.exitCode -}) -``` - -## Break from Version 1 - -Version 5 and 6 of the npm CLI make a request to the registry endpoint at -either the "Full Audit" endpoint at `/-/npm/v1/security/audits` or -the "Quick Audit" endpoint at `/-/npm/v1/security/audits/quick`. The Full -Audit endpoint calculates remediations necessary to correct problems based -on the shape of the tree. - -As of npm v7, the logic of how the cli manages trees is dramatically -rearchitected, rendering much of the remediations no longer valid. -Thus, it _only_ fetches the advisory data from the Quick Audit endpoint, -and uses [`@npmcli/arborist`](http://npm.im/@npmcli/arborist) to calculate -required remediations and affected nodes in the dependency graph. This -data is serialized and provided as an `"auditReportVersion": 2` object. - -Version 2 of this module expects to recieve an instance (or serialized JSON -version of) the `AuditReport` class from Arborist, which is returned by -`arborist.audit()` and stored on the instance as `arborist.auditReport`. - -Eventually, a new endpoint _may_ be added to move the `@npmcli/arborist` work -to the server-side, in which case version 2 style audit reports _may_ be -provided directly. 
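Editorial aside (not part of the diff): to make the npm-audit-report options table that follows concrete, here is a hedged sketch of passing those options. The call shape follows the lib/index.js hunk further below (the main export is a plain function taking `(data, options)`), the result field names follow the report/exit-code response shape described at the top of this README, and the Arborist wiring mirrors the basic usage example above — none of it is independently verified here:

```js
// Sketch only: option names come from the options table; result field names
// follow the report + suggested exit code shape this README describes.
const Arborist = require('@npmcli/arborist')
const auditReport = require('npm-audit-report')

const arb = new Arborist({ path: '/path/to/project' })
arb.audit().then(report => {
  const result = auditReport(report, {
    reporter: 'detail',  // install | detail | json | quiet
    color: false,
    unicode: false,
    auditLevel: 'high'   // exit non-zero only for high or critical advisories
  })
  console.log(result.report)
  process.exitCode = result.exitCode
})
```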
- -## options - -| option | values | default | description | -| :--- | :--- | :--- |:--- | -| reporter | `install`, `detail`, `json`, `quiet` | `install` | specify which output format you want to use | -| color   | `true`, `false`   | `true`   | indicates if some report elements should use colors | -| unicode  | `true`, `false`                  | `true` | indicates if unicode characters should be used| -| indent   | Number or String                | `2` | indentation for `'json'` report| -| auditLevel | 'info', 'low', 'moderate', 'high', 'critical', 'none' | `low` (ie, exit 0 if only `info` advisories are found) | level of vulnerability that will trigger a non-zero exit code (set to 'none' to always exit with a 0 status code) | diff --git a/node_modules/npm-audit-report/lib/index.js b/node_modules/npm-audit-report/lib/index.js index 464004c17518a..9ee86be7915d8 100644 --- a/node_modules/npm-audit-report/lib/index.js +++ b/node_modules/npm-audit-report/lib/index.js @@ -15,9 +15,11 @@ module.exports = Object.assign((data, options = {}) => { color = true, unicode = true, indent = 2, - auditLevel = 'low' } = options + // CLI defaults this to `null` so the defaulting method above doesn't work + const auditLevel = options.auditLevel || 'low' + if (!data) throw Object.assign( new TypeError('ENOAUDITDATA'), diff --git a/node_modules/npm-audit-report/package.json b/node_modules/npm-audit-report/package.json index 66b4a6aa74b2c..c819b9608412a 100644 --- a/node_modules/npm-audit-report/package.json +++ b/node_modules/npm-audit-report/package.json @@ -1,6 +1,6 @@ { "name": "npm-audit-report", - "version": "2.1.4", + "version": "2.1.5", "description": "Given a response from the npm security api, render it into a variety of security reports", "main": "lib/index.js", "scripts": { @@ -26,8 +26,8 @@ "chalk": "^4.0.0" }, "devDependencies": { - "tap": "^14.10.7", - "require-inject": "^1.4.4" + "require-inject": "^1.4.4", + "tap": "^14.10.7" }, "directories": { "lib": "lib", diff --git a/node_modules/npm-bundled/README.md b/node_modules/npm-bundled/README.md deleted file mode 100644 index fcfb2322faf09..0000000000000 --- a/node_modules/npm-bundled/README.md +++ /dev/null @@ -1,48 +0,0 @@ -# npm-bundled - -Run this in a node package, and it'll tell you which things in -node_modules are bundledDependencies, or transitive dependencies of -bundled dependencies. - -[![Build Status](https://travis-ci.org/npm/npm-bundled.svg?branch=master)](https://travis-ci.org/npm/npm-bundled) - -## USAGE - -To get the list of deps at the top level that are bundled (or -transitive deps of a bundled dep) run this: - -```js -const bundled = require('npm-bundled') - -// async version -bundled({ path: '/path/to/pkg/defaults/to/cwd'}, (er, list) => { - // er means it had an error, which is _hella_ weird - // list is a list of package names, like `fooblz` or `@corp/blerg` - // the might not all be deps of the top level, because transitives -}) - -// async promise version -bundled({ path: '/path/to/pkg/defaults/to/cwd'}).then(list => { - // so promisey! - // actually the callback version returns a promise, too, it just - // attaches the supplied callback to the promise -}) - -// sync version, throws if there's an error -const list = bundled({ path: '/path/to/pkg/defaults/to/cwd'}) -``` - -That's basically all you need to know. If you care to dig into it, -you can also use the `bundled.Walker` and `bundled.WalkerSync` -classes to get fancy. 
- -This library does not write anything to the filesystem, but it _may_ -have undefined behavior if the structure of `node_modules` changes -while it's reading deps. - -All symlinks are followed. This means that it can lead to surprising -results if a symlinked bundled dependency has a missing dependency -that is satisfied at the top level. Since package creation resolves -symlinks as well, this is an edge case where package creation and -development environment are not going to be aligned, and is best -avoided. diff --git a/node_modules/npm-bundled/index.js b/node_modules/npm-bundled/index.js index 197a1bcb99a15..378ddc4c5ddb2 100644 --- a/node_modules/npm-bundled/index.js +++ b/node_modules/npm-bundled/index.js @@ -135,9 +135,11 @@ class BundleWalker extends EE { } childDep (dep) { - if (this.node_modules.indexOf(dep) !== -1 && !this.seen.has(dep)) { - this.seen.add(dep) - this.child(dep) + if (this.node_modules.indexOf(dep) !== -1) { + if (!this.seen.has(dep)) { + this.seen.add(dep) + this.child(dep) + } } else if (this.parent) { this.parent.childDep(dep) } diff --git a/node_modules/npm-bundled/package.json b/node_modules/npm-bundled/package.json index 2ce536e673ee3..cf20e297b0b63 100644 --- a/node_modules/npm-bundled/package.json +++ b/node_modules/npm-bundled/package.json @@ -1,6 +1,6 @@ { "name": "npm-bundled", - "version": "1.1.1", + "version": "1.1.2", "description": "list things in node_modules that are bundledDependencies, or transitive dependencies thereof", "main": "index.js", "repository": { diff --git a/node_modules/npm-install-checks/CHANGELOG.md b/node_modules/npm-install-checks/CHANGELOG.md deleted file mode 100644 index ae4f22fcf52c3..0000000000000 --- a/node_modules/npm-install-checks/CHANGELOG.md +++ /dev/null @@ -1,18 +0,0 @@ -# Change Log - -## v4.0 - -* Remove `checkCycle` and `checkGit`, as they are no longer used in npm v7 -* Synchronous throw-or-return API instead of taking a callback needlessly -* Modernize code and drop support for node versions less than 10 - -## v3 2016-01-12 - -* Change error messages to be more informative. -* checkEngine, when not in strict mode, now calls back with the error - object as the second argument instead of warning. -* checkCycle no longer logs when cycle errors are found. - -## v2 2015-01-20 - -* Remove checking of engineStrict in the package.json diff --git a/node_modules/npm-install-checks/README.md b/node_modules/npm-install-checks/README.md deleted file mode 100644 index e83356c1dd9ba..0000000000000 --- a/node_modules/npm-install-checks/README.md +++ /dev/null @@ -1,27 +0,0 @@ -# npm-install-checks - -Check the engines and platform fields in package.json - -## API - -Both functions will throw an error if the check fails, or return -`undefined` if everything is ok. - -Errors have a `required` and `current` fields. - -### .checkEngine(pkg, npmVer, nodeVer, force = false) - -Check if node/npm version is supported by the package. If it isn't -supported, an error is thrown. - -`force` argument will override the node version check, but not the npm -version check, as this typically would indicate that the current version of -npm is unable to install the package properly for some reason. - -Error code: 'EBADENGINE' - -### .checkPlatform(pkg, force) - -Check if OS/Arch is supported by the package. 
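Editorial aside (not part of the diff): the npm-install-checks README above documents `checkEngine` and `checkPlatform` without a code sample. A minimal hedged sketch of both checks — the package fields are invented for illustration, and the `EBADPLATFORM` error code for the platform check is the one noted just below:

```js
// Illustrative only: per the README, both checks throw on failure and return
// undefined when everything is ok; thrown errors carry `required` and `current`.
const { checkEngine, checkPlatform } = require('npm-install-checks')

const pkg = {
  name: 'example-pkg',                    // hypothetical package
  engines: { node: '>=14', npm: '>=7' },
  os: ['linux', 'darwin'],
  cpu: ['x64', 'arm64']
}

try {
  checkEngine(pkg, '7.6.3', process.versions.node) // throws EBADENGINE if unsupported
  checkPlatform(pkg)                               // throws EBADPLATFORM on os/cpu mismatch
} catch (er) {
  console.error(er.code, { required: er.required, current: er.current })
}
```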
- -Error code: 'EBADPLATFORM' diff --git a/node_modules/npm-normalize-package-bin/.github/settings.yml b/node_modules/npm-normalize-package-bin/.github/settings.yml deleted file mode 100644 index 4aaa0dd57e4ad..0000000000000 --- a/node_modules/npm-normalize-package-bin/.github/settings.yml +++ /dev/null @@ -1,2 +0,0 @@ ---- -_extends: 'open-source-project-boilerplate' diff --git a/node_modules/npm-normalize-package-bin/.npmignore b/node_modules/npm-normalize-package-bin/.npmignore deleted file mode 100644 index 3870bd5bb7207..0000000000000 --- a/node_modules/npm-normalize-package-bin/.npmignore +++ /dev/null @@ -1,24 +0,0 @@ -# ignore most things, include some others -/* -/.* - -!bin/ -!lib/ -!docs/ -!package.json -!package-lock.json -!README.md -!CONTRIBUTING.md -!LICENSE -!CHANGELOG.md -!example/ -!scripts/ -!tap-snapshots/ -!test/ -!.github/ -!.travis.yml -!.gitignore -!.gitattributes -!coverage-map.js -!map.js -!index.js diff --git a/node_modules/npm-normalize-package-bin/README.md b/node_modules/npm-normalize-package-bin/README.md deleted file mode 100644 index 65ba316a0d97e..0000000000000 --- a/node_modules/npm-normalize-package-bin/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# npm-normalize-package-bin - -Turn any flavor of allowable package.json bin into a normalized object. - -## API - -```js -const normalize = require('npm-normalize-package-bin') -const pkg = {name: 'foo', bin: 'bar'} -console.log(normalize(pkg)) // {name:'foo', bin:{foo: 'bar'}} -``` - -Also strips out weird dots and slashes to prevent accidental and/or -malicious bad behavior when the package is installed. diff --git a/node_modules/npm-package-arg/CHANGELOG.md b/node_modules/npm-package-arg/CHANGELOG.md deleted file mode 100644 index 390a3a3c4f2de..0000000000000 --- a/node_modules/npm-package-arg/CHANGELOG.md +++ /dev/null @@ -1,52 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -## [8.0.0](https://github.com/npm/npm-package-arg/compare/v7.0.0...v8.0.0) (2019-12-15) - - -### ⚠ BREAKING CHANGES - -* Dropping support for node 6 and 8. It'll probably -still work on those versions, but they are no longer supported or -tested, since npm v7 is moving away from them. - -* drop support for node 6 and 8 ([ba85e68](https://github.com/npm/npm-package-arg/commit/ba85e68555d6270f672c3d59da17672f744d0376)) - -<a name="7.0.0"></a> -# [7.0.0](https://github.com/npm/npm-package-arg/compare/v6.1.1...v7.0.0) (2019-11-11) - - -### deps - -* bump hosted-git-info to 3.0.2 ([68a4fc3](https://github.com/npm/npm-package-arg/commit/68a4fc3)), closes [/github.com/npm/hosted-git-info/pull/38#issuecomment-520243803](https://github.com//github.com/npm/hosted-git-info/pull/38/issues/issuecomment-520243803) - - -### BREAKING CHANGES - -* this drops support for ancient node versions. 
- - - -<a name="6.1.1"></a> -## [6.1.1](https://github.com/npm/npm-package-arg/compare/v6.1.0...v6.1.1) (2019-08-21) - - -### Bug Fixes - -* preserve drive letter on windows git file:// urls ([3909203](https://github.com/npm/npm-package-arg/commit/3909203)) - - - -<a name="6.1.0"></a> -# [6.1.0](https://github.com/npm/npm-package-arg/compare/v6.0.0...v6.1.0) (2018-04-10) - - -### Bug Fixes - -* **git:** Fix gitRange for git+ssh for private git ([#33](https://github.com/npm/npm-package-arg/issues/33)) ([647a0b3](https://github.com/npm/npm-package-arg/commit/647a0b3)) - - -### Features - -* **alias:** add `npm:` registry alias spec ([#34](https://github.com/npm/npm-package-arg/issues/34)) ([ab99f8e](https://github.com/npm/npm-package-arg/commit/ab99f8e)) diff --git a/node_modules/npm-package-arg/README.md b/node_modules/npm-package-arg/README.md deleted file mode 100644 index 847341b21a3b7..0000000000000 --- a/node_modules/npm-package-arg/README.md +++ /dev/null @@ -1,83 +0,0 @@ -# npm-package-arg - -[![Build Status](https://travis-ci.org/npm/npm-package-arg.svg?branch=master)](https://travis-ci.org/npm/npm-package-arg) - -Parses package name and specifier passed to commands like `npm install` or -`npm cache add`, or as found in `package.json` dependency sections. - -## EXAMPLES - -```javascript -var assert = require("assert") -var npa = require("npm-package-arg") - -// Pass in the descriptor, and it'll return an object -try { - var parsed = npa("@bar/foo@1.2") -} catch (ex) { - … -} -``` - -## USING - -`var npa = require('npm-package-arg')` - -### var result = npa(*arg*[, *where*]) - -* *arg* - a string that you might pass to `npm install`, like: -`foo@1.2`, `@bar/foo@1.2`, `foo@user/foo`, `http://x.com/foo.tgz`, -`git+https://github.com/user/foo`, `bitbucket:user/foo`, `foo.tar.gz`, -`../foo/bar/` or `bar`. If the *arg* you provide doesn't have a specifier -part, eg `foo` then the specifier will default to `latest`. -* *where* - Optionally the path to resolve file paths relative to. Defaults to `process.cwd()` - -**Throws** if the package name is invalid, a dist-tag is invalid or a URL's protocol is not supported. - -### var result = npa.resolve(*name*, *spec*[, *where*]) - -* *name* - The name of the module you want to install. For example: `foo` or `@bar/foo`. -* *spec* - The specifier indicating where and how you can get this module. Something like: -`1.2`, `^1.7.17`, `http://x.com/foo.tgz`, `git+https://github.com/user/foo`, -`bitbucket:user/foo`, `file:foo.tar.gz` or `file:../foo/bar/`. If not -included then the default is `latest`. -* *where* - Optionally the path to resolve file paths relative to. Defaults to `process.cwd()` - -**Throws** if the package name is invalid, a dist-tag is invalid or a URL's protocol is not supported. - -## RESULT OBJECT - -The objects that are returned by npm-package-arg contain the following -keys: - -* `type` - One of the following strings: - * `git` - A git repo - * `tag` - A tagged version, like `"foo@latest"` - * `version` - A specific version number, like `"foo@1.2.3"` - * `range` - A version range, like `"foo@2.x"` - * `file` - A local `.tar.gz`, `.tar` or `.tgz` file. - * `directory` - A local directory. - * `remote` - An http url (presumably to a tgz) -* `registry` - If true this specifier refers to a resource hosted on a - registry. This is true for `tag`, `version` and `range` types. -* `name` - If known, the `name` field expected in the resulting pkg. 
-* `scope` - If a name is something like `@org/module` then the `scope` - field will be set to `@org`. If it doesn't have a scoped name, then - scope is `null`. -* `escapedName` - A version of `name` escaped to match the npm scoped packages - specification. Mostly used when making requests against a registry. When - `name` is `null`, `escapedName` will also be `null`. -* `rawSpec` - The specifier part that was parsed out in calls to `npa(arg)`, - or the value of `spec` in calls to `npa.resolve(name, spec). -* `saveSpec` - The normalized specifier, for saving to package.json files. - `null` for registry dependencies. -* `fetchSpec` - The version of the specifier to be used to fetch this - resource. `null` for shortcuts to hosted git dependencies as there isn't - just one URL to try with them. -* `gitRange` - If set, this is a semver specifier to match against git tags with -* `gitCommittish` - If set, this is the specific committish to use with a git dependency. -* `hosted` - If `from === 'hosted'` then this will be a `hosted-git-info` - object. This property is not included when serializing the object as - JSON. -* `raw` - The original un-modified string that was provided. If called as - `npa.resolve(name, spec)` then this will be `name + '@' + spec`. diff --git a/node_modules/npm-package-arg/npa.js b/node_modules/npm-package-arg/npa.js index 6018dd608ed33..191befeb5e69d 100644 --- a/node_modules/npm-package-arg/npa.js +++ b/node_modules/npm-package-arg/npa.js @@ -3,16 +3,12 @@ module.exports = npa module.exports.resolve = resolve module.exports.Result = Result -let url -let HostedGit -let semver -let path_ -function path () { - if (!path_) path_ = require('path') - return path_ -} -let validatePackageName -let os +const url = require('url') +const HostedGit = require('hosted-git-info') +const semver = require('semver') +const path = global.FAKE_WINDOWS ? require('path').win32 : require('path') +const validatePackageName = require('validate-npm-package-name') +const { homedir } = require('os') const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS const hasSlashes = isWindows ? /\\|[/]/ : /[/]/ @@ -24,33 +20,30 @@ function npa (arg, where) { let name let spec if (typeof arg === 'object') { - if (arg instanceof Result && (!where || where === arg.where)) { + if (arg instanceof Result && (!where || where === arg.where)) return arg - } else if (arg.name && arg.rawSpec) { + else if (arg.name && arg.rawSpec) return npa.resolve(arg.name, arg.rawSpec, where || arg.where) - } else { + else return npa(arg.raw, where || arg.where) - } } const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@') const namePart = nameEndsAt > 0 ? 
arg.slice(0, nameEndsAt) : arg - if (isURL.test(arg)) { + if (isURL.test(arg)) spec = arg - } else if (isGit.test(arg)) { + else if (isGit.test(arg)) spec = `git+ssh://${arg}` - } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) { + else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) spec = arg - } else if (nameEndsAt > 0) { + else if (nameEndsAt > 0) { name = namePart spec = arg.slice(nameEndsAt + 1) } else { - if (!validatePackageName) validatePackageName = require('validate-npm-package-name') const valid = validatePackageName(arg) - if (valid.validForOldPackages) { + if (valid.validForOldPackages) name = arg - } else { + else spec = arg - } } return resolve(name, spec, where, arg) } @@ -62,27 +55,29 @@ function resolve (name, spec, where, arg) { raw: arg, name: name, rawSpec: spec, - fromArgument: arg != null + fromArgument: arg != null, }) - if (name) res.setName(name) + if (name) + res.setName(name) - if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) { + if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) return fromFile(res, where) - } else if (spec && /^npm:/i.test(spec)) { + else if (spec && /^npm:/i.test(spec)) return fromAlias(res, where) - } - if (!HostedGit) HostedGit = require('hosted-git-info') - const hosted = HostedGit.fromUrl(spec, { noGitPlus: true, noCommittish: true }) - if (hosted) { + + const hosted = HostedGit.fromUrl(spec, { + noGitPlus: true, + noCommittish: true, + }) + if (hosted) return fromHostedGit(res, hosted) - } else if (spec && isURL.test(spec)) { + else if (spec && isURL.test(spec)) return fromURL(res) - } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) { + else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) return fromFile(res, where) - } else { + else return fromRegistry(res) - } } function invalidPackageName (name, valid) { @@ -100,29 +95,29 @@ function Result (opts) { this.type = opts.type this.registry = opts.registry this.where = opts.where - if (opts.raw == null) { + if (opts.raw == null) this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec - } else { + else this.raw = opts.raw - } + this.name = undefined this.escapedName = undefined this.scope = undefined this.rawSpec = opts.rawSpec == null ? '' : opts.rawSpec this.saveSpec = opts.saveSpec this.fetchSpec = opts.fetchSpec - if (opts.name) this.setName(opts.name) + if (opts.name) + this.setName(opts.name) this.gitRange = opts.gitRange this.gitCommittish = opts.gitCommittish this.hosted = opts.hosted } Result.prototype.setName = function (name) { - if (!validatePackageName) validatePackageName = require('validate-npm-package-name') const valid = validatePackageName(name) - if (!valid.validForOldPackages) { + if (!valid.validForOldPackages) throw invalidPackageName(name, valid) - } + this.name = name this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar @@ -132,9 +127,11 @@ Result.prototype.setName = function (name) { Result.prototype.toString = function () { const full = [] - if (this.name != null && this.name !== '') full.push(this.name) + if (this.name != null && this.name !== '') + full.push(this.name) const spec = this.saveSpec || this.fetchSpec || this.rawSpec - if (spec != null && spec !== '') full.push(spec) + if (spec != null && spec !== '') + full.push(spec) return full.length ? 
full.join('@') : this.raw } @@ -148,46 +145,92 @@ function setGitCommittish (res, committish) { if (committish != null && committish.length >= 7 && committish.slice(0, 7) === 'semver:') { res.gitRange = decodeURIComponent(committish.slice(7)) res.gitCommittish = null - } else { + } else res.gitCommittish = committish === '' ? null : committish - } - return res -} - -const isAbsolutePath = /^[/]|^[A-Za-z]:/ - -function resolvePath (where, spec) { - if (isAbsolutePath.test(spec)) return spec - return path().resolve(where, spec) -} -function isAbsolute (dir) { - if (dir[0] === '/') return true - if (/^[A-Za-z]:/.test(dir)) return true - return false + return res } function fromFile (res, where) { - if (!where) where = process.cwd() + if (!where) + where = process.cwd() res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory' res.where = where - const spec = res.rawSpec.replace(/\\/g, '/') - .replace(/^file:[/]*([A-Za-z]:)/, '$1') // drive name paths on windows - .replace(/^file:(?:[/]*([~./]))?/, '$1') - if (/^~[/]/.test(spec)) { - // this is needed for windows and for file:~/foo/bar - if (!os) os = require('os') - res.fetchSpec = resolvePath(os.homedir(), spec.slice(2)) - res.saveSpec = 'file:' + spec - } else { - res.fetchSpec = resolvePath(where, spec) - if (isAbsolute(spec)) { - res.saveSpec = 'file:' + spec - } else { - res.saveSpec = 'file:' + path().relative(where, res.fetchSpec) + // always put the '/' on where when resolving urls, or else + // file:foo from /path/to/bar goes to /path/to/foo, when we want + // it to be /path/to/foo/bar + + let specUrl + let resolvedUrl + const prefix = (!/^file:/.test(res.rawSpec) ? 'file:' : '') + const rawWithPrefix = prefix + res.rawSpec + let rawNoPrefix = rawWithPrefix.replace(/^file:/, '') + try { + resolvedUrl = new url.URL(rawWithPrefix, `file://${path.resolve(where)}/`) + specUrl = new url.URL(rawWithPrefix) + } catch (originalError) { + const er = new Error('Invalid file: URL, must comply with RFC 8909') + throw Object.assign(er, { + raw: res.rawSpec, + spec: res, + where, + originalError, + }) + } + + // environment switch for testing + if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') { + // XXX backwards compatibility lack of compliance with 8909 + // Remove when we want a breaking change to come into RFC compliance. + if (resolvedUrl.host && resolvedUrl.host !== 'localhost') { + const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///') + resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`) + specUrl = new url.URL(rawSpec) + rawNoPrefix = rawSpec.replace(/^file:/, '') } + // turn file:/../foo into file:../foo + if (/^\/\.\.?(\/|$)/.test(rawNoPrefix)) { + const rawSpec = res.rawSpec.replace(/^file:\//, 'file:') + resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`) + specUrl = new url.URL(rawSpec) + rawNoPrefix = rawSpec.replace(/^file:/, '') + } + // XXX end 8909 violation backwards compatibility section + } + + // file:foo - relative url to ./foo + // file:/foo - absolute path /foo + // file:///foo - absolute path to /foo, no authority host + // file://localhost/foo - absolute path to /foo, on localhost + // file://foo - absolute path to / on foo host (error!) 
+ if (resolvedUrl.host && resolvedUrl.host !== 'localhost') { + const msg = `Invalid file: URL, must be absolute if // present` + throw Object.assign(new Error(msg), { + raw: res.rawSpec, + parsed: resolvedUrl, + }) + } + + // turn /C:/blah into just C:/blah on windows + let specPath = decodeURIComponent(specUrl.pathname) + let resolvedPath = decodeURIComponent(resolvedUrl.pathname) + if (isWindows) { + specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1') + resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1') } + + // replace ~ with homedir, but keep the ~ in the saveSpec + // otherwise, make it relative to where param + if (/^\/~(\/|$)/.test(specPath)) { + res.saveSpec = `file:${specPath.substr(1)}` + resolvedPath = path.resolve(homedir(), specPath.substr(3)) + } else if (!path.isAbsolute(rawNoPrefix)) + res.saveSpec = `file:${path.relative(where, resolvedPath)}` + else + res.saveSpec = `file:${path.resolve(resolvedPath)}` + + res.fetchSpec = path.resolve(where, resolvedPath) return res } @@ -217,12 +260,12 @@ function matchGitScp (spec) { const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i) return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && { fetchSpec: matched[1], - gitCommittish: matched[2] == null ? null : matched[2] + gitCommittish: matched[2] == null ? null : matched[2], } } function fromURL (res) { - if (!url) url = require('url') + // eslint-disable-next-line node/no-deprecated-api const urlparse = url.parse(res.rawSpec) res.saveSpec = res.rawSpec // check the protocol, and then see if it's git or not @@ -233,9 +276,10 @@ function fromURL (res) { case 'git+rsync:': case 'git+ftp:': case 'git+file:': - case 'git+ssh:': + case 'git+ssh:': { res.type = 'git' - const match = urlparse.protocol === 'git+ssh:' && matchGitScp(res.rawSpec) + const match = urlparse.protocol === 'git+ssh:' ? matchGitScp(res.rawSpec) + : null if (match) { setGitCommittish(res, match.gitCommittish) res.fetchSpec = match.fetchSpec @@ -251,6 +295,7 @@ function fromURL (res) { res.fetchSpec = url.format(urlparse) } break + } case 'http:': case 'https:': res.type = 'remote' @@ -266,12 +311,12 @@ function fromURL (res) { function fromAlias (res, where) { const subSpec = npa(res.rawSpec.substr(4), where) - if (subSpec.type === 'alias') { + if (subSpec.type === 'alias') throw new Error('nested aliases not supported') - } - if (!subSpec.registry) { + + if (!subSpec.registry) throw new Error('aliases only work for registry deps') - } + res.subSpec = subSpec res.registry = true res.type = 'alias' @@ -282,22 +327,21 @@ function fromAlias (res, where) { function fromRegistry (res) { res.registry = true - const spec = res.rawSpec === '' ? 'latest' : res.rawSpec + const spec = res.rawSpec === '' ? 'latest' : res.rawSpec.trim() // no save spec for registry components as we save based on the fetched // version, not on the argument so this can't compute that. 
res.saveSpec = null res.fetchSpec = spec - if (!semver) semver = require('semver') const version = semver.valid(spec, true) const range = semver.validRange(spec, true) - if (version) { + if (version) res.type = 'version' - } else if (range) { + else if (range) res.type = 'range' - } else { - if (encodeURIComponent(spec) !== spec) { + else { + if (encodeURIComponent(spec) !== spec) throw invalidTagName(spec) - } + res.type = 'tag' } return res diff --git a/node_modules/npm-package-arg/package.json b/node_modules/npm-package-arg/package.json index c460be828efcb..bf5f597e6d8df 100644 --- a/node_modules/npm-package-arg/package.json +++ b/node_modules/npm-package-arg/package.json @@ -1,6 +1,6 @@ { "name": "npm-package-arg", - "version": "8.1.1", + "version": "8.1.5", "description": "Parse the things that can be arguments to `npm install`", "main": "npa.js", "directories": { @@ -10,19 +10,25 @@ "npa.js" ], "dependencies": { - "hosted-git-info": "^3.0.6", - "semver": "^7.0.0", + "hosted-git-info": "^4.0.1", + "semver": "^7.3.4", "validate-npm-package-name": "^3.0.0" }, "devDependencies": { - "tap": "^14.10.2" + "@npmcli/lint": "^1.0.1", + "tap": "^15.0.9" }, "scripts": { "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "test": "tap", - "snap": "tap" + "snap": "tap", + "npmclilint": "npmcli-lint", + "lint": "npm run npmclilint -- \"*.*js\" \"test/**/*.*js\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint --", + "postsnap": "npm run lintfix --" }, "repository": { "type": "git", diff --git a/node_modules/npm-packlist/README.md b/node_modules/npm-packlist/README.md deleted file mode 100644 index 9ec86afcd26cb..0000000000000 --- a/node_modules/npm-packlist/README.md +++ /dev/null @@ -1,146 +0,0 @@ -# npm-packlist - -[![Build Status](https://travis-ci.com/npm/npm-packlist.svg?token=hHeDp9pQmz9kvsgRNVHy&branch=master)](https://travis-ci.com/npm/npm-packlist) - -Get a list of the files to add from a folder into an npm package. - -These can be handed to [tar](http://npm.im/tar) like so to make an npm -package tarball: - -```js -const packlist = require('npm-packlist') -const tar = require('tar') -const packageDir = '/path/to/package' -const packageTarball = '/path/to/package.tgz' - -packlist({ path: packageDir }) - .then(files => tar.create({ - prefix: 'package/', - cwd: packageDir, - file: packageTarball, - gzip: true - }, files)) - .then(_ => { - // tarball has been created, continue with your day - }) -``` - -This uses the following rules: - -1. If a `package.json` file is found, and it has a `files` list, - then ignore everything that isn't in `files`. Always include the - readme, license, notice, changes, changelog, and history files, if - they exist, and the package.json file itself. -2. If there's no `package.json` file (or it has no `files` list), and - there is a `.npmignore` file, then ignore all the files in the - `.npmignore` file. -3. If there's no `package.json` with a `files` list, and there's no - `.npmignore` file, but there is a `.gitignore` file, then ignore - all the files in the `.gitignore` file. -4. Everything in the root `node_modules` is ignored, unless it's a - bundled dependency. If it IS a bundled dependency, and it's a - symbolic link, then the target of the link is included, not the - symlink itself. -4. Unless they're explicitly included (by being in a `files` list, or - a `!negated` rule in a relevant `.npmignore` or `.gitignore`), - always ignore certain common cruft files: - - 1. 
.npmignore and .gitignore files (their effect is in the package - already, there's no need to include them in the package) - 2. editor junk like `.*.swp`, `._*` and `.*.orig` files - 3. `.npmrc` files (these may contain private configs) - 4. The `node_modules/.bin` folder - 5. Waf and gyp cruft like `/build/config.gypi` and `.lock-wscript` - 6. Darwin's `.DS_Store` files because wtf are those even - 7. `npm-debug.log` files at the root of a project - - You can explicitly re-include any of these with a `files` list in - `package.json` or a negated ignore file rule. - -Only the `package.json` file in the very root of the project is ever -inspected for a `files` list. Below the top level of the root package, -`package.json` is treated as just another file, and no package-specific -semantics are applied. - -### Interaction between `package.json` and `.npmignore` rules - -For simplicity, it is best to use _either_ a `files` list in `package.json` -_or_ a `.npmignore` file, and not both. If you only use one of these -methods, you can skip this documentation section. - -The `files` list in `package.json` is used to direct the exploration of the -tree. In other words, that's all the walker will ever look at when -exploring that level. - -In some cases this can lead to a `.npmignore` file being ignored. If a -_directory_ is listed in `files`, then any rules in a root or nested -`.npmignore` files will be honored. - -For example, with this package.json: - -```json -{ - "files": [ "dir" ] -} -``` - -a `.npmignore` file at `dir/.npmignore` (and any subsequent -sub-directories) will be honored. However, a `.npmignore` at the root -level will be skipped. - -Conversely, with this package.json: - -``` -{ - "files": ["dir/subdir"] -} -``` - -a `.npmignore` file at `dir/.npmignore` will not be honored. - -Any specific file matched by a glob or filename in the package.json `files` -list will be included, and cannot be excluded by any `.npmignore` files in -nested directories, or by a `.npmignore` file in the root package -directory, unless that root `.npmignore` file is also in the `files` list. - -The previous (v1) implementation used in npm 6 and below treated -`package.json` as a special sort of "reverse ignore" file. That is, it was -parsed and handled as if it was a `.npmignore` file with `!` prepended to -all of the globs in the `files` list. In order to include children of a -directory listed in `files`, they would _also_ have `/**` appended to them. - -This is tricky to explain, but is a significant improvement over the -previous (v1) implementation used in npm 6 and below, with the following -beneficial properties: - -- If you have `{"files":["lib"]}` in `package.json`, then the walker will - still ignore files such as `lib/.DS_Store` and `lib/.foo.swp`. The - previous implementation would include these files, as they'd be matched - by the computed `!lib/**` ignore rule. -- If you have `{"files":["lib/a.js","lib/b.js"]}` in `package.json`, and a - `lib/.npmignore` containing `a.js`, then the walker will still include - the two files indicated in `package.json`, and ignore the - `lib/.npmignore` file. The previous implementation would mark these - files for inclusion, but then _exclude_ them when it came to the nested - `.npmignore` file. (Ignore file semantics dictate that a "closer" ignore - file always takes precedence.) -- A file in `lib/pkg-template/package.json` will be included, and its - `files` list will not have any bearing on other files being included or - skipped. 
When treating `package.json` as just Yet Another ignore file, - this was not the case, leading to difficulty for modules that aim to - initialize a project. - -In general, this walk should work as a reasonable developer would expect. -Matching human expectation is tricky business, and if you find cases where -it violates those expectations, [please let us -know](https://github.com/npm/npm-packlist/issues). - -## API - -Same API as [ignore-walk](http://npm.im/ignore-walk), just hard-coded -file list and rule sets. - -The `Walker` and `WalkerSync` classes take a `bundled` argument, which -is a list of package names to include from node_modules. When calling -the top-level `packlist()` and `packlist.sync()` functions, this -module calls into `npm-bundled` directly. diff --git a/node_modules/npm-packlist/bin/index.js b/node_modules/npm-packlist/bin/index.js index f06feffd9b55a..40811db7d32e7 100755 --- a/node_modules/npm-packlist/bin/index.js +++ b/node_modules/npm-packlist/bin/index.js @@ -12,13 +12,14 @@ process.argv.slice(2).forEach(arg => { dirs.push(arg) }) -const sort = list => doSort ? list.sort((a, b) => a.localeCompare(b)) : list +const sort = list => doSort ? list.sort((a, b) => a.localeCompare(b, 'en')) : list const packlist = require('../') if (!dirs.length) console.log(sort(packlist.sync({ path: process.cwd() })).join('\n')) -else +else { dirs.forEach(path => { console.log(`> ${path}`) console.log(sort(packlist.sync({ path })).join('\n')) }) +} diff --git a/node_modules/npm-packlist/index.js b/node_modules/npm-packlist/index.js index cf87b18528b01..f498fa008ceca 100644 --- a/node_modules/npm-packlist/index.js +++ b/node_modules/npm-packlist/index.js @@ -26,11 +26,10 @@ const normalizePackageBin = require('npm-normalize-package-bin') // localized documentation and other use cases. Adding a `/` to // these rules, while tempting and arguably more "correct", is a // significant change that will break existing use cases. -const packageMustHaveFileNames = - 'readme|copying|license|licence|notice|changes|changelog|history' +const packageMustHaveFileNames = 'readme|copying|license|licence' const packageMustHaves = `@(${packageMustHaveFileNames}){,.*[^~$]}` -const packageMustHavesRE = new RegExp(`^(${packageMustHaveFileNames})(\\..*[^~\$])?$`, 'i') +const packageMustHavesRE = new RegExp(`^(${packageMustHaveFileNames})(\\..*[^~$])?$`, 'i') const fs = require('fs') const glob = require('glob') @@ -76,13 +75,11 @@ const npmWalker = Class => class Walker extends Class { 'package.json', '.npmignore', '.gitignore', - packageNecessaryRules + packageNecessaryRules, ] opt.includeEmpty = false opt.path = opt.path || process.cwd() - const dirName = path.basename(opt.path) - const parentName = path.basename(path.dirname(opt.path)) // only follow links in the root node_modules folder, because if those // folders are included, it's because they're bundled, and bundles @@ -90,7 +87,7 @@ const npmWalker = Class => class Walker extends Class { // This regexp tests to see that we're either a node_modules folder, // or a @scope within a node_modules folder, in the root's node_modules // hierarchy (ie, not in test/foo/node_modules/ or something). - const followRe = /^(?:\/node_modules\/(?:@[^\/]+\/[^\/]+|[^\/]+)\/)*\/node_modules(?:\/@[^\/]+)?$/ + const followRe = /^(?:\/node_modules\/(?:@[^/]+\/[^/]+|[^/]+)\/)*\/node_modules(?:\/@[^/]+)?$/ const rootPath = opt.parent ? 
opt.parent.root : opt.path const followTestPath = opt.path.replace(/\\/g, '/').substr(rootPath.length) opt.follow = followRe.test(followTestPath) @@ -104,10 +101,10 @@ const npmWalker = Class => class Walker extends Class { this.bundled = opt.bundled || [] this.bundledScopes = Array.from(new Set( this.bundled.filter(f => /^@/.test(f)) - .map(f => f.split('/')[0]))) + .map(f => f.split('/')[0]))) const rules = defaultRules.join('\n') + '\n' this.packageJsonCache = opt.packageJsonCache || new Map() - super.onReadIgnoreFile(rootBuiltinRules, rules, _=>_) + super.onReadIgnoreFile(rootBuiltinRules, rules, _ => _) } else { this.bundled = [] this.bundledScopes = [] @@ -129,9 +126,8 @@ const npmWalker = Class => class Walker extends Class { // to be in the state the user wants to include them, and // a package.json somewhere else might be a template or // test or something else entirely. - if (this.parent || !entries.includes('package.json')) { + if (this.parent || !entries.includes('package.json')) return super.onReaddir(entries) - } // when the cache has been seeded with the root manifest, // we must respect that (it may differ from the filesystem) @@ -141,9 +137,8 @@ const npmWalker = Class => class Walker extends Class { const pkg = this.packageJsonCache.get(ig) // fall back to filesystem when seeded manifest is invalid - if (!pkg || typeof pkg !== 'object') { + if (!pkg || typeof pkg !== 'object') return this.readPackageJson(entries) - } // feels wonky, but this ensures package bin is _always_ // normalized, as well as guarding against invalid JSON @@ -175,13 +170,20 @@ const npmWalker = Class => class Walker extends Class { '/package.json', '/npm-shrinkwrap.json', '!/package-lock.json', - packageMustHaves, + packageMustHaves ) return files } getPackageFiles (entries, pkg) { try { + // XXX this could be changed to use read-package-json-fast + // which handles the normalizing of bins for us, and simplifies + // the test for bundleDependencies and bundledDependencies later. + // HOWEVER if we do this, we need to be sure that we're careful + // about what we write back out since rpj-fast removes some fields + // that the user likely wants to keep. it also would add a second + // file read that we would want to optimize away. pkg = normalizePackageBin(JSON.parse(pkg.toString())) } catch (er) { // not actually a valid package.json @@ -202,7 +204,7 @@ const npmWalker = Class => class Walker extends Class { // the files list as the effective readdir result, that means it // looks like we don't have a node_modules folder at all unless we // include it here. - if (pkg.bundleDependencies && entries.includes('node_modules')) + if ((pkg.bundleDependencies || pkg.bundledDependencies) && entries.includes('node_modules')) pkg.files.push('node_modules') const patterns = Array.from(new Set(pkg.files)).reduce((set, pattern) => { @@ -226,9 +228,8 @@ const npmWalker = Class => class Walker extends Class { return this.emit('error', er) results[i] = { negate, fileList } - if (--n === 0) { + if (--n === 0) processResults(results) - } } const processResults = results => { for (const {negate, fileList} of results) { @@ -266,7 +267,7 @@ const npmWalker = Class => class Walker extends Class { filterEntry (entry, partial) { // get the partial path from the root of the walk const p = this.path.substr(this.root.length + 1) - const pkgre = /^node_modules\/(@[^\/]+\/?[^\/]+|[^\/]+)(\/.*)?$/ + const pkgre = /^node_modules\/(@[^/]+\/?[^/]+|[^/]+)(\/.*)?$/ const isRoot = !this.parent const pkg = isRoot && pkgre.test(entry) ? 
entry.replace(pkgre, '$1') : null @@ -276,7 +277,7 @@ const npmWalker = Class => class Walker extends Class { return ( // if we're in a bundled package, check with the parent. /^node_modules($|\/)/i.test(p) ? this.parent.filterEntry( - this.basename + '/' + entry, partial) + this.basename + '/' + entry, partial) // if package is bundled, all files included // also include @scope dirs for bundled scoped deps @@ -284,8 +285,8 @@ const npmWalker = Class => class Walker extends Class { // However, this only matters if we're in the root. // node_modules folders elsewhere, like lib/node_modules, // should be included normally unless ignored. - : pkg ? -1 !== this.bundled.indexOf(pkg) || - -1 !== this.bundledScopes.indexOf(pkg) + : pkg ? this.bundled.indexOf(pkg) !== -1 || + this.bundledScopes.indexOf(pkg) !== -1 // only walk top node_modules if we want to bundle something : rootNM ? !!this.bundled.length @@ -301,10 +302,10 @@ const npmWalker = Class => class Walker extends Class { ? true // package-lock never included - : isRoot && entry === 'package-lock.json' ? false + : isRoot && entry === 'package-lock.json' ? false - // otherwise, follow ignore-walk's logic - : super.filterEntry(entry, partial) + // otherwise, follow ignore-walk's logic + : super.filterEntry(entry, partial) ) } @@ -447,12 +448,11 @@ const sort = (a, b) => { const basea = path.basename(a).toLowerCase() const baseb = path.basename(b).toLowerCase() - return exta.localeCompare(extb) || - basea.localeCompare(baseb) || - a.localeCompare(b) + return exta.localeCompare(extb, 'en') || + basea.localeCompare(baseb, 'en') || + a.localeCompare(b, 'en') } - module.exports = walk walk.sync = walkSync walk.Walker = Walker diff --git a/node_modules/npm-packlist/package.json b/node_modules/npm-packlist/package.json index 6f60521059a04..49fa947547b6c 100644 --- a/node_modules/npm-packlist/package.json +++ b/node_modules/npm-packlist/package.json @@ -1,6 +1,6 @@ { "name": "npm-packlist", - "version": "2.1.4", + "version": "2.2.2", "description": "Get a list of the files to add from a folder into an npm package", "directories": { "test": "test" @@ -15,25 +15,38 @@ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)", "license": "ISC", "files": [ + "bin/index.js", "index.js" ], "devDependencies": { + "eslint": "^7.25.0", + "eslint-plugin-import": "^2.22.1", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^5.1.0", + "eslint-plugin-standard": "^5.0.0", "mutate-fs": "^2.1.1", - "require-inject": "^1.4.4", - "tap": "^14.10.7" + "tap": "^15.0.6" }, "scripts": { "test": "tap", + "posttest": "npm run lint", "snap": "tap", + "postsnap": "npm run lintfix", "preversion": "npm test", "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags" + "prepublishOnly": "git push origin --follow-tags", + "eslint": "eslint", + "lint": "npm run eslint -- index.js bin/index.js \"test/**/*.js\"", + "lintfix": "npm run lint -- --fix" }, "repository": { "type": "git", "url": "git+https://github.com/npm/npm-packlist.git" }, "tap": { + "test-env": [ + "LC_ALL=sk" + ], "check-coverage": true, "nyc-arg": [ "--include=index.js", diff --git a/node_modules/npm-pick-manifest/CHANGELOG.md b/node_modules/npm-pick-manifest/CHANGELOG.md deleted file mode 100644 index a4ee13e92ab45..0000000000000 --- a/node_modules/npm-pick-manifest/CHANGELOG.md +++ /dev/null @@ -1,219 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. 
See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -## [6.1.0](https://github.com/npm/npm-pick-manifest/compare/v6.0.0...v6.1.0) (2020-04-07) - - -### Features - -* add 'avoid' semver range option ([c64973d](https://github.com/npm/npm-pick-manifest/commit/c64973d63ddf6797edf41c20df641f816d30ff03)) -* add avoidStrict option to strictly avoid ([c268796](https://github.com/npm/npm-pick-manifest/commit/c2687967b6294f5ce01aa6b59071e79272dc57de)), closes [#30](https://github.com/npm/npm-pick-manifest/issues/30) - -## [6.0.0](https://github.com/npm/npm-pick-manifest/compare/v5.0.0...v6.0.0) (2020-02-18) - - -### ⚠ BREAKING CHANGES - -* 'enjoyBy' is no longer an acceptable alias. - -### Features - -* add GitHub Actions file for ci ([8985247](https://github.com/npm/npm-pick-manifest/commit/898524727fa157f46fdf4eb0c11148ae4808226b)) - - -### Bug Fixes - -* Handle edge cases around before:Date and filtering staged publishes ([ed2f92e](https://github.com/npm/npm-pick-manifest/commit/ed2f92e7fdc9cc7836b13ebc73e17d8fd296a07e)) -* remove figgy pudding ([c24fed2](https://github.com/npm/npm-pick-manifest/commit/c24fed25b8f77fbbcc3107030f2dfed55fa54222)) -* remove outdated cruft from docs ([aae7ef7](https://github.com/npm/npm-pick-manifest/commit/aae7ef7625ddddbac0548287e5d57b8f76593322)) -* update some missing {loose:true} semver configs ([4015424](https://github.com/npm/npm-pick-manifest/commit/40154244a3fe1af86462bc1d6165199fc3315c10)) -* Use canonical 'before' config name ([029de59](https://github.com/npm/npm-pick-manifest/commit/029de59bda6d3376f03760a00efe4ac9d997c623)) - -## [5.0.0](https://github.com/npm/npm-pick-manifest/compare/v4.0.0...v5.0.0) (2019-12-15) - - -### ⚠ BREAKING CHANGES - -* This drops support for node < 10. - -* normalize settings, drop old nodes, update deps ([dc2e61c](https://github.com/npm/npm-pick-manifest/commit/dc2e61cc06bd19e079128e77397df7593741da50)) - -<a name="4.0.0"></a> -# [4.0.0](https://github.com/npm/npm-pick-manifest/compare/v3.0.2...v4.0.0) (2019-11-11) - - -### deps - -* bump npm-package-arg to v7 ([42c76d8](https://github.com/npm/npm-pick-manifest/commit/42c76d8)), closes [/github.com/npm/hosted-git-info/pull/38#issuecomment-520243803](https://github.com//github.com/npm/hosted-git-info/pull/38/issues/issuecomment-520243803) - - -### BREAKING CHANGES - -* this drops support for ancient node versions. - - - -<a name="3.0.2"></a> -## [3.0.2](https://github.com/npm/npm-pick-manifest/compare/v3.0.1...v3.0.2) (2019-08-30) - - - -<a name="3.0.1"></a> -## [3.0.1](https://github.com/npm/npm-pick-manifest/compare/v3.0.0...v3.0.1) (2019-08-28) - - -### Bug Fixes - -* throw 403 for forbidden major/minor versions ([003286e](https://github.com/npm/npm-pick-manifest/commit/003286e)), closes [#2](https://github.com/npm/npm-pick-manifest/issues/2) - - - -<a name="3.0.0"></a> -# [3.0.0](https://github.com/npm/npm-pick-manifest/compare/v2.2.3...v3.0.0) (2019-08-20) - - -### Features - -* throw forbidden error when package is blocked by policy ([ad2a962](https://github.com/npm/npm-pick-manifest/commit/ad2a962)), closes [#1](https://github.com/npm/npm-pick-manifest/issues/1) - - -### BREAKING CHANGES - -* This adds a new error code when package versions are -blocked. 
- -PR-URL: https://github.com/npm/npm-pick-manifest/pull/1 -Credit: @claudiahdz - - - -<a name="2.2.3"></a> -## [2.2.3](https://github.com/npm/npm-pick-manifest/compare/v2.2.2...v2.2.3) (2018-10-31) - - -### Bug Fixes - -* **enjoyBy:** rework semantics for enjoyBy again ([5e89b62](https://github.com/npm/npm-pick-manifest/commit/5e89b62)) - - - -<a name="2.2.2"></a> -## [2.2.2](https://github.com/npm/npm-pick-manifest/compare/v2.2.1...v2.2.2) (2018-10-31) - - -### Bug Fixes - -* **enjoyBy:** rework semantics for enjoyBy ([5684f45](https://github.com/npm/npm-pick-manifest/commit/5684f45)) - - - -<a name="2.2.1"></a> -## [2.2.1](https://github.com/npm/npm-pick-manifest/compare/v2.2.0...v2.2.1) (2018-10-30) - - - -<a name="2.2.0"></a> -# [2.2.0](https://github.com/npm/npm-pick-manifest/compare/v2.1.0...v2.2.0) (2018-10-30) - - -### Bug Fixes - -* **audit:** npm audit fix --force ([d5ae6c4](https://github.com/npm/npm-pick-manifest/commit/d5ae6c4)) - - -### Features - -* **enjoyBy:** add opts.enjoyBy option to filter versions by date ([0b8a790](https://github.com/npm/npm-pick-manifest/commit/0b8a790)) - - - -<a name="2.1.0"></a> -# [2.1.0](https://github.com/npm/npm-pick-manifest/compare/v2.0.1...v2.1.0) (2017-10-18) - - -### Features - -* **selection:** allow manually disabling deprecation skipping ([0d239d3](https://github.com/npm/npm-pick-manifest/commit/0d239d3)) - - - -<a name="2.0.1"></a> -## [2.0.1](https://github.com/npm/npm-pick-manifest/compare/v2.0.0...v2.0.1) (2017-10-18) - - - -<a name="2.0.0"></a> -# [2.0.0](https://github.com/npm/npm-pick-manifest/compare/v1.0.4...v2.0.0) (2017-10-03) - - -### Bug Fixes - -* **license:** relicense project according to npm policy (#3) ([ed743a0](https://github.com/npm/npm-pick-manifest/commit/ed743a0)) - - -### Features - -* **selection:** Avoid matching deprecated packages if possible ([3fc6c3a](https://github.com/npm/npm-pick-manifest/commit/3fc6c3a)) - - -### BREAKING CHANGES - -* **selection:** deprecated versions may be skipped now -* **license:** This moves the license from CC0 to ISC and properly documents the copyright as belonging to npm, Inc. - - - -<a name="1.0.4"></a> -## [1.0.4](https://github.com/npm/npm-pick-manifest/compare/v1.0.3...v1.0.4) (2017-06-29) - - -### Bug Fixes - -* **npa:** bump npa version for bugfixes ([7cdaca7](https://github.com/npm/npm-pick-manifest/commit/7cdaca7)) -* **semver:** use loose semver parsing for *all* ops ([bbc0daa](https://github.com/npm/npm-pick-manifest/commit/bbc0daa)) - - - -<a name="1.0.3"></a> -## [1.0.3](https://github.com/npm/npm-pick-manifest/compare/v1.0.2...v1.0.3) (2017-05-04) - - -### Bug Fixes - -* **semver:** use semver.clean() instead ([f4133b5](https://github.com/npm/npm-pick-manifest/commit/f4133b5)) - - - -<a name="1.0.2"></a> -## [1.0.2](https://github.com/npm/npm-pick-manifest/compare/v1.0.1...v1.0.2) (2017-05-04) - - -### Bug Fixes - -* **picker:** spaces in `wanted` prevented match ([97a7d0a](https://github.com/npm/npm-pick-manifest/commit/97a7d0a)) - - - -<a name="1.0.1"></a> -## [1.0.1](https://github.com/npm/npm-pick-manifest/compare/v1.0.0...v1.0.1) (2017-04-24) - - -### Bug Fixes - -* **deps:** forgot to add semver ([1876f4f](https://github.com/npm/npm-pick-manifest/commit/1876f4f)) - - - -<a name="1.0.0"></a> -# 1.0.0 (2017-04-24) - - -### Features - -* **api:** initial implementation. 
([b086912](https://github.com/npm/npm-pick-manifest/commit/b086912)) - - -### BREAKING CHANGES - -* **api:** ex nihilo diff --git a/node_modules/npm-pick-manifest/README.md b/node_modules/npm-pick-manifest/README.md deleted file mode 100644 index 26ee43e05e531..0000000000000 --- a/node_modules/npm-pick-manifest/README.md +++ /dev/null @@ -1,157 +0,0 @@ -# npm-pick-manifest [![npm version](https://img.shields.io/npm/v/npm-pick-manifest.svg)](https://npm.im/npm-pick-manifest) [![license](https://img.shields.io/npm/l/npm-pick-manifest.svg)](https://npm.im/npm-pick-manifest) [![Travis](https://img.shields.io/travis/npm/npm-pick-manifest.svg)](https://travis-ci.org/npm/npm-pick-manifest) [![Coverage Status](https://coveralls.io/repos/github/npm/npm-pick-manifest/badge.svg?branch=latest)](https://coveralls.io/github/npm/npm-pick-manifest?branch=latest) - -[`npm-pick-manifest`](https://github.com/npm/npm-pick-manifest) is a standalone -implementation of [npm](https://npmjs.com)'s semver range resolution algorithm. - -## Install - -`$ npm install --save npm-pick-manifest` - -## Table of Contents - -* [Example](#example) -* [Features](#features) -* [API](#api) - * [`pickManifest()`](#pick-manifest) - -### Example - -```javascript -const pickManifest = require('npm-pick-manifest') - -fetch('https://registry.npmjs.org/npm-pick-manifest').then(res => { - return res.json() -}).then(packument => { - return pickManifest(packument, '^1.0.0') -}) // get same manifest as npm would get if you `npm i npm-pick-manifest@^1.0.0` -``` - -### Features - -* Uses npm's exact [semver resolution algorithm](http://npm.im/semver). -* Supports ranges, tags, and versions. -* Prefers non-deprecated versions to deprecated versions. -* Prefers versions whose `engines` requirements are satisfied over those - that will raise a warning or error at install time. - -### API - -#### <a name="pick-manifest"></a> `> pickManifest(packument, selector, [opts]) -> manifest` - -Returns the manifest that best matches `selector`, or throws an error. - -Packuments are anything returned by metadata URLs from the npm registry. That -is, they're objects with the following shape (only fields used by -`npm-pick-manifest` included): - -```javascript -{ - name: 'some-package', - 'dist-tags': { - foo: '1.0.1' - }, - versions: { - '1.0.0': { version: '1.0.0' }, - '1.0.1': { version: '1.0.1' }, - '1.0.2': { version: '1.0.2' }, - '2.0.0': { version: '2.0.0' } - } -} -``` - -The algorithm will follow npm's algorithm for semver resolution, and only -`tag`, `range`, and `version` selectors are supported. - -The function will throw `ETARGET` if there was no matching manifest, and -`ENOVERSIONS` if the packument object has no valid versions in `versions`. -If the only matching manifest is included in a `policyRestrictions` section -of the packument, then an `E403` is raised. - -#### <a name="pick-manifest-options"></a> Options - -All options are optional. - -* `includeStaged` - Boolean, default `false`. Include manifests in the - `stagedVersions.versions` set, to support installing [staged - packages](https://github.com/npm/rfcs/pull/92) when appropriate. Note - that staged packages are always treated as lower priority than actual - publishes, even when `includeStaged` is set. -* `defaultTag` - String, default `'latest'`. The default `dist-tag` to - install when no specifier is provided. Note that the version indicated - by this specifier will be given top priority if it matches a supplied - semver range. 
-* `before` - String, Date, or Number, default `null`. This is passed to - `new Date()`, so anything that works there will be valid. Do not - consider _any_ manifests that were published after the date indicated. - Note that this is only relevant when the packument includes a `time` - field listing the publish date of all the packages. -* `nodeVersion` - String, default `process.version`. The Node.js version - to use when checking manifests for `engines` requirement satisfaction. -* `npmVersion` - String, default `null`. The npm version to use when - checking manifest for `engines` requirement satisfaction. (If `null`, - then this particular check is skipped.) -* `avoid` - String, default `null`. A SemVer range of - versions that should be avoided. An avoided version MAY be selected if - there is no other option, so when using this for version selection ensure - that you check the result against the range to see if there was no - alternative available. -* `avoidStrict` Boolean, default `false`. If set to true, then - `pickManifest` will never return a version in the `avoid` range. If the - only available version in the `wanted` range is a version that should be - avoided, then it will return a version _outside_ the `wanted` range, - preferring to do so without making a SemVer-major jump, if possible. If - there are no versions outside the `avoid` range, then throw an - `ETARGET` error. It does this by calling pickManifest first with the - `wanted` range, then with a `^` affixed to the version returned by the - `wanted` range, and then with a `*` version range, and throwing if - nothing could be found to satisfy the avoidance request. - -Return value is the manifest as it exists in the packument, possibly -decorated with the following boolean flags: - -* `_shouldAvoid` The version is in the `avoid` range. Watch out! -* `_outsideDependencyRange` The version is outside the `wanted` range, - because `avoidStrict: true` was set. -* `_isSemVerMajor` The `_outsideDependencyRange` result is a SemVer-major - step up from the version returned by the `wanted` range. - -### Algorithm - -1. Create list of all versions in `versions`, - `policyRestrictions.versions`, and (if `includeStaged` is set) - `stagedVersions.versions`. -2. If a `dist-tag` is requested, - 1. If the manifest is not after the specified `before` date, then - select that from the set. - 2. If the manifest is after the specified `before` date, then re-start - the selection looking for the highest SemVer range that is equal to - or less than the `dist-tag` target. -3. If a specific version is requested, - 1. If the manifest is not after the specified `before` date, then - select the specified manifest. - 2. If the manifest is after the specified `before` date, then raise - `ETARGET` error. (NB: this is a breaking change from v5, where a - specified version would override the `before` setting.) -4. (At this point we know a range is requested.) -5. If the `defaultTag` refers to a `dist-tag` that satisfies the range (or - if the range is `'*'` or `''`), and the manifest is published before the - `before` setting, then select that manifest. -6. If nothing is yet selected, sort by the following heuristics in order, - and select the top item: - 1. Prioritize versions that are not in the `avoid` range over those - that are. - 2. Prioritize versions that are not in `policyRestrictions` over those - that are. - 3. Prioritize published versions over staged versions. - 4. 
Prioritize versions that are not deprecated, and which have a - satisfied engines requirement, over those that are either deprecated - or have an engines mismatch. - 5. Prioritize versions that have a satisfied engines requirement over - those that do not. - 6. Prioritize versions that are not are not deprecated (but have a - mismatched engines requirement) over those that are deprecated. - 7. Prioritize higher SemVer precedence over lower SemVer precedence. -7. If no manifest was selected, raise an `ETARGET` error. -8. If the selected item is in the `policyRestrictions.versions` list, raise - an `E403` error. -9. Return the selected manifest. diff --git a/node_modules/npm-pick-manifest/index.js b/node_modules/npm-pick-manifest/index.js index 2b3ea6ffa4930..695450524dc13 100644 --- a/node_modules/npm-pick-manifest/index.js +++ b/node_modules/npm-pick-manifest/index.js @@ -3,6 +3,7 @@ const npa = require('npm-package-arg') const semver = require('semver') const { checkEngine } = require('npm-install-checks') +const normalizeBin = require('npm-normalize-package-bin') const engineOk = (manifest, npmVersion, nodeVersion) => { try { @@ -183,7 +184,8 @@ const pickManifest = (packument, wanted, opts) => { } module.exports = (packument, wanted, opts = {}) => { - const picked = pickManifest(packument, wanted, opts) + const mani = pickManifest(packument, wanted, opts) + const picked = mani && normalizeBin(mani) const policyRestrictions = packument.policyRestrictions const restricted = (policyRestrictions && policyRestrictions.versions) || {} diff --git a/node_modules/npm-pick-manifest/package.json b/node_modules/npm-pick-manifest/package.json index 805f5ac23a846..4b4866cbf8832 100644 --- a/node_modules/npm-pick-manifest/package.json +++ b/node_modules/npm-pick-manifest/package.json @@ -1,6 +1,6 @@ { "name": "npm-pick-manifest", - "version": "6.1.0", + "version": "6.1.1", "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.", "main": "index.js", "files": [ @@ -9,12 +9,11 @@ "scripts": { "coverage": "tap", "lint": "standard", - "postrelease": "npm publish", + "test": "tap", "posttest": "npm run lint", - "prepublishOnly": "git push --follow-tags", - "prerelease": "npm t", - "release": "standard-version -s", - "test": "tap" + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" }, "repository": "https://github.com/npm/npm-pick-manifest", "keywords": [ @@ -30,13 +29,13 @@ "license": "ISC", "dependencies": { "npm-install-checks": "^4.0.0", - "npm-package-arg": "^8.0.0", - "semver": "^7.0.0" + "npm-normalize-package-bin": "^1.0.1", + "npm-package-arg": "^8.1.2", + "semver": "^7.3.4" }, "devDependencies": { "standard": "^14.3.1", - "standard-version": "^7.0.1", - "tap": "^14.10.2" + "tap": "^14.11.0" }, "tap": { "check-coverage": true diff --git a/node_modules/npm-profile/CHANGELOG.md b/node_modules/npm-profile/CHANGELOG.md deleted file mode 100644 index 3205cf532299b..0000000000000 --- a/node_modules/npm-profile/CHANGELOG.md +++ /dev/null @@ -1,62 +0,0 @@ -# v5.0.0 (2020-02-27) - -- Drop the CLI from the project, just maintain the library -- Drop support for EOL Node.js versions -- Remove `Promise` option, just use native Promises -- Remove `figgy-pudding` -- Use `npm-registry-fetch` v8 -- fix: do not try to open invalid URLs for WebLogin - -# v4.0.3 (2020-02-27) - -- fix: do not try to open invalid URLs for WebLogin - -# v4.0.2 (2019-07-16) - -- Update `npm-registry-fetch` 
to 4.0.0 - -# v4.0.1 (2018-08-29) - -- `opts.password` needs to be base64-encoded when passed in for login -- Bump `npm-registry-fetch` dep because we depend on `opts.forceAuth` - -# v4.0.0 (2018-08-28) - -## BREAKING CHANGES: - -- Networking and auth-related options now use the latest [`npm-registry-fetch` config format](https://www.npmjs.com/package/npm-registry-fetch#fetch-opts). - -# v3.0.2 (2018-06-07) - -- Allow newer make-fetch-happen. -- Report 500s from weblogin end point as unsupported. -- EAUTHUNKNOWN errors were incorrectly reported as EAUTHIP. - -# v3.0.1 (2018-02-18) - -- Log `npm-notice` headers - -# v3.0.0 (2018-02-18) - -## BREAKING CHANGES: - -- profile.login() and profile.adduser() take 2 functions: opener() and - prompter(). opener is used when we get the url couplet from the - registry. prompter is used if web-based login fails. -- Non-200 status codes now always throw. Previously if the `content.error` - property was set, `content` would be returned. Content is available on the - thrown error object in the `body` property. - -## FEATURES: - -- The previous adduser is available as adduserCouch -- The previous login is available as loginCouch -- New loginWeb and adduserWeb commands added, which take an opener - function to open up the web browser. -- General errors have better error message reporting - -## FIXES: - -- General errors now correctly include the URL. -- Missing user errors from Couch are now thrown. (As was always intended.) -- Many errors have better stacktrace filtering. diff --git a/node_modules/npm-profile/README.md b/node_modules/npm-profile/README.md deleted file mode 100644 index 9f671d12a502a..0000000000000 --- a/node_modules/npm-profile/README.md +++ /dev/null @@ -1,555 +0,0 @@ -# npm-profile - -Provides functions for fetching and updating an npmjs.com profile. - -```js -const profile = require('npm-profile') -const result = await profile.get(registry, {token}) -//... -``` - -The API that this implements is documented here: - -* [authentication](https://github.com/npm/registry/blob/master/docs/user/authentication.md) -* [profile editing](https://github.com/npm/registry/blob/master/docs/user/profile.md) (and two-factor authentication) - -## Table of Contents - -* [API](#api) - * Login and Account Creation - * [`adduser()`](#adduser) - * [`login()`](#login) - * [`adduserWeb()`](#adduser-web) - * [`loginWeb()`](#login-web) - * [`adduserCouch()`](#adduser-couch) - * [`loginCouch()`](#login-couch) - * Profile Data Management - * [`get()`](#get) - * [`set()`](#set) - * Token Management - * [`listTokens()`](#list-tokens) - * [`removeToken()`](#remove-token) - * [`createToken()`](#create-token) - -## API - -### <a name="adduser"></a> `> profile.adduser(opener, prompter, [opts]) → Promise` - -Tries to create a user new web based login, if that fails it falls back to -using the legacy CouchDB APIs. - -* `opener` Function (url) → Promise, returns a promise that resolves after a browser has been opened for the user at `url`. -* `prompter` Function (creds) → Promise, returns a promise that resolves to an object with `username`, `email` and `password` properties. - -#### **Promise Value** - -An object with the following properties: - -* `token` String, to be used to authenticate further API calls -* `username` String, the username the user authenticated as - -#### **Promise Rejection** - -An error object indicating what went wrong. - -The `headers` property will contain the HTTP headers of the response. 
- -If the action was denied because it came from an IP address that this action -on this account isn't allowed from then the `code` will be set to `EAUTHIP`. - -Otherwise the code will be `'E'` followed by the HTTP response code, for -example a Forbidden response would be `E403`. - -### <a name="login"></a> `> profile.login(opener, prompter, [opts]) → Promise` - -Tries to login using new web based login, if that fails it falls back to -using the legacy CouchDB APIs. - -* `opener` Function (url) → Promise, returns a promise that resolves after a browser has been opened for the user at `url`. -* `prompter` Function (creds) → Promise, returns a promise that resolves to an object with `username`, and `password` properties. - -#### **Promise Value** - -An object with the following properties: - -* `token` String, to be used to authenticate further API calls -* `username` String, the username the user authenticated as - -#### **Promise Rejection** - -An error object indicating what went wrong. - -The `headers` property will contain the HTTP headers of the response. - -If the action was denied because an OTP is required then `code` will be set -to `EOTP`. This error code can only come from a legacy CouchDB login and so -this should be retried with loginCouch. - -If the action was denied because it came from an IP address that this action -on this account isn't allowed from then the `code` will be set to `EAUTHIP`. - -Otherwise the code will be `'E'` followed by the HTTP response code, for -example a Forbidden response would be `E403`. - -### <a name="adduser-web"></a> `> profile.adduserWeb(opener, [opts]) → Promise` - -Tries to create a user new web based login, if that fails it falls back to -using the legacy CouchDB APIs. - -* `opener` Function (url) → Promise, returns a promise that resolves after a browser has been opened for the user at `url`. -* [`opts`](#opts) Object - -#### **Promise Value** - -An object with the following properties: - -* `token` String, to be used to authenticate further API calls -* `username` String, the username the user authenticated as - -#### **Promise Rejection** - -An error object indicating what went wrong. - -The `headers` property will contain the HTTP headers of the response. - -If the registry does not support web-login then an error will be thrown with -its `code` property set to `ENYI` . You should retry with `adduserCouch`. -If you use `adduser` then this fallback will be done automatically. - -If the action was denied because it came from an IP address that this action -on this account isn't allowed from then the `code` will be set to `EAUTHIP`. - -Otherwise the code will be `'E'` followed by the HTTP response code, for -example a Forbidden response would be `E403`. - -### <a name="login-web"></a> `> profile.loginWeb(opener, [opts]) → Promise` - -Tries to login using new web based login, if that fails it falls back to -using the legacy CouchDB APIs. - -* `opener` Function (url) → Promise, returns a promise that resolves after a browser has been opened for the user at `url`. -* [`opts`](#opts) Object (optional) - -#### **Promise Value** - -An object with the following properties: - -* `token` String, to be used to authenticate further API calls -* `username` String, the username the user authenticated as - -#### **Promise Rejection** - -An error object indicating what went wrong. - -The `headers` property will contain the HTTP headers of the response. 
- -If the registry does not support web-login then an error will be thrown with -its `code` property set to `ENYI` . You should retry with `loginCouch`. -If you use `login` then this fallback will be done automatically. - -If the action was denied because it came from an IP address that this action -on this account isn't allowed from then the `code` will be set to `EAUTHIP`. - -Otherwise the code will be `'E'` followed by the HTTP response code, for -example a Forbidden response would be `E403`. - -### <a name="adduser-couch"></a> `> profile.adduserCouch(username, email, password, [opts]) → Promise` - -```js -const {token} = await profile.adduser(username, email, password, {registry}) -// `token` can be passed in through `opts` for authentication. -``` - -Creates a new user on the server along with a fresh bearer token for future -authentication as this user. This is what you see as an `authToken` in an -`.npmrc`. - -If the user already exists then the npm registry will return an error, but -this is registry specific and not guaranteed. - -* `username` String -* `email` String -* `password` String -* [`opts`](#opts) Object (optional) - -#### **Promise Value** - -An object with the following properties: - -* `token` String, to be used to authenticate further API calls -* `username` String, the username the user authenticated as - -#### **Promise Rejection** - -An error object indicating what went wrong. - -The `headers` property will contain the HTTP headers of the response. - -If the action was denied because an OTP is required then `code` will be set -to `EOTP`. - -If the action was denied because it came from an IP address that this action -on this account isn't allowed from then the `code` will be set to `EAUTHIP`. - -Otherwise the code will be `'E'` followed by the HTTP response code, for -example a Forbidden response would be `E403`. - -### <a name="login-couch"></a> `> profile.loginCouch(username, password, [opts]) → Promise` - -```js -let token -try { - {token} = await profile.login(username, password, {registry}) -} catch (err) { - if (err.code === 'otp') { - const otp = await getOTPFromSomewhere() - {token} = await profile.login(username, password, {otp}) - } -} -// `token` can now be passed in through `opts` for authentication. -``` - -Logs you into an existing user. Does not create the user if they do not -already exist. Logging in means generating a new bearer token for use in -future authentication. This is what you use as an `authToken` in an `.npmrc`. - -* `username` String -* `email` String -* `password` String -* [`opts`](#opts) Object (optional) - -#### **Promise Value** - -An object with the following properties: - -* `token` String, to be used to authenticate further API calls -* `username` String, the username the user authenticated as - -#### **Promise Rejection** - -An error object indicating what went wrong. - -If the object has a `code` property set to `EOTP` then that indicates that -this account must use two-factor authentication to login. Try again with a -one-time password. - -If the object has a `code` property set to `EAUTHIP` then that indicates that -this account is only allowed to login from certain networks and this ip is -not on one of those networks. - -If the error was neither of these then the error object will have a -`code` property set to the HTTP response code and a `headers` property with -the HTTP headers in the response. 
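The inline `loginCouch` example above still calls `profile.login`, uses a destructuring assignment that is not valid JavaScript as written, and checks for `'otp'` rather than the documented `EOTP` code. A minimal sketch of the documented flow, assuming the `loginCouch(username, password, [opts])` signature described here (the `getOTP` helper is hypothetical):

```js
const profile = require('npm-profile')

// Sketch only: log in via the legacy CouchDB endpoint and retry with a
// one-time password when the registry rejects the attempt with code 'EOTP'.
async function loginWithOtp (username, password, registry, getOTP) {
  try {
    return await profile.loginCouch(username, password, { registry })
  } catch (err) {
    if (err.code === 'EOTP') {
      const otp = await getOTP() // hypothetical prompt for the user's 2FA code
      return profile.loginCouch(username, password, { registry, otp })
    }
    throw err
  }
}
```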
- -### <a name="get"></a> `> profile.get([opts]) → Promise` - -```js -const {name, email} = await profile.get({token}) -console.log(`${token} belongs to https://npm.im/~${name}, (mailto:${email})`) -``` - -Fetch profile information for the authenticated user. - -* [`opts`](#opts) Object - -#### **Promise Value** - -An object that looks like this: - -```js -// "*" indicates a field that may not always appear -{ - tfa: null | - false | - {"mode": "auth-only", pending: Boolean} | - ["recovery", "codes"] | - "otpauth://...", - name: String, - email: String, - email_verified: Boolean, - created: Date, - updated: Date, - cidr_whitelist: null | ["192.168.1.1/32", ...], - fullname: String, // * - homepage: String, // * - freenode: String, // * - twitter: String, // * - github: String // * -} -``` - -#### **Promise Rejection** - -An error object indicating what went wrong. - -The `headers` property will contain the HTTP headers of the response. - -If the action was denied because an OTP is required then `code` will be set -to `EOTP`. - -If the action was denied because it came from an IP address that this action -on this account isn't allowed from then the `code` will be set to `EAUTHIP`. - -Otherwise the code will be the HTTP response code. - -### <a name="set"></a> `> profile.set(profileData, [opts]) → Promise` - -```js -await profile.set({github: 'great-github-account-name'}, {token}) -``` - -Update profile information for the authenticated user. - -* `profileData` An object, like that returned from `profile.get`, but see - below for caveats relating to `password`, `tfa` and `cidr_whitelist`. -* [`opts`](#opts) Object (optional) - -#### **SETTING `password`** - -This is used to change your password and is not visible (for obvious -reasons) through the `get()` API. The value should be an object with `old` -and `new` properties, where the former has the user's current password and -the latter has the desired new password. For example - -```js -await profile.set({ - password: { - old: 'abc123', - new: 'my new (more secure) password' - } -}, {token}) -``` - -#### **SETTING `cidr_whitelist`** - -The value for this is an Array. Only valid CIDR ranges are allowed in it. -Be very careful as it's possible to lock yourself out of your account with -this. This is not currently exposed in `npm` itself. - -```js -await profile.set({ - cidr_whitelist: [ '8.8.8.8/32' ] -}, {token}) -// ↑ only one of google's dns servers can now access this account. -``` - -#### **SETTING `tfa`** - -Enabling two-factor authentication is a multi-step process. - -1. Call `profile.get` and check the status of `tfa`. If `pending` is true then - you'll need to disable it with `profile.set({tfa: {password, mode: 'disable'}, …)`. -2. `profile.set({tfa: {password, mode}}, {registry, token})` - * Note that the user's `password` is required here in the `tfa` object, - regardless of how you're authenticating. - * `mode` is either `auth-only` which requires an `otp` when calling `login` - or `createToken`, or `mode` is `auth-and-writes` and an `otp` will be - required on login, publishing or when granting others access to your - modules. - * Be aware that this set call may require otp as part of the auth object. - If otp is needed it will be indicated through a rejection in the usual - way. -3. If tfa was already enabled then you're just switch modes and a - successful response means that you're done. If the tfa property is empty - and tfa _wasn't_ enabled then it means they were in a pending state. -3. 
The response will have a `tfa` property set to an `otpauth` URL, as - [used by Google Authenticator](https://github.com/google/google-authenticator/wiki/Key-Uri-Format). - You will need to show this to the user for them to add to their - authenticator application. This is typically done as a QRCODE, but you - can also show the value of the `secret` key in the `otpauth` query string - and they can type or copy paste that in. -4. To complete setting up two factor auth you need to make a second call to - `profile.set` with `tfa` set to an array of TWO codes from the user's - authenticator, eg: `profile.set(tfa: [otp1, otp2]}, {registry, token})` -5. On success you'll get a result object with a `tfa` property that has an - array of one-time-use recovery codes. These are used to authenticate - later if the second factor is lost and generally should be printed and - put somewhere safe. - -Disabling two-factor authentication is more straightforward, set the `tfa` -attribute to an object with a `password` property and a `mode` of `disable`. - -```js -await profile.set({tfa: {password, mode: 'disable'}}, {token}) -``` - -#### **Promise Value** - -An object reflecting the changes you made, see description for `profile.get`. - -#### **Promise Rejection** - -An error object indicating what went wrong. - -The `headers` property will contain the HTTP headers of the response. - -If the action was denied because an OTP is required then `code` will be set -to `EOTP`. - -If the action was denied because it came from an IP address that this action -on this account isn't allowed from then the `code` will be set to `EAUTHIP`. - -Otherwise the code will be the HTTP response code. - -### <a name="list-tokens"></a> `> profile.listTokens([opts]) → Promise` - -```js -const tokens = await profile.listTokens({registry, token}) -console.log(`Number of tokens in your accounts: ${tokens.length}`) -``` - -Fetch a list of all of the authentication tokens the authenticated user has. - -* [`opts`](#opts) Object (optional) - -#### **Promise Value** - -An array of token objects. Each token object has the following properties: - -* key — A sha512 that can be used to remove this token. -* token — The first six characters of the token UUID. This should be used - by the user to identify which token this is. -* created — The date and time the token was created -* readonly — If true, this token can only be used to download private modules. Critically, it CAN NOT be used to publish. -* cidr_whitelist — An array of CIDR ranges that this token is allowed to be used from. - -#### **Promise Rejection** - -An error object indicating what went wrong. - -The `headers` property will contain the HTTP headers of the response. - -If the action was denied because an OTP is required then `code` will be set -to `EOTP`. - -If the action was denied because it came from an IP address that this action -on this account isn't allowed from then the `code` will be set to `EAUTHIP`. - -Otherwise the code will be the HTTP response code. - -### <a name="remove-token"><a> `> profile.removeToken(token|key, opts) → Promise` - -```js -await profile.removeToken(key, {token}) -// token is gone! -``` - -Remove a specific authentication token. - -* `token|key` String, either a complete authentication token or the key returned by `profile.listTokens`. -* [`opts`](#opts) Object (optional) - -#### **Promise Value** - -No value. - -#### **Promise Rejection** - -An error object indicating what went wrong. 
- -The `headers` property will contain the HTTP headers of the response. - -If the action was denied because an OTP is required then `code` will be set -to `EOTP`. - -If the action was denied because it came from an IP address that this action -on this account isn't allowed from then the `code` will be set to `EAUTHIP`. - -Otherwise the code will be the HTTP response code. - -### <a name="create-token"></a> `> profile.createToken(password, readonly, cidr_whitelist, [opts]) → Promise` - -```js -const newToken = await profile.createToken( - password, readonly, cidr_whitelist, {token, otp} -) -// do something with the newToken -``` - -Create a new authentication token, possibly with restrictions. - -* `password` String -* `readonly` Boolean -* `cidr_whitelist` Array -* [`opts`](#opts) Object Optional - -#### **Promise Value** - -The promise will resolve with an object very much like the one's returned by -`profile.listTokens`. The only difference is that `token` is not truncated. - -```js -{ - token: String, - key: String, // sha512 hash of the token UUID - cidr_whitelist: [String], - created: Date, - readonly: Boolean -} -``` - -#### **Promise Rejection** - -An error object indicating what went wrong. - -The `headers` property will contain the HTTP headers of the response. - -If the action was denied because an OTP is required then `code` will be set -to `EOTP`. - -If the action was denied because it came from an IP address that this action -on this account isn't allowed from then the `code` will be set to `EAUTHIP`. - -Otherwise the code will be the HTTP response code. - -### <a name="opts"></a> options objects - -The various API functions accept an optional `opts` object as a final -argument. - -Options are passed to -[`npm-registry-fetch` -options](https://www.npmjs.com/package/npm-registry-fetch#fetch-opts), so -anything provided to this module will affect the behavior of that one as -well. - -Of particular note are `opts.registry`, and the auth-related options: - -* `opts.creds` Object, passed through to prompter, common values are: - * `username` String, default value for username - * `email` String, default value for email -* `opts.username` and `opts.password` - used for Basic auth -* `opts.otp` String, the two-factor-auth one-time-password (Will prompt for - this if needed and not provided.) -* `opts.hostname` String, the hostname of the current machine, to show the - user during the WebAuth flow. (Defaults to `os.hostname()`.) - -## <a name="logging"></a> Logging - -This modules logs by emitting `log` events on the global `process` object. -These events look like this: - -```js -process.emit('log', 'loglevel', 'feature', 'message part 1', 'part 2', 'part 3', 'etc') -``` - -`loglevel` can be one of: `error`, `warn`, `notice`, `http`, `timing`, `info`, `verbose`, and `silly`. - -`feature` is any brief string that describes the component doing the logging. - -The remaining arguments are evaluated like `console.log` and joined together with spaces. 
- -A real world example of this is: - -```js - process.emit('log', 'http', 'request', '→', conf.method || 'GET', conf.target) -``` - -To handle the log events, you would do something like this: - -```js -const log = require('npmlog') -process.on('log', function (level) { - return log[level].apply(log, [].slice.call(arguments, 1)) -}) -``` diff --git a/node_modules/npm-profile/package.json b/node_modules/npm-profile/package.json index 233381a38ec7b..43cc7c921bb04 100644 --- a/node_modules/npm-profile/package.json +++ b/node_modules/npm-profile/package.json @@ -1,12 +1,12 @@ { "name": "npm-profile", - "version": "5.0.2", + "version": "5.0.4", "description": "Library for updating an npmjs.com profile", "keywords": [], "author": "Rebecca Turner <me@re-becca.org> (http://re-becca.org/)", "license": "ISC", "dependencies": { - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" }, "main": "index.js", "repository": { diff --git a/node_modules/npm-registry-fetch/CHANGELOG.md b/node_modules/npm-registry-fetch/CHANGELOG.md deleted file mode 100644 index fc26ee1bda4ba..0000000000000 --- a/node_modules/npm-registry-fetch/CHANGELOG.md +++ /dev/null @@ -1,384 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -### [8.1.5](https://github.com/npm/registry-fetch/compare/v8.1.4...v8.1.5) (2020-10-12) - - -### Bug Fixes - -* respect publishConfig.registry when specified ([32e36ef](https://github.com/npm/registry-fetch/commit/32e36efe86302ed319973cd5b1e6ccc3f62e557e)), closes [#35](https://github.com/npm/registry-fetch/issues/35) - -### [8.1.4](https://github.com/npm/registry-fetch/compare/v8.1.3...v8.1.4) (2020-08-17) - - -### Bug Fixes - -* redact passwords from http logs ([3c294eb](https://github.com/npm/registry-fetch/commit/3c294ebbd7821725db4ff1bc5fe368c49613efcc)) - -### [8.1.3](https://github.com/npm/registry-fetch/compare/v8.1.2...v8.1.3) (2020-07-21) - -### [8.1.2](https://github.com/npm/registry-fetch/compare/v8.1.1...v8.1.2) (2020-07-11) - -### [8.1.1](https://github.com/npm/registry-fetch/compare/v8.1.0...v8.1.1) (2020-06-30) - -## [8.1.0](https://github.com/npm/registry-fetch/compare/v8.0.3...v8.1.0) (2020-05-20) - - -### Features - -* add npm-command HTTP header ([1bb4eb2](https://github.com/npm/registry-fetch/commit/1bb4eb2c66ee8a0dc62558bdcff1b548e2bb9820)) - -### [8.0.3](https://github.com/npm/registry-fetch/compare/v8.0.2...v8.0.3) (2020-05-13) - - -### Bug Fixes - -* update minipass and make-fetch-happen to latest ([3b6c5d0](https://github.com/npm/registry-fetch/commit/3b6c5d0d8ccd4c4a97862a65acef956f19aec127)), closes [#23](https://github.com/npm/registry-fetch/issues/23) - -### [8.0.2](https://github.com/npm/registry-fetch/compare/v8.0.1...v8.0.2) (2020-05-04) - - -### Bug Fixes - -* update make-fetch-happen to 8.0.6 ([226df2c](https://github.com/npm/registry-fetch/commit/226df2c32e3f9ed8ceefcfdbd11efb178181b442)) - -## [8.0.0](https://github.com/npm/registry-fetch/compare/v7.0.1...v8.0.0) (2020-02-24) - - -### ⚠ BREAKING CHANGES - -* Removes the 'opts.refer' option and the HTTP Referer -header (unless explicitly added to the 'headers' option, of course). 
- -PR-URL: https://github.com/npm/npm-registry-fetch/pull/25 -Credit: @isaacs - -### Bug Fixes - -* remove referer header and opts.refer ([eb8f7af](https://github.com/npm/registry-fetch/commit/eb8f7af3c102834856604c1be664b00ca0fe8ef2)), closes [#25](https://github.com/npm/registry-fetch/issues/25) - -### [7.0.1](https://github.com/npm/registry-fetch/compare/v7.0.0...v7.0.1) (2020-02-24) - -## [7.0.0](https://github.com/npm/registry-fetch/compare/v6.0.2...v7.0.0) (2020-02-18) - - -### ⚠ BREAKING CHANGES - -* figgy pudding is now nowhere to be found. -* this removes figgy-pudding, and drops several option -aliases. - -Defaults and behavior are all the same, and this module is now using the -canonical camelCase option names that npm v7 will provide to all its -deps. - -Related to: https://github.com/npm/rfcs/pull/102 - -PR-URL: https://github.com/npm/npm-registry-fetch/pull/22 -Credit: @isaacs - -### Bug Fixes - -* Remove figgy-pudding, use canonical option names ([ede3c08](https://github.com/npm/registry-fetch/commit/ede3c087007fd1808e02b1af70562220d03b18a9)), closes [#22](https://github.com/npm/registry-fetch/issues/22) - - -* update cacache, ssri, make-fetch-happen ([57fcc88](https://github.com/npm/registry-fetch/commit/57fcc889bee03edcc0a2025d96a171039108c231)) - -### [6.0.2](https://github.com/npm/registry-fetch/compare/v6.0.1...v6.0.2) (2020-02-14) - - -### Bug Fixes - -* always bypass cache when ?write=true ([83f89f3](https://github.com/npm/registry-fetch/commit/83f89f35abd2ed0507c869e37f90ed746375772c)) - -### [6.0.1](https://github.com/npm/registry-fetch/compare/v6.0.0...v6.0.1) (2020-02-14) - - -### Bug Fixes - -* use 30s default for timeout as per README ([50e8afc](https://github.com/npm/registry-fetch/commit/50e8afc6ff850542feb588f9f9c64ebae59e72a0)), closes [#20](https://github.com/npm/registry-fetch/issues/20) - -## [6.0.0](https://github.com/npm/registry-fetch/compare/v5.0.1...v6.0.0) (2019-12-17) - - -### ⚠ BREAKING CHANGES - -* This drops support for node < 10. - -There are some lint failures due to standard pushing for using WhatWG URL -objects instead of url.parse/url.resolve. However, the code in this lib -does some fancy things with the query/search portions of the parsed url -object, so it'll take a bit of care to make it work properly. - -### Bug Fixes - -* detect CI so our tests don't fail in CI ([5813da6](https://github.com/npm/registry-fetch/commit/5813da634cef73b12e40373972d7937e6934fce0)) -* Use WhatWG URLs instead of url.parse ([8ccfa8a](https://github.com/npm/registry-fetch/commit/8ccfa8a72c38cfedb0f525b7f453644fd4444f99)) - - -* normalize settings, drop old nodes, update deps ([510b125](https://github.com/npm/registry-fetch/commit/510b1255cc7ed4bb397a34e0007757dae33e2275)) - -<a name="5.0.1"></a> -## [5.0.1](https://github.com/npm/registry-fetch/compare/v5.0.0...v5.0.1) (2019-11-11) - - - -<a name="5.0.0"></a> -# [5.0.0](https://github.com/npm/registry-fetch/compare/v4.0.2...v5.0.0) (2019-10-04) - - -### Bug Fixes - -* prefer const in getAuth function ([90ac7b1](https://github.com/npm/registry-fetch/commit/90ac7b1)) -* use minizlib instead of core zlib ([e64702e](https://github.com/npm/registry-fetch/commit/e64702e)) - - -### Features - -* refactor to use Minipass streams ([bb37f20](https://github.com/npm/registry-fetch/commit/bb37f20)) - - -### BREAKING CHANGES - -* this replaces all core streams (except for some -PassThrough streams in a few tests) with Minipass streams, and updates -all deps to the latest and greatest Minipass versions of things. 
- - - -<a name="4.0.2"></a> -## [4.0.2](https://github.com/npm/registry-fetch/compare/v4.0.0...v4.0.2) (2019-10-04) - - -### Bug Fixes - -* Add null check on body on 401 errors ([e3a0186](https://github.com/npm/registry-fetch/commit/e3a0186)), closes [#9](https://github.com/npm/registry-fetch/issues/9) -* **deps:** Add explicit dependency on safe-buffer ([8eae5f0](https://github.com/npm/registry-fetch/commit/8eae5f0)), closes [npm/libnpmaccess#2](https://github.com/npm/libnpmaccess/issues/2) [#3](https://github.com/npm/registry-fetch/issues/3) - - - -<a name="4.0.0"></a> -# [4.0.0](https://github.com/npm/registry-fetch/compare/v3.9.1...v4.0.0) (2019-07-15) - - -* cacache@12.0.0, infer uid from cache folder ([0c4f060](https://github.com/npm/registry-fetch/commit/0c4f060)) - - -### BREAKING CHANGES - -* uid and gid are inferred from cache folder, rather than -being passed in as options. - - - -<a name="3.9.1"></a> -## [3.9.1](https://github.com/npm/registry-fetch/compare/v3.9.0...v3.9.1) (2019-07-02) - - - -<a name="3.9.0"></a> -# [3.9.0](https://github.com/npm/registry-fetch/compare/v3.8.0...v3.9.0) (2019-01-24) - - -### Features - -* **auth:** support username:password encoded legacy _auth ([a91f90c](https://github.com/npm/registry-fetch/commit/a91f90c)) - - - -<a name="3.8.0"></a> -# [3.8.0](https://github.com/npm/registry-fetch/compare/v3.7.0...v3.8.0) (2018-08-23) - - -### Features - -* **mapJson:** add support for passing in json stream mapper ([0600986](https://github.com/npm/registry-fetch/commit/0600986)) - - - -<a name="3.7.0"></a> -# [3.7.0](https://github.com/npm/registry-fetch/compare/v3.6.0...v3.7.0) (2018-08-23) - - -### Features - -* **json.stream:** add utility function for streamed JSON parsing ([051d969](https://github.com/npm/registry-fetch/commit/051d969)) - - - -<a name="3.6.0"></a> -# [3.6.0](https://github.com/npm/registry-fetch/compare/v3.5.0...v3.6.0) (2018-08-22) - - -### Bug Fixes - -* **docs:** document opts.forceAuth ([40bcd65](https://github.com/npm/registry-fetch/commit/40bcd65)) - - -### Features - -* **opts.ignoreBody:** add a boolean to throw away response bodies ([6923702](https://github.com/npm/registry-fetch/commit/6923702)) - - - -<a name="3.5.0"></a> -# [3.5.0](https://github.com/npm/registry-fetch/compare/v3.4.0...v3.5.0) (2018-08-22) - - -### Features - -* **pkgid:** heuristic pkgid calculation for errors ([2e789a5](https://github.com/npm/registry-fetch/commit/2e789a5)) - - - -<a name="3.4.0"></a> -# [3.4.0](https://github.com/npm/registry-fetch/compare/v3.3.0...v3.4.0) (2018-08-22) - - -### Bug Fixes - -* **deps:** use new figgy-pudding with aliases fix ([0308f54](https://github.com/npm/registry-fetch/commit/0308f54)) - - -### Features - -* **auth:** add forceAuth option to force a specific auth mechanism ([4524d17](https://github.com/npm/registry-fetch/commit/4524d17)) - - - -<a name="3.3.0"></a> -# [3.3.0](https://github.com/npm/registry-fetch/compare/v3.2.1...v3.3.0) (2018-08-21) - - -### Bug Fixes - -* **query:** stop including undefined keys ([4718b1b](https://github.com/npm/registry-fetch/commit/4718b1b)) - - -### Features - -* **otp:** use heuristic detection for malformed EOTP responses ([f035194](https://github.com/npm/registry-fetch/commit/f035194)) - - - -<a name="3.2.1"></a> -## [3.2.1](https://github.com/npm/registry-fetch/compare/v3.2.0...v3.2.1) (2018-08-16) - - -### Bug Fixes - -* **opts:** pass through non-null opts.retry ([beba040](https://github.com/npm/registry-fetch/commit/beba040)) - - - -<a name="3.2.0"></a> -# 
[3.2.0](https://github.com/npm/registry-fetch/compare/v3.1.1...v3.2.0) (2018-07-27) - - -### Features - -* **gzip:** add opts.gzip convenience opt ([340abe0](https://github.com/npm/registry-fetch/commit/340abe0)) - - - -<a name="3.1.1"></a> -## [3.1.1](https://github.com/npm/registry-fetch/compare/v3.1.0...v3.1.1) (2018-04-09) - - - -<a name="3.1.0"></a> -# [3.1.0](https://github.com/npm/registry-fetch/compare/v3.0.0...v3.1.0) (2018-04-09) - - -### Features - -* **config:** support no-proxy and https-proxy options ([9aa906b](https://github.com/npm/registry-fetch/commit/9aa906b)) - - - -<a name="3.0.0"></a> -# [3.0.0](https://github.com/npm/registry-fetch/compare/v2.1.0...v3.0.0) (2018-04-09) - - -### Bug Fixes - -* **api:** pacote integration-related fixes ([a29de4f](https://github.com/npm/registry-fetch/commit/a29de4f)) -* **config:** stop caring about opts.config ([5856a6f](https://github.com/npm/registry-fetch/commit/5856a6f)) - - -### BREAKING CHANGES - -* **config:** opts.config is no longer supported. Pass the options down in opts itself. - - - -<a name="2.1.0"></a> -# [2.1.0](https://github.com/npm/registry-fetch/compare/v2.0.0...v2.1.0) (2018-04-08) - - -### Features - -* **token:** accept opts.token for opts._authToken ([108c9f0](https://github.com/npm/registry-fetch/commit/108c9f0)) - - - -<a name="2.0.0"></a> -# [2.0.0](https://github.com/npm/registry-fetch/compare/v1.1.1...v2.0.0) (2018-04-08) - - -### meta - -* drop support for node@4 ([758536e](https://github.com/npm/registry-fetch/commit/758536e)) - - -### BREAKING CHANGES - -* node@4 is no longer supported - - - -<a name="1.1.1"></a> -## [1.1.1](https://github.com/npm/registry-fetch/compare/v1.1.0...v1.1.1) (2018-04-06) - - - -<a name="1.1.0"></a> -# [1.1.0](https://github.com/npm/registry-fetch/compare/v1.0.1...v1.1.0) (2018-03-16) - - -### Features - -* **specs:** can use opts.spec to trigger pickManifest ([85c4ac9](https://github.com/npm/registry-fetch/commit/85c4ac9)) - - - -<a name="1.0.1"></a> -## [1.0.1](https://github.com/npm/registry-fetch/compare/v1.0.0...v1.0.1) (2018-03-16) - - -### Bug Fixes - -* **query:** oops console.log ([870e4f5](https://github.com/npm/registry-fetch/commit/870e4f5)) - - - -<a name="1.0.0"></a> -# 1.0.0 (2018-03-16) - - -### Bug Fixes - -* **auth:** get auth working with all the little details ([84b94ba](https://github.com/npm/registry-fetch/commit/84b94ba)) -* **deps:** add bluebird as an actual dep ([1286e31](https://github.com/npm/registry-fetch/commit/1286e31)) -* **errors:** Unknown auth errors use default code ([#1](https://github.com/npm/registry-fetch/issues/1)) ([3d91b93](https://github.com/npm/registry-fetch/commit/3d91b93)) -* **standard:** remove args from invocation ([9620a0a](https://github.com/npm/registry-fetch/commit/9620a0a)) - - -### Features - -* **api:** baseline kinda-working API impl ([bf91f9f](https://github.com/npm/registry-fetch/commit/bf91f9f)) -* **body:** automatic handling of different opts.body values ([f3b97db](https://github.com/npm/registry-fetch/commit/f3b97db)) -* **config:** nicer input config input handling ([b9ce21d](https://github.com/npm/registry-fetch/commit/b9ce21d)) -* **opts:** use figgy-pudding for opts handling ([0abd527](https://github.com/npm/registry-fetch/commit/0abd527)) -* **query:** add query utility support ([65ea8b1](https://github.com/npm/registry-fetch/commit/65ea8b1)) diff --git a/node_modules/npm-registry-fetch/README.md b/node_modules/npm-registry-fetch/README.md deleted file mode 100644 index f5ae9cac31a03..0000000000000 --- 
a/node_modules/npm-registry-fetch/README.md +++ /dev/null @@ -1,629 +0,0 @@ -# npm-registry-fetch - -[`npm-registry-fetch`](https://github.com/npm/npm-registry-fetch) is a Node.js -library that implements a `fetch`-like API for accessing npm registry APIs -consistently. It's able to consume npm-style configuration values and has all -the necessary logic for picking registries, handling scopes, and dealing with -authentication details built-in. - -This package is meant to replace the older -[`npm-registry-client`](https://npm.im/npm-registry-client). - -## Example - -```javascript -const npmFetch = require('npm-registry-fetch') - -console.log( - await npmFetch.json('/-/ping') -) -``` - -## Table of Contents - -* [Installing](#install) -* [Example](#example) -* [Contributing](#contributing) -* [API](#api) - * [`fetch`](#fetch) - * [`fetch.json`](#fetch-json) - * [`fetch` options](#fetch-opts) - -### Install - -`$ npm install npm-registry-fetch` - -### Contributing - -The npm team enthusiastically welcomes contributions and project participation! -There's a bunch of things you can do if you want to contribute! The [Contributor -Guide](CONTRIBUTING.md) has all the information you need for everything from -reporting bugs to contributing entire new features. Please don't hesitate to -jump in if you'd like to, or even ask us questions if something isn't clear. - -All participants and maintainers in this project are expected to follow [Code of -Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other. - -Please refer to the [Changelog](CHANGELOG.md) for project history details, too. - -Happy hacking! - -### API - -#### Caching and `write=true` query strings - -Before performing any PUT or DELETE operation, npm clients first make a -GET request to the registry resource being updated, which includes -the query string `?write=true`. - -The semantics of this are, effectively, "I intend to write to this thing, -and need to know the latest current value, so that my write can land -cleanly". - -The public npm registry handles these `?write=true` requests by ensuring -that the cache is re-validated before sending a response. In order to -maintain the same behavior on the client, and not get tripped up by an -overeager local cache when we intend to write data to the registry, any -request that comes through `npm-registry-fetch` that contains `write=true` -in the query string will forcibly set the `prefer-online` option to `true`, -and set both `prefer-offline` and `offline` to false, so that any local -cached value will be revalidated. - -#### <a name="fetch"></a> `> fetch(url, [opts]) -> Promise<Response>` - -Performs a request to a given URL. - -The URL can be either a full URL, or a path to one. The appropriate registry -will be automatically picked if only a URL path is given. - -For available options, please see the section on [`fetch` options](#fetch-opts). - -##### Example - -```javascript -const res = await fetch('/-/ping') -console.log(res.headers) -res.on('data', d => console.log(d.toString('utf8'))) -``` - -#### <a name="fetch-json"></a> `> fetch.json(url, [opts]) -> Promise<ResponseJSON>` - -Performs a request to a given registry URL, parses the body of the response as -JSON, and returns it as its final value. This is a utility shorthand for -`fetch(url).then(res => res.json())`. - -For available options, please see the section on [`fetch` options](#fetch-opts). 
- -##### Example - -```javascript -const res = await fetch.json('/-/ping') -console.log(res) // Body parsed as JSON -``` - -#### <a name="fetch-json-stream"></a> `> fetch.json.stream(url, jsonPath, [opts]) -> Stream` - -Performs a request to a given registry URL and parses the body of the response -as JSON, with each entry being emitted through the stream. - -The `jsonPath` argument is a [`JSONStream.parse()` -path](https://github.com/dominictarr/JSONStream#jsonstreamparsepath), and the -returned stream (unlike default `JSONStream`s), has a valid -`Symbol.asyncIterator` implementation. - -For available options, please see the section on [`fetch` options](#fetch-opts). - -##### Example - -```javascript -console.log('https://npm.im/~zkat has access to the following packages:') -for await (let {key, value} of fetch.json.stream('/-/user/zkat/package', '$*')) { - console.log(`https://npm.im/${key} (perms: ${value})`) -} -``` - -#### <a name="fetch-opts"></a> `fetch` Options - -Fetch options are optional, and can be passed in as either a Map-like object -(one with a `.get()` method), a plain javascript object, or a -[`figgy-pudding`](https://npm.im/figgy-pudding) instance. - -##### <a name="opts-agent"></a> `opts.agent` - -* Type: http.Agent -* Default: an appropriate agent based on URL protocol and proxy settings - -An [`Agent`](https://nodejs.org/api/http.html#http_class_http_agent) instance to -be shared across requests. This allows multiple concurrent `fetch` requests to -happen on the same socket. - -You do _not_ need to provide this option unless you want something particularly -specialized, since proxy configurations and http/https agents are already -automatically managed internally when this option is not passed through. - -##### <a name="opts-body"></a> `opts.body` - -* Type: Buffer | Stream | Object -* Default: null - -Request body to send through the outgoing request. Buffers and Streams will be -passed through as-is, with a default `content-type` of -`application/octet-stream`. Plain JavaScript objects will be `JSON.stringify`ed -and the `content-type` will default to `application/json`. - -Use [`opts.headers`](#opts-headers) to set the content-type to something else. - -##### <a name="opts-ca"></a> `opts.ca` - -* Type: String, Array, or null -* Default: null - -The Certificate Authority signing certificate that is trusted for SSL -connections to the registry. Values should be in PEM format (Windows calls it -"Base-64 encoded X.509 (.CER)") with newlines replaced by the string `'\n'`. For -example: - -``` -{ - ca: '-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----' -} -``` - -Set to `null` to only allow "known" registrars, or to a specific CA cert -to trust only that specific signing authority. - -Multiple CAs can be trusted by specifying an array of certificates instead of a -single string. - -See also [`opts.strictSSL`](#opts-strictSSL), [`opts.ca`](#opts-ca) and -[`opts.key`](#opts-key) - -##### <a name="opts-cache"></a> `opts.cache` - -* Type: path -* Default: null - -The location of the http cache directory. If provided, certain cachable requests -will be cached according to [IETF RFC 7234](https://tools.ietf.org/html/rfc7234) -rules. This will speed up future requests, as well as make the cached data -available offline if necessary/requested. - -See also [`offline`](#opts-offline), [`preferOffline`](#opts-preferOffline), -and [`preferOnline`](#opts-preferOnline). 
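A minimal sketch of combining `opts.cache` with the offline-related flags referenced above (the cache path is an assumed value); note that, per the caching rules earlier in this README, any request whose query string contains `write=true` is revalidated against the registry regardless of these flags:

```js
const npmFetch = require('npm-registry-fetch')

// Sketch only: cache responses on disk and serve cached data without a
// staleness check, hitting the network only for data missing from the cache.
const ping = await npmFetch.json('/-/ping', {
  cache: '/path/to/cache', // assumed cache directory
  preferOffline: true,
})
console.log(ping)
```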
- -##### <a name="opts-cert"></a> `opts.cert` - -* Type: String -* Default: null - -A client certificate to pass when accessing the registry. Values should be in -PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines -replaced by the string `'\n'`. For example: - -``` -{ - cert: '-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----' -} -``` - -It is _not_ the path to a certificate file (and there is no "certfile" option). - -See also: [`opts.ca`](#opts-ca) and [`opts.key`](#opts-key) - -##### <a name="opts-fetchRetries"></a> `opts.fetchRetries` - -* Type: Number -* Default: 2 - -The "retries" config for [`retry`](https://npm.im/retry) to use when fetching -packages from the registry. - -See also [`opts.retry`](#opts-retry) to provide all retry options as a single -object. - -##### <a name="opts-fetchRetryFactor"></a> `opts.fetchRetryFactor` - -* Type: Number -* Default: 10 - -The "factor" config for [`retry`](https://npm.im/retry) to use when fetching -packages. - -See also [`opts.retry`](#opts-retry) to provide all retry options as a single -object. - -##### <a name="opts-fetchRetryMintimeout"></a> `opts.fetchRetryMintimeout` - -* Type: Number -* Default: 10000 (10 seconds) - -The "minTimeout" config for [`retry`](https://npm.im/retry) to use when fetching -packages. - -See also [`opts.retry`](#opts-retry) to provide all retry options as a single -object. - -##### <a name="opts-fetchRetryMaxtimeout"></a> `opts.fetchRetryMaxtimeout` - -* Type: Number -* Default: 60000 (1 minute) - -The "maxTimeout" config for [`retry`](https://npm.im/retry) to use when fetching -packages. - -See also [`opts.retry`](#opts-retry) to provide all retry options as a single -object. - -##### <a name="opts-forceAuth"></a> `opts.forceAuth` - -* Type: Object -* Default: null - -If present, other auth-related values in `opts` will be completely ignored, -including `alwaysAuth`, `email`, and `otp`, when calculating auth for a request, -and the auth details in `opts.forceAuth` will be used instead. - -##### <a name="opts-gzip"></a> `opts.gzip` - -* Type: Boolean -* Default: false - -If true, `npm-registry-fetch` will set the `Content-Encoding` header to `gzip` -and use `zlib.gzip()` or `zlib.createGzip()` to gzip-encode -[`opts.body`](#opts-body). - -##### <a name="opts-headers"></a> `opts.headers` - -* Type: Object -* Default: null - -Additional headers for the outgoing request. This option can also be used to -override headers automatically generated by `npm-registry-fetch`, such as -`Content-Type`. - -##### <a name="opts-ignoreBody"></a> `opts.ignoreBody` - -* Type: Boolean -* Default: false - -If true, the **response body** will be thrown away and `res.body` set to `null`. -This will prevent dangling response sockets for requests where you don't usually -care what the response body is. - -##### <a name="opts-integrity"></a> `opts.integrity` - -* Type: String | [SRI object](https://npm.im/ssri) -* Default: null - -If provided, the response body's will be verified against this integrity string, -using [`ssri`](https://npm.im/ssri). If verification succeeds, the response will -complete as normal. If verification fails, the response body will error with an -`EINTEGRITY` error. - -Body integrity is only verified if the body is actually consumed to completion -- -that is, if you use `res.json()`/`res.buffer()`, or if you consume the default -`res` stream data to its end. 
- -Cached data will have its integrity automatically verified using the -previously-generated integrity hash for the saved request information, so -`EINTEGRITY` errors can happen if [`opts.cache`](#opts-cache) is used, even if -`opts.integrity` is not passed in. - -##### <a name='opts-isFromCI'></a> `opts.isFromCI` - -* Type: Boolean -* Default: Based on environment variables - -This is used to populate the `npm-in-ci` request header sent to the registry. - -##### <a name="opts-key"></a> `opts.key` - -* Type: String -* Default: null - -A client key to pass when accessing the registry. Values should be in PEM -format with newlines replaced by the string `'\n'`. For example: - -``` -{ - key: '-----BEGIN PRIVATE KEY-----\nXXXX\nXXXX\n-----END PRIVATE KEY-----' -} -``` - -It is _not_ the path to a key file (and there is no "keyfile" option). - -See also: [`opts.ca`](#opts-ca) and [`opts.cert`](#opts-cert) - -##### <a name="opts-localAddress"></a> `opts.localAddress` - -* Type: IP Address String -* Default: null - -The IP address of the local interface to use when making connections -to the registry. - -See also [`opts.proxy`](#opts-proxy) - -##### <a name="opts-log"></a> `opts.log` - -* Type: [`npmlog`](https://npm.im/npmlog)-like -* Default: null - -Logger object to use for logging operation details. Must have the same methods -as `npmlog`. - -##### <a name="opts-mapJSON"></a> `opts.mapJSON` - -* Type: Function -* Default: undefined - -When using `fetch.json.stream()` (NOT `fetch.json()`), this will be passed down -to [`JSONStream`](https://npm.im/JSONStream) as the second argument to -`JSONStream.parse`, and can be used to transform stream data before output. - -##### <a name="opts-maxSockets"></a> `opts.maxSockets` - -* Type: Integer -* Default: 12 - -Maximum number of sockets to keep open during requests. Has no effect if -[`opts.agent`](#opts-agent) is used. - -##### <a name="opts-method"></a> `opts.method` - -* Type: String -* Default: 'GET' - -HTTP method to use for the outgoing request. Case-insensitive. - -##### <a name="opts-noproxy"></a> `opts.noproxy` - -* Type: Boolean -* Default: process.env.NOPROXY - -If true, proxying will be disabled even if [`opts.proxy`](#opts-proxy) is used. - -##### <a name="opts-npmSession"></a> `opts.npmSession` - -* Type: String -* Default: null - -If provided, will be sent in the `npm-session` header. This header is used by -the npm registry to identify individual user sessions (usually individual -invocations of the CLI). - -##### <a name="opts-npmCommand"></a> `opts.npmCommand` - -* Type: String -* Default: null - -If provided, it will be sent in the `npm-command` header. This yeader is -used by the npm registry to identify the npm command that caused this -request to be made. - -##### <a name="opts-offline"></a> `opts.offline` - -* Type: Boolean -* Default: false - -Force offline mode: no network requests will be done during install. To allow -`npm-registry-fetch` to fill in missing cache data, see -[`opts.preferOffline`](#opts-preferOffline). - -This option is only really useful if you're also using -[`opts.cache`](#opts-cache). - -This option is set to `true` when the request includes `write=true` in the -query string. - -##### <a name="opts-otp"></a> `opts.otp` - -* Type: Number | String -* Default: null - -This is a one-time password from a two-factor authenticator. It is required for -certain registry interactions when two-factor auth is enabled for a user -account. 
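To show the auth-related options together, here is a minimal sketch of a write-style request that supplies an OTP; the target path, body, and token are placeholders rather than a documented endpoint:

```js
const npmFetch = require('npm-registry-fetch')

// Sketch only: a PUT with a bearer token plus the one-time password required
// when two-factor auth is enabled for the account.
await npmFetch('/some-package', {
  method: 'PUT',
  body: { name: 'some-package' /* ...rest of the write payload... */ },
  token: 'npm_t0k3n', // assumed token value
  otp: '123456',      // code from the user's authenticator
})
```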
- -##### <a name="opts-password"></a> `opts.password` - -* Alias: `_password` -* Type: String -* Default: null - -Password used for basic authentication. For the more modern authentication -method, please use the (more secure) [`opts.token`](#opts-token) - -Can optionally be scoped to a registry by using a "nerf dart" for that registry. -That is: - -``` -{ - '//registry.npmjs.org/:password': 't0k3nH34r' -} -``` - -See also [`opts.username`](#opts-username) - -##### <a name="opts-preferOffline"></a> `opts.preferOffline` - -* Type: Boolean -* Default: false - -If true, staleness checks for cached data will be bypassed, but missing data -will be requested from the server. To force full offline mode, use -[`opts.offline`](#opts-offline). - -This option is generally only useful if you're also using -[`opts.cache`](#opts-cache). - -This option is set to `false` when the request includes `write=true` in the -query string. - -##### <a name="opts-preferOnline"></a> `opts.preferOnline` - -* Type: Boolean -* Default: false - -If true, staleness checks for cached data will be forced, making the CLI look -for updates immediately even for fresh package data. - -This option is generally only useful if you're also using -[`opts.cache`](#opts-cache). - -This option is set to `true` when the request includes `write=true` in the -query string. - -##### <a name="opts-projectScope"></a> `opts.projectScope` - -* Type: String -* Default: null - -If provided, will be sent in the `npm-scope` header. This header is used by the -npm registry to identify the toplevel package scope that a particular project -installation is using. - -##### <a name="opts-proxy"></a> `opts.proxy` - -* Type: url -* Default: null - -A proxy to use for outgoing http requests. If not passed in, the `HTTP(S)_PROXY` -environment variable will be used. - -##### <a name="opts-query"></a> `opts.query` - -* Type: String | Object -* Default: null - -If provided, the request URI will have a query string appended to it using this -query. If `opts.query` is an object, it will be converted to a query string -using -[`querystring.stringify()`](https://nodejs.org/api/querystring.html#querystring_querystring_stringify_obj_sep_eq_options). - -If the request URI already has a query string, it will be merged with -`opts.query`, preferring `opts.query` values. - -##### <a name="opts-registry"></a> `opts.registry` - -* Type: URL -* Default: `'https://registry.npmjs.org'` - -Registry configuration for a request. If a request URL only includes the URL -path, this registry setting will be prepended. This configuration is also used -to determine authentication details, so even if the request URL references a -completely different host, `opts.registry` will be used to find the auth details -for that request. - -See also [`opts.scope`](#opts-scope), [`opts.spec`](#opts-spec), and -[`opts.<scope>:registry`](#opts-scope-registry) which can all affect the actual -registry URL used by the outgoing request. - -##### <a name="opts-retry"></a> `opts.retry` - -* Type: Object -* Default: null - -Single-object configuration for request retry settings. If passed in, will -override individually-passed `fetch-retry-*` settings. - -##### <a name="opts-scope"></a> `opts.scope` - -* Type: String -* Default: null - -Associate an operation with a scope for a scoped registry. This option can force -lookup of scope-specific registries and authentication. - -See also [`opts.<scope>:registry`](#opts-scope-registry) and -[`opts.spec`](#opts-spec) for interactions with this option. 
- -##### <a name="opts-scope-registry"></a> `opts.<scope>:registry` - -* Type: String -* Default: null - -This option type can be used to configure the registry used for requests -involving a particular scope. For example, `opts['@myscope:registry'] = -'https://scope-specific.registry/'` will make it so requests go out to this -registry instead of [`opts.registry`](#opts-registry) when -[`opts.scope`](#opts-scope) is used, or when [`opts.spec`](#opts-spec) is a -scoped package spec. - -The `@` before the scope name is optional, but recommended. - -##### <a name="opts-spec"></a> `opts.spec` - -* Type: String | [`npm-registry-arg`](https://npm.im/npm-registry-arg) object. -* Default: null - -If provided, can be used to automatically configure [`opts.scope`](#opts-scope) -based on a specific package name. Non-registry package specs will throw an -error. - -##### <a name="opts-strictSSL"></a> `opts.strictSSL` - -* Type: Boolean -* Default: true - -Whether or not to do SSL key validation when making requests to the -registry via https. - -See also [`opts.ca`](#opts-ca). - -##### <a name="opts-timeout"></a> `opts.timeout` - -* Type: Milliseconds -* Default: 300000 (5 minutes) - -Time before a hanging request times out. - -##### <a name="opts-token"></a> `opts.token` - -* Alias: `opts._authToken` -* Type: String -* Default: null - -Authentication token string. - -Can be scoped to a registry by using a "nerf dart" for that registry. That is: - -``` -{ - '//registry.npmjs.org/:token': 't0k3nH34r' -} -``` - -##### <a name="opts-userAgent"></a> `opts.userAgent` - -* Type: String -* Default: `'npm-registry-fetch@<version>/node@<node-version>+<arch> (<platform>)'` - -User agent string to send in the `User-Agent` header. - -##### <a name="opts-username"></a> `opts.username` - -* Type: String -* Default: null - -Username used for basic authentication. For the more modern authentication -method, please use the (more secure) [`opts.token`](#opts-token) - -Can optionally be scoped to a registry by using a "nerf dart" for that registry. -That is: - -``` -{ - '//registry.npmjs.org/:username': 't0k3nH34r' -} -``` - -See also [`opts.password`](#opts-password) - -##### <a name="opts-auth"></a> `opts._auth` - -* Type: String -* Default: null - -** DEPRECATED ** This is a legacy authentication token supported only for -compatibility. Please use [`opts.token`](#opts-token) instead. diff --git a/node_modules/npm-registry-fetch/auth.js b/node_modules/npm-registry-fetch/auth.js index e096a6f98f9a4..01a4436a5d2a8 100644 --- a/node_modules/npm-registry-fetch/auth.js +++ b/node_modules/npm-registry-fetch/auth.js @@ -1,55 +1,111 @@ 'use strict' +const npa = require('npm-package-arg') -const defaultOpts = require('./default-opts.js') -const url = require('url') +// Find the longest registry key that is used for some kind of auth +// in the options. +const regKeyFromURI = (uri, opts) => { + const parsed = new URL(uri) + // try to find a config key indicating we have auth for this registry + // can be one of :_authToken, :_auth, or :_password and :username + // We walk up the "path" until we're left with just //<host>[:<port>], + // stopping when we reach '//'. + let regKey = `//${parsed.host}${parsed.pathname}` + while (regKey.length > '//'.length) { + // got some auth for this URI + if (hasAuth(regKey, opts)) + return regKey -module.exports = getAuth -function getAuth (registry, opts_ = {}) { - if (!registry) - throw new Error('registry is required') - const opts = opts_.forceAuth ? 
opts_.forceAuth : { ...defaultOpts, ...opts_ } - const AUTH = {} - const regKey = registry && registryKey(registry) - const doKey = (key, alias) => addKey(opts, AUTH, regKey, key, alias) - doKey('token') - doKey('_authToken', 'token') - doKey('username') - doKey('password') - doKey('_password', 'password') - doKey('email') - doKey('_auth') - doKey('otp') - doKey('always-auth', 'alwaysAuth') - if (AUTH.password) - AUTH.password = Buffer.from(AUTH.password, 'base64').toString('utf8') - - if (AUTH._auth && !(AUTH.username && AUTH.password)) { - let auth = Buffer.from(AUTH._auth, 'base64').toString() - auth = auth.split(':') - AUTH.username = auth.shift() - AUTH.password = auth.join(':') + // can be either //host/some/path/:_auth or //host/some/path:_auth + // walk up by removing EITHER what's after the slash OR the slash itself + regKey = regKey.replace(/([^/]+|\/)$/, '') } - AUTH.alwaysAuth = AUTH.alwaysAuth === 'false' ? false : !!AUTH.alwaysAuth - return AUTH } -function addKey (opts, obj, scope, key, objKey) { - if (opts[key]) - obj[objKey || key] = opts[key] +const hasAuth = (regKey, opts) => ( + opts[`${regKey}:_authToken`] || + opts[`${regKey}:_auth`] || + opts[`${regKey}:username`] && opts[`${regKey}:_password`] +) + +const sameHost = (a, b) => { + const parsedA = new URL(a) + const parsedB = new URL(b) + return parsedA.host === parsedB.host +} - if (scope && opts[`${scope}:${key}`]) - obj[objKey || key] = opts[`${scope}:${key}`] +const getRegistry = opts => { + const { spec } = opts + const { scope: specScope, subSpec } = spec ? npa(spec) : {} + const subSpecScope = subSpec && subSpec.scope + const scope = subSpec ? subSpecScope : specScope + const scopeReg = scope && opts[`${scope}:registry`] + return scopeReg || opts.registry } -// Called a nerf dart in the main codebase. Used as a "safe" -// key when fetching registry info from config. -function registryKey (registry) { - const parsed = new url.URL(registry) - const formatted = url.format({ - protocol: parsed.protocol, - host: parsed.host, - pathname: parsed.pathname, - slashes: true, +const getAuth = (uri, opts = {}) => { + const { forceAuth } = opts + if (!uri) + throw new Error('URI is required') + const regKey = regKeyFromURI(uri, forceAuth || opts) + + // we are only allowed to use what's in forceAuth if specified + if (forceAuth && !regKey) { + return new Auth({ + scopeAuthKey: null, + token: forceAuth._authToken || forceAuth.token, + username: forceAuth.username, + password: forceAuth._password || forceAuth.password, + auth: forceAuth._auth || forceAuth.auth, + }) + } + + // no auth for this URI, but might have it for the registry + if (!regKey) { + const registry = getRegistry(opts) + if (registry && uri !== registry && sameHost(uri, registry)) + return getAuth(registry, opts) + else if (registry !== opts.registry) { + // If making a tarball request to a different base URI than the + // registry where we logged in, but the same auth SHOULD be sent + // to that artifact host, then we track where it was coming in from, + // and warn the user if we get a 4xx error on it. 
+ const scopeAuthKey = regKeyFromURI(registry, opts) + return new Auth({ scopeAuthKey }) + } + } + + const { + [`${regKey}:_authToken`]: token, + [`${regKey}:username`]: username, + [`${regKey}:_password`]: password, + [`${regKey}:_auth`]: auth, + } = opts + + return new Auth({ + scopeAuthKey: null, + token, + auth, + username, + password, }) - return url.format(new url.URL('.', formatted)).replace(/^[^:]+:/, '') } + +class Auth { + constructor ({ token, auth, username, password, scopeAuthKey }) { + this.scopeAuthKey = scopeAuthKey + this.token = null + this.auth = null + this.isBasicAuth = false + if (token) + this.token = token + else if (auth) + this.auth = auth + else if (username && password) { + const p = Buffer.from(password, 'base64').toString('utf8') + this.auth = Buffer.from(`${username}:${p}`, 'utf8').toString('base64') + this.isBasicAuth = true + } + } +} + +module.exports = getAuth diff --git a/node_modules/npm-registry-fetch/check-response.js b/node_modules/npm-registry-fetch/check-response.js index 5154da5349f76..8bd85661ee8ca 100644 --- a/node_modules/npm-registry-fetch/check-response.js +++ b/node_modules/npm-registry-fetch/check-response.js @@ -1,35 +1,46 @@ 'use strict' const errors = require('./errors.js') -const LRU = require('lru-cache') const { Response } = require('minipass-fetch') const defaultOpts = require('./default-opts.js') -module.exports = checkResponse -function checkResponse (method, res, registry, startTime, opts_ = {}) { - const opts = { ...defaultOpts, ...opts_ } - if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) - opts.log.notice('', res.headers.get('npm-notice')) +const checkResponse = + async ({ method, uri, res, registry, startTime, auth, opts }) => { + opts = { ...defaultOpts, ...opts } + if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) + opts.log.notice('', res.headers.get('npm-notice')) + + if (res.status >= 400) { + logRequest(method, res, startTime, opts) + if (auth && auth.scopeAuthKey && !auth.token && !auth.auth) { + // we didn't have auth for THIS request, but we do have auth for + // requests to the registry indicated by the spec's scope value. + // Warn the user. + opts.log.warn('registry', `No auth for URI, but auth present for scoped registry. - checkWarnings(res, registry, opts) - if (res.status >= 400) { - logRequest(method, res, startTime, opts) - return checkErrors(method, res, startTime, opts) - } else { - res.body.on('end', () => logRequest(method, res, startTime, opts)) - if (opts.ignoreBody) { - res.body.resume() - return new Response(null, res) +URI: ${uri} +Scoped Registry Key: ${auth.scopeAuthKey} + +More info here: https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry`) + } + return checkErrors(method, res, startTime, opts) + } else { + res.body.on('end', () => logRequest(method, res, startTime, opts)) + if (opts.ignoreBody) { + res.body.resume() + return new Response(null, res) + } + return res } - return res } -} +module.exports = checkResponse function logRequest (method, res, startTime, opts) { const elapsedTime = Date.now() - startTime const attempt = res.headers.get('x-fetch-attempts') const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : '' - const cacheStr = res.headers.get('x-local-cache') ? ' (from cache)' : '' + const cacheStatus = res.headers.get('x-local-cache-status') + const cacheStr = cacheStatus ? 
` (cache ${cacheStatus})` : '' let urlStr try { @@ -49,46 +60,6 @@ function logRequest (method, res, startTime, opts) { ) } -const WARNING_REGEXP = /^\s*(\d{3})\s+(\S+)\s+"(.*)"\s+"([^"]+)"/ -const BAD_HOSTS = new LRU({ max: 50 }) - -function checkWarnings (res, registry, opts) { - if (res.headers.has('warning') && !BAD_HOSTS.has(registry)) { - const warnings = {} - // note: headers.raw() will preserve case, so we might have a - // key on the object like 'WaRnInG' if that was used first - for (const [key, value] of Object.entries(res.headers.raw())) { - if (key.toLowerCase() !== 'warning') - continue - value.forEach(w => { - const match = w.match(WARNING_REGEXP) - if (match) { - warnings[match[1]] = { - code: match[1], - host: match[2], - message: match[3], - date: new Date(match[4]), - } - } - }) - } - BAD_HOSTS.set(registry, true) - if (warnings['199']) { - if (warnings['199'].message.match(/ENOTFOUND/)) - opts.log.warn('registry', `Using stale data from ${registry} because the host is inaccessible -- are you offline?`) - else - opts.log.warn('registry', `Unexpected warning for ${registry}: ${warnings['199'].message}`) - } - if (warnings['111']) { - // 111 Revalidation failed -- we're using stale data - opts.log.warn( - 'registry', - `Using stale data from ${registry} due to a request error during revalidation.` - ) - } - } -} - function checkErrors (method, res, startTime, opts) { return res.buffer() .catch(() => null) @@ -115,7 +86,8 @@ function checkErrors (method, res, startTime, opts) { ) } } else if (res.status === 401 && body != null && /one-time pass/.test(body.toString('utf8'))) { - // Heuristic for malformed OTP responses that don't include the www-authenticate header. + // Heuristic for malformed OTP responses that don't include the + // www-authenticate header. 
throw new errors.HttpErrorAuthOTP( method, res, parsed, opts.spec ) diff --git a/node_modules/npm-registry-fetch/default-opts.js b/node_modules/npm-registry-fetch/default-opts.js index fb8021d6b742f..9ca3f97d0352e 100644 --- a/node_modules/npm-registry-fetch/default-opts.js +++ b/node_modules/npm-registry-fetch/default-opts.js @@ -1,7 +1,5 @@ const pkg = require('./package.json') -const ciDetect = require('@npmcli/ci-detect') module.exports = { - isFromCI: ciDetect(), log: require('./silentlog.js'), maxSockets: 12, method: 'GET', diff --git a/node_modules/npm-registry-fetch/errors.js b/node_modules/npm-registry-fetch/errors.js index 69671551dc619..e65e5fbd80dda 100644 --- a/node_modules/npm-registry-fetch/errors.js +++ b/node_modules/npm-registry-fetch/errors.js @@ -22,6 +22,7 @@ function packageName (href) { class HttpErrorBase extends Error { constructor (method, res, body, spec) { super() + this.name = this.constructor.name this.headers = res.headers.raw() this.statusCode = res.status this.code = `E${res.status}` diff --git a/node_modules/npm-registry-fetch/index.js b/node_modules/npm-registry-fetch/index.js index df3b49eb52969..35fab75bcade9 100644 --- a/node_modules/npm-registry-fetch/index.js +++ b/node_modules/npm-registry-fetch/index.js @@ -1,5 +1,6 @@ 'use strict' +const { HttpErrorAuthOTP } = require('./errors.js') const checkResponse = require('./check-response.js') const getAuth = require('./auth.js') const fetch = require('make-fetch-happen') @@ -27,26 +28,32 @@ function regFetch (uri, /* istanbul ignore next */ opts_ = {}) { ...defaultOpts, ...opts_, } - const registry = opts.registry = ( - (opts.spec && pickRegistry(opts.spec, opts)) || - opts.registry || - /* istanbul ignore next */ - 'https://registry.npmjs.org/' - ) - - if (!urlIsValid(uri)) { + + // if we did not get a fully qualified URI, then we look at the registry + // config or relevant scope to resolve it. + const uriValid = urlIsValid(uri) + let registry = opts.registry || defaultOpts.registry + if (!uriValid) { + registry = opts.registry = ( + (opts.spec && pickRegistry(opts.spec, opts)) || + opts.registry || + registry + ) uri = `${ registry.trim().replace(/\/?$/g, '') }/${ uri.trim().replace(/^\//, '') }` + // asserts that this is now valid + new url.URL(uri) } const method = opts.method || 'GET' // through that takes into account the scope, the prefix of `uri`, etc const startTime = Date.now() - const headers = getHeaders(registry, uri, opts) + const auth = getAuth(uri, opts) + const headers = getHeaders(uri, auth, opts) let body = opts.body const bodyIsStream = Minipass.isStream(body) const bodyIsPromise = body && @@ -92,34 +99,57 @@ function regFetch (uri, /* istanbul ignore next */ opts_ = {}) { opts.preferOnline = true } - const doFetch = (body) => fetch(uri, { - agent: opts.agent, - algorithms: opts.algorithms, - body, - cache: getCacheMode(opts), - cacheManager: opts.cache, - ca: opts.ca, - cert: opts.cert, - headers, - integrity: opts.integrity, - key: opts.key, - localAddress: opts.localAddress, - maxSockets: opts.maxSockets, - memoize: opts.memoize, - method: method, - noProxy: opts.noProxy, - proxy: opts.httpsProxy || opts.proxy, - retry: opts.retry ? 
opts.retry : { - retries: opts.fetchRetries, - factor: opts.fetchRetryFactor, - minTimeout: opts.fetchRetryMintimeout, - maxTimeout: opts.fetchRetryMaxtimeout, - }, - strictSSL: opts.strictSSL, - timeout: opts.timeout || 30 * 1000, - }).then(res => checkResponse( - method, res, registry, startTime, opts - )) + const doFetch = async body => { + const p = fetch(uri, { + agent: opts.agent, + algorithms: opts.algorithms, + body, + cache: getCacheMode(opts), + cacheManager: opts.cache, + ca: opts.ca, + cert: opts.cert, + headers, + integrity: opts.integrity, + key: opts.key, + localAddress: opts.localAddress, + maxSockets: opts.maxSockets, + memoize: opts.memoize, + method: method, + noProxy: opts.noProxy, + proxy: opts.httpsProxy || opts.proxy, + retry: opts.retry ? opts.retry : { + retries: opts.fetchRetries, + factor: opts.fetchRetryFactor, + minTimeout: opts.fetchRetryMintimeout, + maxTimeout: opts.fetchRetryMaxtimeout, + }, + strictSSL: opts.strictSSL, + timeout: opts.timeout || 30 * 1000, + }).then(res => checkResponse({ + method, + uri, + res, + registry, + startTime, + auth, + opts, + })) + + if (typeof opts.otpPrompt === 'function') { + return p.catch(async er => { + if (er instanceof HttpErrorAuthOTP) { + // if otp fails to complete, we fail with that failure + const otp = await opts.otpPrompt() + // if no otp provided, throw the original HTTP error + if (!otp) + throw er + return regFetch(uri, { ...opts, otp }) + } + throw er + }) + } else + return p + } return Promise.resolve(body).then(doFetch) } @@ -130,7 +160,8 @@ function fetchJSON (uri, opts) { } module.exports.json.stream = fetchJSONStream -function fetchJSONStream (uri, jsonPath, /* istanbul ignore next */ opts_ = {}) { +function fetchJSONStream (uri, jsonPath, + /* istanbul ignore next */ opts_ = {}) { const opts = { ...defaultOpts, ...opts_ } const parser = JSONStream.parse(jsonPath, opts.mapJSON) regFetch(uri, opts).then(res => @@ -151,7 +182,7 @@ function pickRegistry (spec, opts = {}) { registry = opts[opts.scope.replace(/^@?/, '@') + ':registry'] if (!registry) - registry = opts.registry || 'https://registry.npmjs.org/' + registry = opts.registry || defaultOpts.registry return registry } @@ -163,9 +194,8 @@ function getCacheMode (opts) { : 'default' } -function getHeaders (registry, uri, opts) { +function getHeaders (uri, auth, opts) { const headers = Object.assign({ - 'npm-in-ci': !!opts.isFromCI, 'user-agent': opts.userAgent, }, opts.headers || {}) @@ -178,25 +208,15 @@ function getHeaders (registry, uri, opts) { if (opts.npmCommand) headers['npm-command'] = opts.npmCommand - const auth = getAuth(registry, opts) // If a tarball is hosted on a different place than the manifest, only send // credentials on `alwaysAuth` - const shouldAuth = ( - auth.alwaysAuth || - new url.URL(uri).host === new url.URL(registry).host - ) - if (shouldAuth && auth.token) + if (auth.token) headers.authorization = `Bearer ${auth.token}` - else if (shouldAuth && auth.username && auth.password) { - const encoded = Buffer.from( - `${auth.username}:${auth.password}`, 'utf8' - ).toString('base64') - headers.authorization = `Basic ${encoded}` - } else if (shouldAuth && auth._auth) - headers.authorization = `Basic ${auth._auth}` - - if (shouldAuth && auth.otp) - headers['npm-otp'] = auth.otp + else if (auth.auth) + headers.authorization = `Basic ${auth.auth}` + + if (opts.otp) + headers['npm-otp'] = opts.otp return headers } diff --git a/node_modules/npm-registry-fetch/package.json b/node_modules/npm-registry-fetch/package.json index 
40e0067b4aedb..e4eaabaa5b09a 100644 --- a/node_modules/npm-registry-fetch/package.json +++ b/node_modules/npm-registry-fetch/package.json @@ -1,6 +1,6 @@ { "name": "npm-registry-fetch", - "version": "9.0.0", + "version": "11.0.0", "description": "Fetch-based http client for use with npm registry APIs", "main": "index.js", "files": [ @@ -8,13 +8,15 @@ ], "scripts": { "eslint": "eslint", - "lint": "npm run eslint -- *.js test/*.js", + "lint": "npm run npmclilint -- \"*.*js\" \"test/**/*.*js\"", "lintfix": "npm run lint -- --fix", "prepublishOnly": "git push origin --follow-tags", "preversion": "npm test", "postversion": "npm publish", "test": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint --", + "npmclilint": "npmcli-lint", + "postsnap": "npm run lintfix --" }, "repository": "https://github.com/npm/npm-registry-fetch", "keywords": [ @@ -29,9 +31,7 @@ }, "license": "ISC", "dependencies": { - "@npmcli/ci-detect": "^1.0.0", - "lru-cache": "^6.0.0", - "make-fetch-happen": "^8.0.9", + "make-fetch-happen": "^9.0.1", "minipass": "^3.1.3", "minipass-fetch": "^1.3.0", "minipass-json-stream": "^1.0.1", @@ -39,19 +39,13 @@ "npm-package-arg": "^8.0.0" }, "devDependencies": { + "@npmcli/lint": "^1.0.1", "cacache": "^15.0.0", - "eslint": "^6.8.0", - "eslint-plugin-import": "^2.18.2", - "eslint-plugin-node": "^10.0.0", - "eslint-plugin-promise": "^4.2.1", - "eslint-plugin-standard": "^4.0.1", - "mkdirp": "^0.5.1", - "nock": "^11.7.0", + "nock": "^13.1.0", "npmlog": "^4.1.2", "require-inject": "^1.4.4", - "rimraf": "^2.6.2", "ssri": "^8.0.0", - "tap": "^14.10.7" + "tap": "^15.0.4" }, "tap": { "check-coverage": true, diff --git a/node_modules/npm-user-validate/README.md b/node_modules/npm-user-validate/README.md deleted file mode 100644 index 53bdae5af0670..0000000000000 --- a/node_modules/npm-user-validate/README.md +++ /dev/null @@ -1,6 +0,0 @@ -[![Build Status](https://travis-ci.org/npm/npm-user-validate.png?branch=master)](https://travis-ci.org/npm/npm-user-validate) -[![devDependency Status](https://david-dm.org/npm/npm-user-validate/dev-status.png)](https://david-dm.org/npm/npm-user-validate#info=devDependencies) - -# npm-user-validate - -Validation for the npm client and npm-www (and probably other npm projects) diff --git a/node_modules/npmlog/CHANGELOG.md b/node_modules/npmlog/CHANGELOG.md deleted file mode 100644 index 51e4abc0a4075..0000000000000 --- a/node_modules/npmlog/CHANGELOG.md +++ /dev/null @@ -1,49 +0,0 @@ -### v4.0.2 - -* Added installation instructions. - -### v4.0.1 - -* Fix bugs where `log.progressEnabled` got out of sync with how `gauge` kept - track of these things resulting in a progressbar that couldn't be disabled. - -### v4.0.0 - -* Allow creating log levels that are an empty string or 0. - -### v3.1.2 - -* Update to `gauge@1.6.0` adding support for default values for template - items. - -### v3.1.1 - -* Update to `gauge@1.5.3` to fix to `1.x` compatibility when it comes to - when a progress bar is enabled. In `1.x` if you didn't have a TTY the - progress bar was never shown. In `2.x` it merely defaults to disabled, - but you can enable it explicitly if you still want progress updates. - -### v3.1.0 - -* Update to `gauge@2.5.2`: - * Updates the `signal-exit` dependency which fixes an incompatibility with - the node profiler. - * Uses externalizes its ansi code generation in `console-control-strings` -* Make the default progress bar include the last line printed, colored as it - would be when printing to a tty. 
- -### v3.0.0 - -* Switch to `gauge@2.0.0`, for better performance, better look. -* Set stderr/stdout blocking if they're tty's, so that we can hide a - progress bar going to stderr and then safely print to stdout. Without - this the two can end up overlapping producing confusing and sometimes - corrupted output. - -### v2.0.0 - -* Make the `error` event non-fatal so that folks can use it as a prefix. - -### v1.0.0 - -* Add progress bar with `gauge@1.1.0` diff --git a/node_modules/npmlog/README.md b/node_modules/npmlog/README.md deleted file mode 100644 index 268a4af41d628..0000000000000 --- a/node_modules/npmlog/README.md +++ /dev/null @@ -1,216 +0,0 @@ -# npmlog - -The logger util that npm uses. - -This logger is very basic. It does the logging for npm. It supports -custom levels and colored output. - -By default, logs are written to stderr. If you want to send log messages -to outputs other than streams, then you can change the `log.stream` -member, or you can just listen to the events that it emits, and do -whatever you want with them. - -# Installation - -```console -npm install npmlog --save -``` - -# Basic Usage - -```javascript -var log = require('npmlog') - -// additional stuff ---------------------------+ -// message ----------+ | -// prefix ----+ | | -// level -+ | | | -// v v v v - log.info('fyi', 'I have a kitty cat: %j', myKittyCat) -``` - -## log.level - -* {String} - -The level to display logs at. Any logs at or above this level will be -displayed. The special level `silent` will prevent anything from being -displayed ever. - -## log.record - -* {Array} - -An array of all the log messages that have been entered. - -## log.maxRecordSize - -* {Number} - -The maximum number of records to keep. If log.record gets bigger than -10% over this value, then it is sliced down to 90% of this value. - -The reason for the 10% window is so that it doesn't have to resize a -large array on every log entry. - -## log.prefixStyle - -* {Object} - -A style object that specifies how prefixes are styled. (See below) - -## log.headingStyle - -* {Object} - -A style object that specifies how the heading is styled. (See below) - -## log.heading - -* {String} Default: "" - -If set, a heading that is printed at the start of every line. - -## log.stream - -* {Stream} Default: `process.stderr` - -The stream where output is written. - -## log.enableColor() - -Force colors to be used on all messages, regardless of the output -stream. - -## log.disableColor() - -Disable colors on all messages. - -## log.enableProgress() - -Enable the display of log activity spinner and progress bar - -## log.disableProgress() - -Disable the display of a progress bar - -## log.enableUnicode() - -Force the unicode theme to be used for the progress bar. - -## log.disableUnicode() - -Disable the use of unicode in the progress bar. - -## log.setGaugeTemplate(template) - -Set a template for outputting the progress bar. See the [gauge documentation] for details. - -[gauge documentation]: https://npmjs.com/package/gauge - -## log.setGaugeThemeset(themes) - -Select a themeset to pick themes from for the progress bar. See the [gauge documentation] for details. - -## log.pause() - -Stop emitting messages to the stream, but do not drop them. - -## log.resume() - -Emit all buffered messages that were written while paused. - -## log.log(level, prefix, message, ...) - -* `level` {String} The level to emit the message at -* `prefix` {String} A string prefix. Set to "" to skip. 
-* `message...` Arguments to `util.format` - -Emit a log message at the specified level. - -## log\[level](prefix, message, ...) - -For example, - -* log.silly(prefix, message, ...) -* log.verbose(prefix, message, ...) -* log.info(prefix, message, ...) -* log.http(prefix, message, ...) -* log.warn(prefix, message, ...) -* log.error(prefix, message, ...) - -Like `log.log(level, prefix, message, ...)`. In this way, each level is -given a shorthand, so you can do `log.info(prefix, message)`. - -## log.addLevel(level, n, style, disp) - -* `level` {String} Level indicator -* `n` {Number} The numeric level -* `style` {Object} Object with fg, bg, inverse, etc. -* `disp` {String} Optional replacement for `level` in the output. - -Sets up a new level with a shorthand function and so forth. - -Note that if the number is `Infinity`, then setting the level to that -will cause all log messages to be suppressed. If the number is -`-Infinity`, then the only way to show it is to enable all log messages. - -## log.newItem(name, todo, weight) - -* `name` {String} Optional; progress item name. -* `todo` {Number} Optional; total amount of work to be done. Default 0. -* `weight` {Number} Optional; the weight of this item relative to others. Default 1. - -This adds a new `are-we-there-yet` item tracker to the progress tracker. The -object returned has the `log[level]` methods but is otherwise an -`are-we-there-yet` `Tracker` object. - -## log.newStream(name, todo, weight) - -This adds a new `are-we-there-yet` stream tracker to the progress tracker. The -object returned has the `log[level]` methods but is otherwise an -`are-we-there-yet` `TrackerStream` object. - -## log.newGroup(name, weight) - -This adds a new `are-we-there-yet` tracker group to the progress tracker. The -object returned has the `log[level]` methods but is otherwise an -`are-we-there-yet` `TrackerGroup` object. - -# Events - -Events are all emitted with the message object. - -* `log` Emitted for all messages -* `log.<level>` Emitted for all messages with the `<level>` level. -* `<prefix>` Messages with prefixes also emit their prefix as an event. - -# Style Objects - -Style objects can have the following fields: - -* `fg` {String} Color for the foreground text -* `bg` {String} Color for the background -* `bold`, `inverse`, `underline` {Boolean} Set the associated property -* `bell` {Boolean} Make a noise (This is pretty annoying, probably.) - -# Message Objects - -Every log event is emitted with a message object, and the `log.record` -list contains all of them that have been created. They have the -following fields: - -* `id` {Number} -* `level` {String} -* `prefix` {String} -* `message` {String} Result of `util.format()` -* `messageRaw` {Array} Arguments to `util.format()` - -# Blocking TTYs - -We use [`set-blocking`](https://npmjs.com/package/set-blocking) to set -stderr and stdout blocking if they are tty's and have the setBlocking call. -This is a work around for an issue in early versions of Node.js 6.x, which -made stderr and stdout non-blocking on OSX. (They are always blocking -Windows and were never blocking on Linux.) `npmlog` needs them to be blocking -so that it can allow output to stdout and stderr to be interlaced. 
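As a quick orientation for the `log.js` changes that follow, here is a small sketch of the npmlog API the README above documented (illustration only, not part of this patch):

```js
// Minimal npmlog usage, per the README removed above.
var log = require('npmlog')

log.heading = 'npm'    // printed at the start of every line
log.level = 'verbose'  // show everything at or above 'verbose'

// addLevel(level, n, style, disp) also creates the log.success() shorthand
log.addLevel('success', 3001, { fg: 'green', bold: true }, 'OK')
log.success('install', 'added %d packages', 42)

// newItem() returns an are-we-there-yet Tracker mixed with the log methods
log.enableProgress()
var item = log.newItem('fetch manifests', 10)
item.info('fetch', 'working...')
item.finish()
log.disableProgress()
```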
diff --git a/node_modules/npmlog/log.js b/node_modules/npmlog/log.js index 341f3313ab354..069154262e4da 100644 --- a/node_modules/npmlog/log.js +++ b/node_modules/npmlog/log.js @@ -13,11 +13,12 @@ var stream = process.stderr Object.defineProperty(log, 'stream', { set: function (newStream) { stream = newStream - if (this.gauge) this.gauge.setWriteTo(stream, stream) + if (this.gauge) + this.gauge.setWriteTo(stream, stream) }, get: function () { return stream - } + }, }) // by default, decide based on tty-ness. @@ -46,8 +47,8 @@ log.gauge = new Gauge(stream, { {type: 'activityIndicator', kerning: 1, length: 1}, {type: 'section', default: ''}, ':', - {type: 'logline', kerning: 1, default: ''} - ] + {type: 'logline', kerning: 1, default: ''}, + ], }) log.tracker = new Progress.TrackerGroup() @@ -77,15 +78,20 @@ log.setGaugeTemplate = function (template) { } log.enableProgress = function () { - if (this.progressEnabled) return + if (this.progressEnabled) + return + this.progressEnabled = true this.tracker.on('change', this.showProgress) - if (this._pause) return + if (this._paused) + return + this.gauge.enable() } log.disableProgress = function () { - if (!this.progressEnabled) return + if (!this.progressEnabled) + return this.progressEnabled = false this.tracker.removeListener('change', this.showProgress) this.gauge.disable() @@ -97,10 +103,20 @@ var mixinLog = function (tracker) { // mixin the public methods from log into the tracker // (except: conflicts and one's we handle specially) Object.keys(log).forEach(function (P) { - if (P[0] === '_') return - if (trackerConstructors.filter(function (C) { return C === P }).length) return - if (tracker[P]) return - if (typeof log[P] !== 'function') return + if (P[0] === '_') + return + + if (trackerConstructors.filter(function (C) { + return C === P + }).length) + return + + if (tracker[P]) + return + + if (typeof log[P] !== 'function') + return + var func = log[P] tracker[P] = function () { return func.apply(log, arguments) @@ -111,7 +127,9 @@ var mixinLog = function (tracker) { if (tracker instanceof Progress.TrackerGroup) { trackerConstructors.forEach(function (C) { var func = tracker[C] - tracker[C] = function () { return mixinLog(func.apply(tracker, arguments)) } + tracker[C] = function () { + return mixinLog(func.apply(tracker, arguments)) + } }) } return tracker @@ -119,24 +137,34 @@ var mixinLog = function (tracker) { // Add tracker constructors to the top level log object trackerConstructors.forEach(function (C) { - log[C] = function () { return mixinLog(this.tracker[C].apply(this.tracker, arguments)) } + log[C] = function () { + return mixinLog(this.tracker[C].apply(this.tracker, arguments)) + } }) log.clearProgress = function (cb) { - if (!this.progressEnabled) return cb && process.nextTick(cb) + if (!this.progressEnabled) + return cb && process.nextTick(cb) + this.gauge.hide(cb) } log.showProgress = function (name, completed) { - if (!this.progressEnabled) return + if (!this.progressEnabled) + return + var values = {} - if (name) values.section = name + if (name) + values.section = name + var last = log.record[log.record.length - 1] if (last) { values.subsection = last.prefix var disp = log.disp[last.level] || last.level var logline = this._format(disp, log.style[last.level]) - if (last.prefix) logline += ' ' + this._format(last.prefix, this.prefixStyle) + if (last.prefix) + logline += ' ' + this._format(last.prefix, this.prefixStyle) + logline += ' ' + last.message.split(/\r?\n/)[0] values.logline = logline } @@ -147,11 +175,14 @@ 
log.showProgress = function (name, completed) { // temporarily stop emitting, but don't drop log.pause = function () { this._paused = true - if (this.progressEnabled) this.gauge.disable() + if (this.progressEnabled) + this.gauge.disable() } log.resume = function () { - if (!this._paused) return + if (!this._paused) + return + this._paused = false var b = this._buffer @@ -159,7 +190,8 @@ log.resume = function () { b.forEach(function (m) { this.emitLog(m) }, this) - if (this.progressEnabled) this.gauge.enable() + if (this.progressEnabled) + this.gauge.enable() } log._buffer = [] @@ -180,28 +212,30 @@ log.log = function (lvl, prefix, message) { var arg = a[i - 2] = arguments[i] // resolve stack traces to a plain string. - if (typeof arg === 'object' && arg && - (arg instanceof Error) && arg.stack) { - + if (typeof arg === 'object' && arg instanceof Error && arg.stack) { Object.defineProperty(arg, 'stack', { value: stack = arg.stack + '', enumerable: true, - writable: true + writable: true, }) } } - if (stack) a.unshift(stack + '\n') + if (stack) + a.unshift(stack + '\n') message = util.format.apply(util, a) - var m = { id: id++, - level: lvl, - prefix: String(prefix || ''), - message: message, - messageRaw: a } + var m = { + id: id++, + level: lvl, + prefix: String(prefix || ''), + message: message, + messageRaw: a, + } this.emit('log', m) this.emit('log.' + lvl, m) - if (m.prefix) this.emit(m.prefix, m) + if (m.prefix) + this.emit(m.prefix, m) this.record.push(m) var mrs = this.maxRecordSize @@ -219,11 +253,18 @@ log.emitLog = function (m) { this._buffer.push(m) return } - if (this.progressEnabled) this.gauge.pulse(m.prefix) + if (this.progressEnabled) + this.gauge.pulse(m.prefix) + var l = this.levels[m.level] - if (l === undefined) return - if (l < this.levels[this.level]) return - if (l > 0 && !isFinite(l)) return + if (l === undefined) + return + + if (l < this.levels[this.level]) + return + + if (l > 0 && !isFinite(l)) + return // If 'disp' is null or undefined, use the lvl as a default // Allows: '', 0 as valid disp @@ -236,7 +277,9 @@ log.emitLog = function (m) { } this.write(disp, log.style[m.level]) var p = m.prefix || '' - if (p) this.write(' ') + if (p) + this.write(' ') + this.write(p, this.prefixStyle) this.write(' ' + line + '\n') }, this) @@ -244,45 +287,62 @@ log.emitLog = function (m) { } log._format = function (msg, style) { - if (!stream) return + if (!stream) + return var output = '' if (this.useColor()) { style = style || {} var settings = [] - if (style.fg) settings.push(style.fg) - if (style.bg) settings.push('bg' + style.bg[0].toUpperCase() + style.bg.slice(1)) - if (style.bold) settings.push('bold') - if (style.underline) settings.push('underline') - if (style.inverse) settings.push('inverse') - if (settings.length) output += consoleControl.color(settings) - if (style.beep) output += consoleControl.beep() + if (style.fg) + settings.push(style.fg) + + if (style.bg) + settings.push('bg' + style.bg[0].toUpperCase() + style.bg.slice(1)) + + if (style.bold) + settings.push('bold') + + if (style.underline) + settings.push('underline') + + if (style.inverse) + settings.push('inverse') + + if (settings.length) + output += consoleControl.color(settings) + + if (style.beep) + output += consoleControl.beep() } output += msg - if (this.useColor()) { + if (this.useColor()) output += consoleControl.color('reset') - } + return output } log.write = function (msg, style) { - if (!stream) return + if (!stream) + return stream.write(this._format(msg, style)) } log.addLevel = function 
(lvl, n, style, disp) { // If 'disp' is null or undefined, use the lvl as a default - if (disp == null) disp = lvl + if (disp == null) + disp = lvl + this.levels[lvl] = n this.style[lvl] = style if (!this[lvl]) { this[lvl] = function () { var a = new Array(arguments.length + 1) a[0] = lvl - for (var i = 0; i < arguments.length; i++) { + for (var i = 0; i < arguments.length; i++) a[i + 1] = arguments[i] - } + return this.log.apply(this, a) }.bind(this) } diff --git a/node_modules/npmlog/package.json b/node_modules/npmlog/package.json index 7220f8e72a3c7..5288b9ca06256 100644 --- a/node_modules/npmlog/package.json +++ b/node_modules/npmlog/package.json @@ -2,7 +2,7 @@ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)", "name": "npmlog", "description": "logger for npm", - "version": "4.1.2", + "version": "5.0.0", "repository": { "type": "git", "url": "https://github.com/npm/npmlog.git" @@ -12,17 +12,22 @@ "log.js" ], "scripts": { - "test": "standard && tap test/*.js" + "test": "tap test/*.js --branches=95", + "npmclilint": "npmcli-lint", + "lint": "npm run npmclilint -- \"*.*js\" \"test/**/*.*js\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint --", + "postsnap": "npm run lintfix --" }, "dependencies": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" + "are-we-there-yet": "^1.1.5", + "console-control-strings": "^1.1.0", + "gauge": "^3.0.0", + "set-blocking": "^2.0.0" }, "devDependencies": { - "standard": "~7.1.2", - "tap": "~5.7.3" + "@npmcli/lint": "^1.0.1", + "tap": "^15.0.9" }, "license": "ISC" } diff --git a/node_modules/oauth-sign/README.md b/node_modules/oauth-sign/README.md deleted file mode 100644 index 549cbbafa4919..0000000000000 --- a/node_modules/oauth-sign/README.md +++ /dev/null @@ -1,11 +0,0 @@ -oauth-sign -========== - -OAuth 1 signing. Formerly a vendor lib in mikeal/request, now a standalone module. - -## Supported Method Signatures - -- HMAC-SHA1 -- HMAC-SHA256 -- RSA-SHA1 -- PLAINTEXT \ No newline at end of file diff --git a/node_modules/once/README.md b/node_modules/once/README.md deleted file mode 100644 index 1f1ffca9330e3..0000000000000 --- a/node_modules/once/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# once - -Only call a function once. - -## usage - -```javascript -var once = require('once') - -function load (file, cb) { - cb = once(cb) - loader.load('file') - loader.once('load', cb) - loader.once('error', cb) -} -``` - -Or add to the Function.prototype in a responsible way: - -```javascript -// only has to be done once -require('once').proto() - -function load (file, cb) { - cb = cb.once() - loader.load('file') - loader.once('load', cb) - loader.once('error', cb) -} -``` - -Ironically, the prototype feature makes this module twice as -complicated as necessary. - -To check whether you function has been called, use `fn.called`. Once the -function is called for the first time the return value of the original -function is saved in `fn.value` and subsequent calls will continue to -return this value. - -```javascript -var once = require('once') - -function load (cb) { - cb = once(cb) - var stream = createStream() - stream.once('data', cb) - stream.once('end', function () { - if (!cb.called) cb(new Error('not found')) - }) -} -``` - -## `once.strict(func)` - -Throw an error if the function is called twice. - -Some functions are expected to be called only once. Using `once` for them would -potentially hide logical errors. 
- -In the example below, the `greet` function has to call the callback only once: - -```javascript -function greet (name, cb) { - // return is missing from the if statement - // when no name is passed, the callback is called twice - if (!name) cb('Hello anonymous') - cb('Hello ' + name) -} - -function log (msg) { - console.log(msg) -} - -// this will print 'Hello anonymous' but the logical error will be missed -greet(null, once(msg)) - -// once.strict will print 'Hello anonymous' and throw an error when the callback will be called the second time -greet(null, once.strict(msg)) -``` diff --git a/node_modules/opener/README.md b/node_modules/opener/README.md deleted file mode 100644 index 51cc1a28c56d3..0000000000000 --- a/node_modules/opener/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# It Opens Stuff - -That is, in your desktop environment. This will make *actual windows pop up*, with stuff in them: - -```bash -npm install opener -g - -opener http://google.com -opener ./my-file.txt -opener firefox -opener npm run lint -``` - -Also if you want to use it programmatically you can do that too: - -```js -var opener = require("opener"); - -opener("http://google.com"); -opener("./my-file.txt"); -opener("firefox"); -opener("npm run lint"); -``` - -Plus, it returns the child process created, so you can do things like let your script exit while the window stays open: - -```js -var editor = opener("documentation.odt"); -editor.unref(); -// These other unrefs may be necessary if your OS's opener process -// exits before the process it started is complete. -editor.stdin.unref(); -editor.stdout.unref(); -editor.stderr.unref(); -``` - -## Use It for Good - -Like opening the user's browser with a test harness in your package's test script: - -```json -{ - "scripts": { - "test": "opener ./test/runner.html" - }, - "devDependencies": { - "opener": "*" - } -} -``` - -## Why - -Because Windows has `start`, Macs have `open`, and *nix has `xdg-open`. At least [according to some person on StackOverflow](http://stackoverflow.com/q/1480971/3191). And I like things that work on all three. Like Node.js. And Opener. diff --git a/node_modules/pacote/README.md b/node_modules/pacote/README.md deleted file mode 100644 index 619e0ec44e8f6..0000000000000 --- a/node_modules/pacote/README.md +++ /dev/null @@ -1,259 +0,0 @@ -# pacote - -Fetches package manifests and tarballs from the npm registry. - -## USAGE - -```js -const pacote = require('pacote') - -// get a package manifest -pacote.manifest('foo@1.x').then(manifest => console.log('got it', manifest)) - -// extract a package into a folder -pacote.extract('github:npm/cli', 'some/path', options) - .then(({from, resolved, integrity}) => { - console.log('extracted!', from, resolved, integrity) - }) - -pacote.tarball('https://server.com/package.tgz').then(data => { - console.log('got ' + data.length + ' bytes of tarball data') -}) -``` - -`pacote` works with any kind of package specifier that npm can install. If -you can pass it to the npm CLI, you can pass it to pacote. (In fact, that's -exactly what the npm CLI does.) - -Anything that you can do with one kind of package, you can do with another. - -Data that isn't relevant (like a packument for a tarball) will be -simulated. - -`prepare` scripts will be run when generating tarballs from `git` and -`directory` locations, to simulate what _would_ be published to the -registry, so that you get a working package instead of just raw source -code that might need to be transpiled. 
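(Illustration, not from the README.) The "any specifier" point above means registry, git, and local directory specs all go through the same calls; for example, with made-up specs:

```js
// Sketch only -- the specs, paths, and cache location here are made up.
const pacote = require('pacote')
const opts = { cache: '/path/to/cache' }

pacote.resolve('abbrev@^1.1.1', opts)          // registry range -> tarball URL
  .then(resolved => console.log(resolved))

pacote.resolve('github:npm/abbrev-js', opts)   // git spec -> repo url + commit
  .then(resolved => console.log(resolved))

pacote.manifest('./some/local/package', opts)  // directory spec -> package.json
  .then(manifest => console.log(manifest.name))
```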
- -## CLI - -This module exports a command line interface that can do most of what is -described below. Run `pacote -h` to learn more. - -``` -Pacote - The JavaScript Package Handler, v10.1.1 - -Usage: - - pacote resolve <spec> - Resolve a specifier and output the fully resolved target - Returns integrity and from if '--long' flag is set. - - pacote manifest <spec> - Fetch a manifest and print to stdout - - pacote packument <spec> - Fetch a full packument and print to stdout - - pacote tarball <spec> [<filename>] - Fetch a package tarball and save to <filename> - If <filename> is missing or '-', the tarball will be streamed to stdout. - - pacote extract <spec> <folder> - Extract a package to the destination folder. - -Configuration values all match the names of configs passed to npm, or -options passed to Pacote. Additional flags for this executable: - - --long Print an object from 'resolve', including integrity and spec. - --json Print result objects as JSON rather than node's default. - (This is the default if stdout is not a TTY.) - --help -h Print this helpful text. - -For example '--cache=/path/to/folder' will use that folder as the cache. -``` - -## API - -The `spec` refers to any kind of package specifier that npm can install. -If you can pass it to the npm CLI, you can pass it to pacote. (In fact, -that's exactly what the npm CLI does.) - -See below for valid `opts` values. - -* `pacote.resolve(spec, opts)` Resolve a specifier like `foo@latest` or - `github:user/project` all the way to a tarball url, tarball file, or git - repo with commit hash. - -* `pacote.extract(spec, dest, opts)` Extract a package's tarball into a - destination folder. Returns a promise that resolves to the - `{from,resolved,integrity}` of the extracted package. - -* `pacote.manifest(spec, opts)` Fetch (or simulate) a package's manifest - (basically, the `package.json` file, plus a bit of metadata). - See below for more on manifests and packuments. Returns a Promise that - resolves to the manifest object. - -* `pacote.packument(spec, opts)` Fetch (or simulate) a package's packument - (basically, the top-level package document listing all the manifests that - the registry returns). See below for more on manifests and packuments. - Returns a Promise that resolves to the packument object. - -* `pacote.tarball(spec, opts)` Get a package tarball data as a buffer in - memory. Returns a Promise that resolves to the tarball data Buffer, with - `from`, `resolved`, and `integrity` fields attached. - -* `pacote.tarball.file(spec, dest, opts)` Save a package tarball data to - a file on disk. Returns a Promise that resolves to - `{from,integrity,resolved}` of the fetched tarball. - -* `pacote.tarball.stream(spec, streamHandler, opts)` Fetch a tarball and - make the stream available to the `streamHandler` function. - - This is mostly an internal function, but it is exposed because it does - provide some functionality that may be difficult to achieve otherwise. - - The `streamHandler` function MUST return a Promise that resolves when - the stream (and all associated work) is ended, or rejects if the stream - has an error. - - The `streamHandler` function MAY be called multiple times, as Pacote - retries requests in some scenarios, such as cache corruption or - retriable network failures. - -### Options - -Options are passed to -[`npm-registry-fetch`](http://npm.im/npm-registry-fetch) and -[`cacache`](http://npm.im/cacache), so in addition to these, anything for -those modules can be given to pacote as well. 
- -Options object is cloned, and mutated along the way to add integrity, -resolved, and other properties, as they are determined. - -* `cache` Where to store cache entries and temp files. Passed to - [`cacache`](http://npm.im/cacache). Defaults to the same cache directory - that npm will use by default, based on platform and environment. -* `where` Base folder for resolving relative `file:` dependencies. -* `resolved` Shortcut for looking up resolved values. Should be specified - if known. -* `integrity` Expected integrity of fetched package tarball. If specified, - tarballs with mismatched integrity values will raise an `EINTEGRITY` - error. -* `umask` Permission mode mask for extracted files and directories. - Defaults to `0o22`. See "Extracted File Modes" below. -* `fmode` Minimum permission mode for extracted files. Defaults to - `0o666`. See "Extracted File Modes" below. -* `dmode` Minimum permission mode for extracted directories. Defaults to - `0o777`. See "Extracted File Modes" below. -* `log` A logger object with methods for various log levels. Typically, - this will be [`npmlog`](http://npm.im/npmlog) in the npm CLI use case, - but if not specified, the default is a logger that emits `'log'` events - on the `process` object. -* `preferOnline` Prefer to revalidate cache entries, even when it would not - be strictly necessary. Default `false`. -* `before` When picking a manifest from a packument, only consider - packages published before the specified date. Default `null`. -* `defaultTag` The default `dist-tag` to use when choosing a manifest from a - packument. Defaults to `latest`. -* `registry` The npm registry to use by default. Defaults to - `https://registry.npmjs.org/`. -* `fullMetadata` Fetch the full metadata from the registry for packuments, - including information not strictly required for installation (author, - description, etc.) Defaults to `true` when `before` is set, since the - version publish time is part of the extended packument metadata. -* `packumentCache` For registry packuments only, you may provide a `Map` - object which will be used to cache packument requests between pacote - calls. This allows you to easily avoid hitting the registry multiple - times (even just to validate the cache) for a given packument, since it - is unlikely to change in the span of a single command. - -## Extracted File Modes - -Files are extracted with a mode matching the following formula: - -``` -( (tarball entry mode value) | (minimum mode option) ) ~ (umask) -``` - -This is in order to prevent unreadable files or unlistable directories from -cluttering a project's `node_modules` folder, even if the package tarball -specifies that the file should be inaccessible. - -It also prevents files from being group- or world-writable without explicit -opt-in by the user, because all file and directory modes are masked against -the `umask` value. - -So, a file which is `0o771` in the tarball, using the default `fmode` of -`0o666` and `umask` of `0o22`, will result in a file mode of `0o755`: - -``` -(0o771 | 0o666) => 0o777 -(0o777 ~ 0o22) => 0o755 -``` - -In almost every case, the defaults are appropriate. To respect exactly -what is in the package tarball (even if this makes an unusable system), set -both `dmode` and `fmode` options to `0`. Otherwise, the `umask` config -should be used in most cases where file mode modifications are required, -and this functions more or less the same as the `umask` value in most Unix -systems. 
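A quick way to sanity-check that formula (the `~ (umask)` step means clearing the umask bits, i.e. a bitwise AND with the complement of the umask):

```js
// Worked example of the extracted file mode formula above.
const fmode = 0o666  // minimum mode option for files (default)
const umask = 0o022  // default umask option
const entry = 0o771  // mode recorded in the tarball entry

const mode = (entry | fmode) & ~umask
console.log(mode.toString(8))  // '755'
```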
- -## Extracted File Ownership - -When running as `root` on Unix systems, all extracted files and folders -will have their owning `uid` and `gid` values set to match the ownership -of the containing folder. - -This prevents `root`-owned files showing up in a project's `node_modules` -folder when a user runs `sudo npm install`. - -## Manifests - -A `manifest` is similar to a `package.json` file. However, it has a few -pieces of extra metadata, and sometimes lacks metadata that is inessential -to package installation. - -In addition to the common `package.json` fields, manifests include: - -* `manifest._resolved` The tarball url or file path where the package - artifact can be found. -* `manifest._from` A normalized form of the spec passed in as an argument. -* `manifest._integrity` The integrity value for the package artifact. -* `manifest.dist` Registry manifests (those included in a packument) have a - `dist` object. Only `tarball` is required, though at least one of - `shasum` or `integrity` is almost always present. - - * `tarball` The url to the associated package artifact. (Copied by - Pacote to `manifest._resolved`.) - * `integrity` The integrity SRI string for the artifact. This may not - be present for older packages on the npm registry. (Copied by Pacote - to `manifest._integrity`.) - * `shasum` Legacy integrity value. Hexadecimal-encoded sha1 hash. - (Converted to an SRI string and copied by Pacote to - `manifest._integrity` when `dist.integrity` is not present.) - * `fileCount` Number of files in the tarball. - * `unpackedSize` Size on disk of the package when unpacked. - * `npm-signature` A signature of the package by the - [`npmregistry`](https://keybase.io/npmregistry) Keybase account. - (Obviously only present for packages published to - `https://registry.npmjs.org`.) - -## Packuments - -A packument is the top-level package document that lists the set of -manifests for available versions for a package. - -When a packument is fetched with `accept: -application/vnd.npm.install-v1+json` in the HTTP headers, only the most -minimum necessary metadata is returned. Additional metadata is returned -when fetched with only `accept: application/json`. - -For Pacote's purposes, the following fields are relevant: - -* `versions` An object where each key is a version, and each value is the - manifest for that version. -* `dist-tags` An object mapping dist-tags to version numbers. This is how - `foo@latest` gets turned into `foo@1.2.3`. -* `time` In the full packument, an object mapping version numbers to - publication times, for the `opts.before` functionality. 
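For reference, a packument trimmed to just the fields named above might look like this (hypothetical values, illustration only):

```js
const packument = {
  'dist-tags': { latest: '1.2.3' },  // how foo@latest becomes foo@1.2.3
  versions: {
    '1.2.3': {
      name: 'foo',
      version: '1.2.3',
      dist: {
        tarball: 'https://registry.npmjs.org/foo/-/foo-1.2.3.tgz',
        integrity: 'sha512-<placeholder>',  // SRI string, when present
      },
    },
  },
  // only in the full packument; consulted for the opts.before option
  time: { '1.2.3': '2021-01-01T00:00:00.000Z' },
}
```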
diff --git a/node_modules/pacote/lib/dir.js b/node_modules/pacote/lib/dir.js index 4a89348b9290c..0d3a00d95ae7c 100644 --- a/node_modules/pacote/lib/dir.js +++ b/node_modules/pacote/lib/dir.js @@ -4,11 +4,10 @@ const cacache = require('cacache') const Minipass = require('minipass') const { promisify } = require('util') const readPackageJson = require('read-package-json-fast') -const isPackageBin = require('./util/is-package-bin.js') +const tarCreateOptions = require('./util/tar-create-options.js') const packlist = require('npm-packlist') const tar = require('tar') const _prepareDir = Symbol('_prepareDir') -const _tarcOpts = Symbol('_tarcOpts') const { resolve } = require('path') const runScript = require('@npmcli/run-script') @@ -21,6 +20,11 @@ class DirFetcher extends Fetcher { this.resolved = this.spec.fetchSpec } + // exposes tarCreateOptions as public API + static tarCreateOptions (manifest) { + return tarCreateOptions(manifest) + } + get types () { return ['directory'] } @@ -65,35 +69,12 @@ class DirFetcher extends Fetcher { // pipe to the stream, and proxy errors the chain. this[_prepareDir]() .then(() => packlist({ path: this.resolved })) - .then(files => tar.c(this[_tarcOpts](), files) + .then(files => tar.c(tarCreateOptions(this.package), files) .on('error', er => stream.emit('error', er)).pipe(stream)) .catch(er => stream.emit('error', er)) return stream } - [_tarcOpts] () { - return { - cwd: this.resolved, - prefix: 'package/', - portable: true, - gzip: true, - - // ensure that package bins are always executable - // Note that npm-packlist is already filtering out - // anything that is not a regular file, ignored by - // .npmignore or package.json "files", etc. - filter: (path, stat) => { - if (isPackageBin(this.package, path)) - stat.mode |= 0o111 - return true - }, - - // Provide a specific date in the 1980s for the benefit of zip, - // which is confounded by files dated at the Unix epoch 0. - mtime: new Date('1985-10-26T08:15:00.000Z'), - } - } - manifest () { if (this.package) return Promise.resolve(this.package) diff --git a/node_modules/pacote/lib/fetcher.js b/node_modules/pacote/lib/fetcher.js index c9a3201f0ae4a..69dd025b7bd98 100644 --- a/node_modules/pacote/lib/fetcher.js +++ b/node_modules/pacote/lib/fetcher.js @@ -40,6 +40,7 @@ const _istream = Symbol('_istream') const _assertType = Symbol('_assertType') const _tarballFromCache = Symbol('_tarballFromCache') const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') +const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches') class FetcherBase { constructor (spec, opts) { @@ -118,6 +119,13 @@ class FetcherBase { '--no-progress', '--no-save', '--no-audit', + // override any omit settings from the environment + '--include=dev', + '--include=peer', + '--include=optional', + // we need the actual things, not just the lockfile + '--no-package-lock-only', + '--no-dry-run', ] } @@ -166,25 +174,19 @@ class FetcherBase { } // private, should be overridden. - // Note that they should *not* calculate or check integrity, but *just* - // return the raw tarball data stream. + // Note that they should *not* calculate or check integrity or cache, + // but *just* return the raw tarball data stream. 
[_tarballFromResolved] () { throw this.notImplementedError } // public, should not be overridden tarball () { - return this.tarballStream(stream => new Promise((res, rej) => { - const buf = [] - stream.on('error', er => rej(er)) - stream.on('end', () => { - const data = Buffer.concat(buf) - data.integrity = this.integrity && String(this.integrity) - data.resolved = this.resolved - data.from = this.from - return res(data) - }) - stream.on('data', d => buf.push(d)) + return this.tarballStream(stream => stream.concat().then(data => { + data.integrity = this.integrity && String(this.integrity) + data.resolved = this.resolved + data.from = this.from + return data })) } @@ -194,6 +196,10 @@ class FetcherBase { return cacache.get.stream.byDigest(this.cache, this.integrity, this.opts) } + get [_cacheFetches] () { + return true + } + [_istream] (stream) { // everyone will need one of these, either for verifying or calculating // We always set it, because we have might only have a weak legacy hex @@ -203,7 +209,31 @@ class FetcherBase { // gets to the point of re-setting the integrity. const istream = ssri.integrityStream(this.opts) istream.on('integrity', i => this.integrity = i) - return stream.on('error', er => istream.emit('error', er)).pipe(istream) + stream.on('error', er => istream.emit('error', er)) + + // if not caching this, just pipe through to the istream and return it + if (!this.opts.cache || !this[_cacheFetches]) + return stream.pipe(istream) + + // we have to return a stream that gets ALL the data, and proxies errors, + // but then pipe from the original tarball stream into the cache as well. + // To do this without losing any data, and since the cacache put stream + // is not a passthrough, we have to pipe from the original stream into + // the cache AFTER we pipe into the istream. Since the cache stream + // has an asynchronous flush to write its contents to disk, we need to + // defer the istream end until the cache stream ends. + stream.pipe(istream, { end: false }) + const cstream = cacache.put.stream( + this.opts.cache, + `pacote:tarball:${this.from}`, + this.opts + ) + stream.pipe(cstream) + // defer istream end until after cstream + // cache write errors should not crash the fetch, this is best-effort. + cstream.promise().catch(() => {}).then(() => istream.end()) + + return istream } pickIntegrityAlgorithm () { @@ -232,7 +262,9 @@ class FetcherBase { // An ENOENT trying to read a tgz file, for example, is Right Out. 
isRetriableError (er) { // TODO: check error class, once those are rolled out to our deps - return this.isDataCorruptionError(er) || er.code === 'ENOENT' + return this.isDataCorruptionError(er) || + er.code === 'ENOENT' || + er.code === 'EISDIR' } // Mostly internal, but has some uses @@ -405,6 +437,7 @@ class FetcherBase { return { cwd, noChmod: true, + noMtime: true, filter: (name, entry) => { if (/Link$/.test(entry.type)) return false diff --git a/node_modules/pacote/lib/git.js b/node_modules/pacote/lib/git.js index 973e13ea9be43..18f42547bb3ac 100644 --- a/node_modules/pacote/lib/git.js +++ b/node_modules/pacote/lib/git.js @@ -85,6 +85,9 @@ class GitFetcher extends Fetcher { [_resolvedFromHosted] (hosted) { return this[_resolvedFromRepo](hosted.https && hosted.https()) .catch(er => { + // Throw early since we know pathspec errors will fail again if retried + if (er instanceof git.errors.GitPathspecError) + throw er const ssh = hosted.sshurl && hosted.sshurl() // no fallthrough if we can't fall through or have https auth if (!ssh || hosted.auth) @@ -260,9 +263,11 @@ class GitFetcher extends Fetcher { // is present, otherwise ssh if the hosted type provides it [_cloneHosted] (ref, tmp) { const hosted = this.spec.hosted - const https = hosted.https() return this[_cloneRepo](hosted.https({ noCommittish: true }), ref, tmp) .catch(er => { + // Throw early since we know pathspec errors will fail again if retried + if (er instanceof git.errors.GitPathspecError) + throw er const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true }) // no fallthrough if we can't fall through or have https auth if (!ssh || hosted.auth) diff --git a/node_modules/pacote/lib/index.js b/node_modules/pacote/lib/index.js index 546ba960baa2e..cbcbd7c92d15f 100644 --- a/node_modules/pacote/lib/index.js +++ b/node_modules/pacote/lib/index.js @@ -1,5 +1,16 @@ const { get } = require('./fetcher.js') +const GitFetcher = require('./git.js') +const RegistryFetcher = require('./registry.js') +const FileFetcher = require('./file.js') +const DirFetcher = require('./dir.js') +const RemoteFetcher = require('./remote.js') + module.exports = { + GitFetcher, + RegistryFetcher, + FileFetcher, + DirFetcher, + RemoteFetcher, resolve: (spec, opts) => get(spec, opts).resolve(), extract: (spec, dest, opts) => get(spec, opts).extract(dest), manifest: (spec, opts) => get(spec, opts).manifest(), diff --git a/node_modules/pacote/lib/registry.js b/node_modules/pacote/lib/registry.js index 537610d2990d0..e0a310717420d 100644 --- a/node_modules/pacote/lib/registry.js +++ b/node_modules/pacote/lib/registry.js @@ -3,6 +3,7 @@ const RemoteFetcher = require('./remote.js') const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') const pacoteVersion = require('../package.json').version const npa = require('npm-package-arg') +const rpj = require('read-package-json-fast') const pickManifest = require('npm-pick-manifest') const ssri = require('ssri') const Minipass = require('minipass') @@ -156,7 +157,8 @@ class RegistryFetcher extends Fetcher { } if (this.integrity) mani._integrity = String(this.integrity) - return this.package = mani + this.package = rpj.normalize(mani) + return this.package }) } diff --git a/node_modules/pacote/lib/remote.js b/node_modules/pacote/lib/remote.js index 91f6eb59daa6f..727a8bfc8e608 100644 --- a/node_modules/pacote/lib/remote.js +++ b/node_modules/pacote/lib/remote.js @@ -8,6 +8,7 @@ const Minipass = require('minipass') // The default registry URL is a string of great magic. 
const magic = /^https?:\/\/registry\.npmjs\.org\// +const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches') const _headers = Symbol('_headers') class RemoteFetcher extends Fetcher { constructor (spec, opts) { @@ -21,6 +22,12 @@ class RemoteFetcher extends Fetcher { this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}` } + // Don't need to cache tarball fetches in pacote, because make-fetch-happen + // will write into cacache anyway. + get [_cacheFetches] () { + return false + } + [_tarballFromResolved] () { const stream = new Minipass() const fetchOpts = { diff --git a/node_modules/pacote/lib/util/tar-create-options.js b/node_modules/pacote/lib/util/tar-create-options.js new file mode 100644 index 0000000000000..31ab34c9d949f --- /dev/null +++ b/node_modules/pacote/lib/util/tar-create-options.js @@ -0,0 +1,30 @@ +const isPackageBin = require('./is-package-bin.js') + +const tarCreateOptions = manifest => ({ + cwd: manifest._resolved, + prefix: 'package/', + portable: true, + gzip: { + // forcing the level to 9 seems to avoid some + // platform specific optimizations that cause + // integrity mismatch errors due to differing + // end results after compression + level: 9 + }, + + // ensure that package bins are always executable + // Note that npm-packlist is already filtering out + // anything that is not a regular file, ignored by + // .npmignore or package.json "files", etc. + filter: (path, stat) => { + if (isPackageBin(manifest, path)) + stat.mode |= 0o111 + return true + }, + + // Provide a specific date in the 1980s for the benefit of zip, + // which is confounded by files dated at the Unix epoch 0. + mtime: new Date('1985-10-26T08:15:00.000Z'), +}) + +module.exports = tarCreateOptions diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json index a1668056f9794..437bb8f79e1d8 100644 --- a/node_modules/pacote/package.json +++ b/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "11.2.7", + "version": "11.3.5", "description": "JavaScript package downloader", "author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)", "bin": { @@ -17,15 +17,12 @@ }, "tap": { "timeout": 300, - "check-coverage": true, - "coverage-map": "map.js", - "esm": false + "coverage-map": "map.js" }, "devDependencies": { "mutate-fs": "^2.1.1", "npm-registry-mock": "^1.3.1", - "require-inject": "^1.4.4", - "tap": "^14.11.0" + "tap": "^15.0.4" }, "files": [ "lib/**/*.js" @@ -36,7 +33,7 @@ "git" ], "dependencies": { - "@npmcli/git": "^2.0.1", + "@npmcli/git": "^2.1.0", "@npmcli/installed-package-contents": "^1.0.6", "@npmcli/promise-spawn": "^1.2.0", "@npmcli/run-script": "^1.8.2", @@ -49,7 +46,7 @@ "npm-package-arg": "^8.0.1", "npm-packlist": "^2.1.4", "npm-pick-manifest": "^6.0.0", - "npm-registry-fetch": "^9.0.0", + "npm-registry-fetch": "^11.0.0", "promise-retry": "^2.0.1", "read-package-json-fast": "^2.0.1", "rimraf": "^3.0.2", diff --git a/node_modules/parse-conflict-json/README.md b/node_modules/parse-conflict-json/README.md deleted file mode 100644 index ee9e4fd564199..0000000000000 --- a/node_modules/parse-conflict-json/README.md +++ /dev/null @@ -1,42 +0,0 @@ -# parse-conflict-json - -Parse a JSON string that has git merge conflicts, resolving if possible. - -If the JSON is valid, it just does `JSON.parse` as normal. - -If either side of the conflict is invalid JSON, then an error is thrown for -that. 
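(Illustration, not from the package.) Assuming diff3-style conflict markers so that a parent section is present, the resolution described in the Algorithm section below works out like this:

```js
// Hypothetical conflicted fragment and its resolution; the expected results
// follow from the Algorithm section below rather than from running the code.
const parseConflictJson = require('parse-conflict-json')

const conflicted = `{
<<<<<<< ours
  "version": "1.0.1"
||||||| parent
  "version": "1.0.0"
=======
  "version": "1.1.0"
>>>>>>> theirs
}`

// default preference 'ours': our change (1.0.0 -> 1.0.1) is applied on top
// of theirs, so:
parseConflictJson(conflicted)                  // => { version: '1.0.1' }

// prefer 'theirs': the sides are swapped, so their change wins:
parseConflictJson(conflicted, null, 'theirs')  // => { version: '1.1.0' }
```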
- -## USAGE - -```js -// after a git merge that left some conflicts there -const data = fs.readFileSync('package-lock.json', 'utf8') - -// reviverFunction is passed to JSON.parse as the reviver function -// preference defaults to 'ours', set to 'theirs' to prefer the other -// side's changes. -const parsed = parseConflictJson(data, reviverFunction, preference) - -// returns true if the data looks like a conflicted diff file -parsed.isDiff(data) -``` - -## Algorithm - -If `prefer` is set to `theirs`, then the vaules of `theirs` and `ours` are -switched in the resolver function. (Ie, we'll apply their changes on top -of our object, rather than the other way around.) - -- Parse the conflicted file into 3 pieces: `ours`, `theirs`, and `parent` - -- Get the [diff](https://github.com/angus-c/just#just-diff) from `parent` - to `ours`. - -- [Apply](https://github.com/angus-c/just#just-diff-apply) each change of - that diff to `theirs`. - - If any change in the diff set cannot be applied (ie, because they - changed an object into a non-object and we changed a field on that - object), then replace the object at the specified path with the object - at the path in `ours`. diff --git a/node_modules/path-parse/.travis.yml b/node_modules/path-parse/.travis.yml deleted file mode 100644 index dae31da968ba1..0000000000000 --- a/node_modules/path-parse/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: node_js -node_js: - - "0.12" - - "0.11" - - "0.10" - - "0.10.12" - - "0.8" - - "0.6" - - "iojs" diff --git a/node_modules/path-parse/README.md b/node_modules/path-parse/README.md deleted file mode 100644 index 05097f86aef36..0000000000000 --- a/node_modules/path-parse/README.md +++ /dev/null @@ -1,42 +0,0 @@ -# path-parse [![Build Status](https://travis-ci.org/jbgutierrez/path-parse.svg?branch=master)](https://travis-ci.org/jbgutierrez/path-parse) - -> Node.js [`path.parse(pathString)`](https://nodejs.org/api/path.html#path_path_parse_pathstring) [ponyfill](https://ponyfill.com). - -## Install - -``` -$ npm install --save path-parse -``` - -## Usage - -```js -var pathParse = require('path-parse'); - -pathParse('/home/user/dir/file.txt'); -//=> { -// root : "/", -// dir : "/home/user/dir", -// base : "file.txt", -// ext : ".txt", -// name : "file" -// } -``` - -## API - -See [`path.parse(pathString)`](https://nodejs.org/api/path.html#path_path_parse_pathstring) docs. - -### pathParse(path) - -### pathParse.posix(path) - -The Posix specific version. - -### pathParse.win32(path) - -The Windows specific version. 
- -## License - -MIT © [Javier Blanco](http://jbgutierrez.info) diff --git a/node_modules/path-parse/index.js b/node_modules/path-parse/index.js index 3b7601fe494ee..f062d0a23e629 100644 --- a/node_modules/path-parse/index.js +++ b/node_modules/path-parse/index.js @@ -2,29 +2,14 @@ var isWindows = process.platform === 'win32'; -// Regex to split a windows path into three parts: [*, device, slash, -// tail] windows-only -var splitDeviceRe = - /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; - -// Regex to split the tail part of the above into [*, dir, basename, ext] -var splitTailRe = - /^([\s\S]*?)((?:\.{1,2}|[^\\\/]+?|)(\.[^.\/\\]*|))(?:[\\\/]*)$/; +// Regex to split a windows path into into [dir, root, basename, name, ext] +var splitWindowsRe = + /^(((?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?[\\\/]?)(?:[^\\\/]*[\\\/])*)((\.{1,2}|[^\\\/]+?|)(\.[^.\/\\]*|))[\\\/]*$/; var win32 = {}; -// Function to split a filename into [root, dir, basename, ext] function win32SplitPath(filename) { - // Separate device+slash from tail - var result = splitDeviceRe.exec(filename), - device = (result[1] || '') + (result[2] || ''), - tail = result[3] || ''; - // Split the tail into dir, basename and extension - var result2 = splitTailRe.exec(tail), - dir = result2[1], - basename = result2[2], - ext = result2[3]; - return [device, dir, basename, ext]; + return splitWindowsRe.exec(filename).slice(1); } win32.parse = function(pathString) { @@ -34,24 +19,24 @@ win32.parse = function(pathString) { ); } var allParts = win32SplitPath(pathString); - if (!allParts || allParts.length !== 4) { + if (!allParts || allParts.length !== 5) { throw new TypeError("Invalid path '" + pathString + "'"); } return { - root: allParts[0], - dir: allParts[0] + allParts[1].slice(0, -1), + root: allParts[1], + dir: allParts[0] === allParts[1] ? allParts[0] : allParts[0].slice(0, -1), base: allParts[2], - ext: allParts[3], - name: allParts[2].slice(0, allParts[2].length - allParts[3].length) + ext: allParts[4], + name: allParts[3] }; }; -// Split a filename into [root, dir, basename, ext], unix version +// Split a filename into [dir, root, basename, name, ext], unix version // 'root' is just a slash, or nothing. 
var splitPathRe = - /^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/; + /^((\/?)(?:[^\/]*\/)*)((\.{1,2}|[^\/]+?|)(\.[^.\/]*|))[\/]*$/; var posix = {}; @@ -67,19 +52,16 @@ posix.parse = function(pathString) { ); } var allParts = posixSplitPath(pathString); - if (!allParts || allParts.length !== 4) { + if (!allParts || allParts.length !== 5) { throw new TypeError("Invalid path '" + pathString + "'"); } - allParts[1] = allParts[1] || ''; - allParts[2] = allParts[2] || ''; - allParts[3] = allParts[3] || ''; - + return { - root: allParts[0], - dir: allParts[0] + allParts[1].slice(0, -1), + root: allParts[1], + dir: allParts[0].slice(0, -1), base: allParts[2], - ext: allParts[3], - name: allParts[2].slice(0, allParts[2].length - allParts[3].length) + ext: allParts[4], + name: allParts[3], }; }; diff --git a/node_modules/path-parse/package.json b/node_modules/path-parse/package.json index 21332bb14f8b7..36c23f84e7063 100644 --- a/node_modules/path-parse/package.json +++ b/node_modules/path-parse/package.json @@ -1,6 +1,6 @@ { "name": "path-parse", - "version": "1.0.6", + "version": "1.0.7", "description": "Node.js path.parse() ponyfill", "main": "index.js", "scripts": { diff --git a/node_modules/path-parse/test.js b/node_modules/path-parse/test.js deleted file mode 100644 index 0b30c12393639..0000000000000 --- a/node_modules/path-parse/test.js +++ /dev/null @@ -1,77 +0,0 @@ -var assert = require('assert'); -var pathParse = require('./index'); - -var winParseTests = [ - [{ root: 'C:\\', dir: 'C:\\path\\dir', base: 'index.html', ext: '.html', name: 'index' }, 'C:\\path\\dir\\index.html'], - [{ root: 'C:\\', dir: 'C:\\another_path\\DIR\\1\\2\\33', base: 'index', ext: '', name: 'index' }, 'C:\\another_path\\DIR\\1\\2\\33\\index'], - [{ root: '', dir: 'another_path\\DIR with spaces\\1\\2\\33', base: 'index', ext: '', name: 'index' }, 'another_path\\DIR with spaces\\1\\2\\33\\index'], - [{ root: '\\', dir: '\\foo', base: 'C:', ext: '', name: 'C:' }, '\\foo\\C:'], - [{ root: '', dir: '', base: 'file', ext: '', name: 'file' }, 'file'], - [{ root: '', dir: '.', base: 'file', ext: '', name: 'file' }, '.\\file'], - - // unc - [{ root: '\\\\server\\share\\', dir: '\\\\server\\share\\', base: 'file_path', ext: '', name: 'file_path' }, '\\\\server\\share\\file_path'], - [{ root: '\\\\server two\\shared folder\\', dir: '\\\\server two\\shared folder\\', base: 'file path.zip', ext: '.zip', name: 'file path' }, '\\\\server two\\shared folder\\file path.zip'], - [{ root: '\\\\teela\\admin$\\', dir: '\\\\teela\\admin$\\', base: 'system32', ext: '', name: 'system32' }, '\\\\teela\\admin$\\system32'], - [{ root: '\\\\?\\UNC\\', dir: '\\\\?\\UNC\\server', base: 'share', ext: '', name: 'share' }, '\\\\?\\UNC\\server\\share'] -]; - -var winSpecialCaseFormatTests = [ - [{dir: 'some\\dir'}, 'some\\dir\\'], - [{base: 'index.html'}, 'index.html'], - [{}, ''] -]; - -var unixParseTests = [ - [{ root: '/', dir: '/home/user/dir', base: 'file.txt', ext: '.txt', name: 'file' }, '/home/user/dir/file.txt'], - [{ root: '/', dir: '/home/user/a dir', base: 'another File.zip', ext: '.zip', name: 'another File' }, '/home/user/a dir/another File.zip'], - [{ root: '/', dir: '/home/user/a dir/', base: 'another&File.', ext: '.', name: 'another&File' }, '/home/user/a dir//another&File.'], - [{ root: '/', dir: '/home/user/a$$$dir/', base: 'another File.zip', ext: '.zip', name: 'another File' }, '/home/user/a$$$dir//another File.zip'], - [{ root: '', dir: 'user/dir', base: 'another File.zip', ext: '.zip', name: 'another File' }, 
'user/dir/another File.zip'], - [{ root: '', dir: '', base: 'file', ext: '', name: 'file' }, 'file'], - [{ root: '', dir: '', base: '.\\file', ext: '', name: '.\\file' }, '.\\file'], - [{ root: '', dir: '.', base: 'file', ext: '', name: 'file' }, './file'], - [{ root: '', dir: '', base: 'C:\\foo', ext: '', name: 'C:\\foo' }, 'C:\\foo'] -]; - -var unixSpecialCaseFormatTests = [ - [{dir: 'some/dir'}, 'some/dir/'], - [{base: 'index.html'}, 'index.html'], - [{}, ''] -]; - -var errors = [ - {input: null, message: /Parameter 'pathString' must be a string, not/}, - {input: {}, message: /Parameter 'pathString' must be a string, not object/}, - {input: true, message: /Parameter 'pathString' must be a string, not boolean/}, - {input: 1, message: /Parameter 'pathString' must be a string, not number/}, - {input: undefined, message: /Parameter 'pathString' must be a string, not undefined/}, -]; - -checkParseFormat(pathParse.win32, winParseTests); -checkParseFormat(pathParse.posix, unixParseTests); -checkErrors(pathParse.win32); -checkErrors(pathParse.posix); - -function checkErrors(parse) { - errors.forEach(function(errorCase) { - try { - parse(errorCase.input); - } catch(err) { - assert.ok(err instanceof TypeError); - assert.ok( - errorCase.message.test(err.message), - 'expected ' + errorCase.message + ' to match ' + err.message - ); - return; - } - - assert.fail('should have thrown'); - }); -} - -function checkParseFormat(parse, testCases) { - testCases.forEach(function(testCase) { - assert.deepEqual(parse(testCase[1]), testCase[0]); - }); -} diff --git a/node_modules/performance-now/.npmignore b/node_modules/performance-now/.npmignore deleted file mode 100644 index 496ee2ca6a2f0..0000000000000 --- a/node_modules/performance-now/.npmignore +++ /dev/null @@ -1 +0,0 @@ -.DS_Store \ No newline at end of file diff --git a/node_modules/performance-now/.travis.yml b/node_modules/performance-now/.travis.yml deleted file mode 100644 index 1543c1990eb9e..0000000000000 --- a/node_modules/performance-now/.travis.yml +++ /dev/null @@ -1,6 +0,0 @@ -language: node_js -node_js: - - "node" - - "6" - - "4" - - "0.12" diff --git a/node_modules/performance-now/README.md b/node_modules/performance-now/README.md deleted file mode 100644 index 28080f856aa21..0000000000000 --- a/node_modules/performance-now/README.md +++ /dev/null @@ -1,30 +0,0 @@ -# performance-now [![Build Status](https://travis-ci.org/braveg1rl/performance-now.png?branch=master)](https://travis-ci.org/braveg1rl/performance-now) [![Dependency Status](https://david-dm.org/braveg1rl/performance-now.png)](https://david-dm.org/braveg1rl/performance-now) - -Implements a function similar to `performance.now` (based on `process.hrtime`). - -Modern browsers have a `window.performance` object with - among others - a `now` method which gives time in milliseconds, but with sub-millisecond precision. This module offers the same function based on the Node.js native `process.hrtime` function. - -Using `process.hrtime` means that the reported time will be monotonically increasing, and not subject to clock-drift. - -According to the [High Resolution Time specification](http://www.w3.org/TR/hr-time/), the number of milliseconds reported by `performance.now` should be relative to the value of `performance.timing.navigationStart`. - -In the current version of the module (2.0) the reported time is relative to the time the current Node process has started (inferred from `process.uptime()`). - -Version 1.0 reported a different time. 
The reported time was relative to the time the module was loaded (i.e. the time it was first `require`d). If you need this functionality, version 1.0 is still available on NPM. - -## Example usage - -```javascript -var now = require("performance-now") -var start = now() -var end = now() -console.log(start.toFixed(3)) // the number of milliseconds the current node process is running -console.log((start-end).toFixed(3)) // ~ 0.002 on my system -``` - -Running the now function two times right after each other yields a time difference of a few microseconds. Given this overhead, I think it's best to assume that the precision of intervals computed with this method is not higher than 10 microseconds, if you don't know the exact overhead on your own system. - -## License - -performance-now is released under the [MIT License](http://opensource.org/licenses/MIT). -Copyright (c) 2017 Braveg1rl diff --git a/node_modules/proc-log/LICENSE b/node_modules/proc-log/LICENSE new file mode 100644 index 0000000000000..83837797202b7 --- /dev/null +++ b/node_modules/proc-log/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) GitHub, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/arborist/lib/proc-log.js b/node_modules/proc-log/index.js similarity index 93% rename from node_modules/@npmcli/arborist/lib/proc-log.js rename to node_modules/proc-log/index.js index 52e0e466798ee..9b58713ff3f85 100644 --- a/node_modules/@npmcli/arborist/lib/proc-log.js +++ b/node_modules/proc-log/index.js @@ -1,4 +1,3 @@ -// default logger. // emits 'log' events on the process const LEVELS = [ 'notice', @@ -18,4 +17,6 @@ const logger = {} for (const level of LEVELS) logger[level] = log(level) +logger.LEVELS = LEVELS + module.exports = logger diff --git a/node_modules/proc-log/package.json b/node_modules/proc-log/package.json new file mode 100644 index 0000000000000..178009f61b8d2 --- /dev/null +++ b/node_modules/proc-log/package.json @@ -0,0 +1,28 @@ +{ + "name": "proc-log", + "version": "1.0.0", + "files": [ + "index.js" + ], + "description": "just emit 'log' events on the process object", + "repository": "https://github.com/npm/proc-log", + "author": "Isaac Z. 
Schlueter <i@izs.me> (https://izs.me)", + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "posttest": "eslint index.js test/*.js", + "postsnap": "eslint index.js test/*.js --fix", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "devDependencies": { + "eslint": "^7.9.0", + "eslint-plugin-import": "^2.22.0", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^4.2.1", + "eslint-plugin-standard": "^4.0.1", + "tap": "^15.0.2" + } +} diff --git a/node_modules/promise-all-reject-late/.github/FUNDING.yml b/node_modules/promise-all-reject-late/.github/FUNDING.yml deleted file mode 100644 index 20d8c03a4dca6..0000000000000 --- a/node_modules/promise-all-reject-late/.github/FUNDING.yml +++ /dev/null @@ -1,3 +0,0 @@ -# These are supported funding model platforms - -github: [isaacs] diff --git a/node_modules/promise-all-reject-late/.npmignore b/node_modules/promise-all-reject-late/.npmignore deleted file mode 100644 index 3870bd5bb7207..0000000000000 --- a/node_modules/promise-all-reject-late/.npmignore +++ /dev/null @@ -1,24 +0,0 @@ -# ignore most things, include some others -/* -/.* - -!bin/ -!lib/ -!docs/ -!package.json -!package-lock.json -!README.md -!CONTRIBUTING.md -!LICENSE -!CHANGELOG.md -!example/ -!scripts/ -!tap-snapshots/ -!test/ -!.github/ -!.travis.yml -!.gitignore -!.gitattributes -!coverage-map.js -!map.js -!index.js diff --git a/node_modules/promise-all-reject-late/README.md b/node_modules/promise-all-reject-late/README.md deleted file mode 100644 index eda7c70627f63..0000000000000 --- a/node_modules/promise-all-reject-late/README.md +++ /dev/null @@ -1,40 +0,0 @@ -# promise-all-reject-late - -Like Promise.all, but save rejections until all promises are resolved. - -This is handy when you want to do a bunch of things in parallel, and -rollback on failure, without clobbering or conflicting with those parallel -actions that may be in flight. For example, creating a bunch of files, -and deleting any if they don't all succeed. - -Example: - -```js -const lateReject = require('promise-all-reject-late') - -const { promisify } = require('util') -const fs = require('fs') -const writeFile = promisify(fs.writeFile) - -const createFilesOrRollback = (files) => { - return lateReject(files.map(file => writeFile(file, 'some data'))) - .catch(er => { - // try to clean up, then fail with the initial error - // we know that all write attempts are finished at this point - return lateReject(files.map(file => rimraf(file))) - .catch(er => { - console.error('failed to clean up, youre on your own i guess', er) - }) - .then(() => { - // fail with the original error - throw er - }) - }) -} -``` - -## API - -* `lateReject([array, of, promises])` - Resolve all the promises, - returning a promise that rejects with the first error, or resolves with - the array of results, but only after all promises are settled. diff --git a/node_modules/promise-call-limit/README.md b/node_modules/promise-call-limit/README.md deleted file mode 100644 index eae5de8ce0bfb..0000000000000 --- a/node_modules/promise-call-limit/README.md +++ /dev/null @@ -1,30 +0,0 @@ -# promise-call-limit - -Call an array of promise-returning functions, restricting concurrency to a -specified limit. 
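Stepping back to the new `proc-log` package added above: its whole API is the logger shown in `index.js`, which, per the package description, just emits `'log'` events on the `process` object. A minimal sketch of producer and consumer, assuming the emitted arguments are the level name followed by whatever was passed to the log call:

```js
const log = require('proc-log')

// consumer side (e.g. a CLI) decides how log events are rendered
process.on('log', (level, ...args) => {
  console.error(`[${level}]`, ...args)
})

// producer side: library code logs without importing a concrete logger
log.notice('config', 'using default registry')

// the diff above also exposes the list of level names
console.log(log.LEVELS)
```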
- -## USAGE - -```js -const promiseCallLimit = require('promise-call-limit') -const things = getLongListOfThingsToFrobulate() - -// frobulate no more than 4 things in parallel -promiseCallLimit(things.map(thing => () => frobulateThing(thing)), 4) - .then(results => console.log('frobulated 4 at a time', results)) -``` - -## API - -### promiseCallLimit(queue Array<() => Promise>, limit = defaultLimit) - -The default limit is the number of CPUs on the system - 1, or 1. - -The reason for subtracting one is that presumably the main thread is taking -up a CPU as well, so let's not be greedy. - -Note that the array should be a list of Promise-_returning_ functions, not -Promises themselves. If you have a bunch of Promises already, you're best -off just calling `Promise.all()`. - -The functions in the queue are called without any arguments. diff --git a/node_modules/promise-inflight/README.md b/node_modules/promise-inflight/README.md deleted file mode 100644 index f0ae3a44432d6..0000000000000 --- a/node_modules/promise-inflight/README.md +++ /dev/null @@ -1,34 +0,0 @@ -# promise-inflight - -One promise for multiple requests in flight to avoid async duplication - -## USAGE - -```javascript -const inflight = require('promise-inflight') - -// some request that does some stuff -function req(key) { - // key is any random string. like a url or filename or whatever. - return inflight(key, () => { - // this is where you'd fetch the url or whatever - return Promise.delay(100) - }) -} - -// only assigns a single setTimeout -// when it dings, all thens get called with the same result. (There's only -// one underlying promise.) -req('foo').then(…) -req('foo').then(…) -req('foo').then(…) -req('foo').then(…) -``` - -## SEE ALSO - -* [inflight](https://npmjs.com/package/inflight) - For the callback based function on which this is based. - -## STILL NEEDS - -Tests! 
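The promise-inflight README above shows how to call it, but not the trick behind it. A minimal sketch of the same de-duplication idea, assuming a plain in-memory Map keyed by string (an illustration, not the module's actual source):

```js
const active = new Map()

function inflight (key, makePromise) {
  // reuse the promise if a request for this key is already in flight
  if (active.has(key)) return active.get(key)
  const p = Promise.resolve().then(makePromise)
  active.set(key, p)
  // forget the entry once it settles so later calls start fresh
  const clean = () => active.delete(key)
  p.then(clean, clean)
  return p
}

// demo: three callers share one underlying timer and get the same value
const slow = () => new Promise(res => setTimeout(() => res(Date.now()), 100))
Promise.all([inflight('k', slow), inflight('k', slow), inflight('k', slow)])
  .then(([a, b, c]) => console.log(a === b && b === c)) // true
```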
diff --git a/node_modules/promise-retry/.editorconfig b/node_modules/promise-retry/.editorconfig deleted file mode 100644 index 8bc4f108d549f..0000000000000 --- a/node_modules/promise-retry/.editorconfig +++ /dev/null @@ -1,15 +0,0 @@ -root = true - -[*] -indent_style = space -indent_size = 4 -end_of_line = lf -charset = utf-8 -trim_trailing_whitespace = true -insert_final_newline = true - -[*.md] -trim_trailing_whitespace = false - -[package.json] -indent_size = 2 diff --git a/node_modules/promise-retry/.travis.yml b/node_modules/promise-retry/.travis.yml deleted file mode 100644 index e2d26a9cad62b..0000000000000 --- a/node_modules/promise-retry/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -language: node_js -node_js: - - "10" - - "12" diff --git a/node_modules/promise-retry/README.md b/node_modules/promise-retry/README.md deleted file mode 100644 index 587de5c0b1841..0000000000000 --- a/node_modules/promise-retry/README.md +++ /dev/null @@ -1,94 +0,0 @@ -# node-promise-retry - -[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Build Status][travis-image]][travis-url] [![Dependency status][david-dm-image]][david-dm-url] [![Dev Dependency status][david-dm-dev-image]][david-dm-dev-url] [![Greenkeeper badge][greenkeeper-image]][greenkeeper-url] - -[npm-url]:https://npmjs.org/package/promise-retry -[downloads-image]:http://img.shields.io/npm/dm/promise-retry.svg -[npm-image]:http://img.shields.io/npm/v/promise-retry.svg -[travis-url]:https://travis-ci.org/IndigoUnited/node-promise-retry -[travis-image]:http://img.shields.io/travis/IndigoUnited/node-promise-retry/master.svg -[david-dm-url]:https://david-dm.org/IndigoUnited/node-promise-retry -[david-dm-image]:https://img.shields.io/david/IndigoUnited/node-promise-retry.svg -[david-dm-dev-url]:https://david-dm.org/IndigoUnited/node-promise-retry?type=dev -[david-dm-dev-image]:https://img.shields.io/david/dev/IndigoUnited/node-promise-retry.svg -[greenkeeper-image]:https://badges.greenkeeper.io/IndigoUnited/node-promise-retry.svg -[greenkeeper-url]:https://greenkeeper.io/ - -Retries a function that returns a promise, leveraging the power of the [retry](https://github.com/tim-kos/node-retry) module to the promises world. - -There's already some modules that are able to retry functions that return promises but -they were rather difficult to use or do not offer an easy way to do conditional retries. - - -## Installation - -`$ npm install promise-retry` - - -## Usage - -### promiseRetry(fn, [options]) - -Calls `fn` until the returned promise ends up fulfilled or rejected with an error different than -a `retry` error. -The `options` argument is an object which maps to the [retry](https://github.com/tim-kos/node-retry) module options: - -- `retries`: The maximum amount of times to retry the operation. Default is `10`. -- `factor`: The exponential factor to use. Default is `2`. -- `minTimeout`: The number of milliseconds before starting the first retry. Default is `1000`. -- `maxTimeout`: The maximum number of milliseconds between two retries. Default is `Infinity`. -- `randomize`: Randomizes the timeouts by multiplying with a factor between `1` to `2`. Default is `false`. - - -The `fn` function will receive a `retry` function as its first argument that should be called with an error whenever you want to retry `fn`. The `retry` function will always throw an error. -If there are retries left, it will throw a special `retry` error that will be handled internally to call `fn` again. 
-If there are no retries left, it will throw the actual error passed to it. - -If you prefer, you can pass the options first using the alternative function signature `promiseRetry([options], fn)`. - -## Example -```js -var promiseRetry = require('promise-retry'); - -// Simple example -promiseRetry(function (retry, number) { - console.log('attempt number', number); - - return doSomething() - .catch(retry); -}) -.then(function (value) { - // .. -}, function (err) { - // .. -}); - -// Conditional example -promiseRetry(function (retry, number) { - console.log('attempt number', number); - - return doSomething() - .catch(function (err) { - if (err.code === 'ETIMEDOUT') { - retry(err); - } - - throw err; - }); -}) -.then(function (value) { - // .. -}, function (err) { - // .. -}); -``` - - -## Tests - -`$ npm test` - - -## License - -Released under the [MIT License](http://www.opensource.org/licenses/mit-license.php). diff --git a/node_modules/promzard/.npmignore b/node_modules/promzard/.npmignore deleted file mode 100644 index 15a1789a695f3..0000000000000 --- a/node_modules/promzard/.npmignore +++ /dev/null @@ -1 +0,0 @@ -example/npm-init/package.json diff --git a/node_modules/promzard/README.md b/node_modules/promzard/README.md deleted file mode 100644 index 93c0418a6c6b7..0000000000000 --- a/node_modules/promzard/README.md +++ /dev/null @@ -1,133 +0,0 @@ -# promzard - -A prompting wizard for building files from specialized PromZard modules. -Used by `npm init`. - -A reimplementation of @SubStack's -[prompter](https://github.com/substack/node-prompter), which does not -use AST traversal. - -From another point of view, it's a reimplementation of -[@Marak](https://github.com/marak)'s -[wizard](https://github.com/Marak/wizard) which doesn't use schemas. - -The goal is a nice drop-in enhancement for `npm init`. - -## Usage - -```javascript -var promzard = require('promzard') -promzard(inputFile, optionalContextAdditions, function (er, data) { - // .. you know what you doing .. -}) -``` - -In the `inputFile` you can have something like this: - -```javascript -var fs = require('fs') -module.exports = { - "greeting": prompt("Who shall you greet?", "world", function (who) { - return "Hello, " + who - }), - "filename": __filename, - "directory": function (cb) { - fs.readdir(__dirname, cb) - } -} -``` - -When run, promzard will display the prompts and resolve the async -functions in order, and then either give you an error, or the resolved -data, ready to be dropped into a JSON file or some other place. - - -### promzard(inputFile, ctx, callback) - -The inputFile is just a node module. You can require() things, set -module.exports, etc. Whatever that module exports is the result, and it -is walked over to call any functions as described below. - -The only caveat is that you must give PromZard the full absolute path -to the module (you can get this via Node's `require.resolve`.) Also, -the `prompt` function is injected into the context object, so watch out. - -Whatever you put in that `ctx` will of course also be available in the -module. You can get quite fancy with this, passing in existing configs -and so on. - -### Class: promzard.PromZard(file, ctx) - -Just like the `promzard` function, but the EventEmitter that makes it -all happen. Emits either a `data` event with the data, or a `error` -event if it blows up. - -If `error` is emitted, then `data` never will be. - -### prompt(...) - -In the promzard input module, you can call the `prompt` function. -This prompts the user to input some data. 
The arguments are interpreted -based on type: - -1. `string` The first string encountered is the prompt. The second is - the default value. -2. `function` A transformer function which receives the data and returns - something else. More than meets the eye. -3. `object` The `prompt` member is the prompt, the `default` member is - the default value, and the `transform` is the transformer. - -Whatever the final value is, that's what will be put on the resulting -object. - -### Functions - -If there are any functions on the promzard input module's exports, then -promzard will call each of them with a callback. This way, your module -can do asynchronous actions if necessary to validate or ascertain -whatever needs verification. - -The functions are called in the context of the ctx object, and are given -a single argument, which is a callback that should be called with either -an error, or the result to assign to that spot. - -In the async function, you can also call prompt() and return the result -of the prompt in the callback. - -For example, this works fine in a promzard module: - -``` -exports.asyncPrompt = function (cb) { - fs.stat(someFile, function (er, st) { - // if there's an error, no prompt, just error - // otherwise prompt and use the actual file size as the default - cb(er, prompt('file size', st.size)) - }) -} -``` - -You can also return other async functions in the async function -callback. Though that's a bit silly, it could be a handy way to reuse -functionality in some cases. - -### Sync vs Async - -The `prompt()` function is not synchronous, though it appears that way. -It just returns a token that is swapped out when the data object is -walked over asynchronously later, and returns a token. - -For that reason, prompt() calls whose results don't end up on the data -object are never shown to the user. For example, this will only prompt -once: - -``` -exports.promptThreeTimes = prompt('prompt me once', 'shame on you') -exports.promptThreeTimes = prompt('prompt me twice', 'um....') -exports.promptThreeTimes = prompt('you cant prompt me again') -``` - -### Isn't this exactly the sort of 'looks sync' that you said was bad about other libraries? - -Yeah, sorta. I wouldn't use promzard for anything more complicated than -a wizard that spits out prompts to set up a config file or something. -Maybe there are other use cases I haven't considered. diff --git a/node_modules/promzard/example/npm-init/README.md b/node_modules/promzard/example/npm-init/README.md deleted file mode 100644 index 46e5592c304f5..0000000000000 --- a/node_modules/promzard/example/npm-init/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# npm-init - -An initter you init wit, innit? - -## More stuff here - -Blerp derp herp lerg borgle pop munch efemerate baz foo a gandt synergy -jorka chatt slurm. 
diff --git a/node_modules/psl/README.md b/node_modules/psl/README.md deleted file mode 100644 index e876c3d6f64c4..0000000000000 --- a/node_modules/psl/README.md +++ /dev/null @@ -1,215 +0,0 @@ -# psl (Public Suffix List) - -[![NPM](https://nodei.co/npm/psl.png?downloads=true&downloadRank=true)](https://nodei.co/npm/psl/) - -[![Greenkeeper badge](https://badges.greenkeeper.io/lupomontero/psl.svg)](https://greenkeeper.io/) -[![Build Status](https://travis-ci.org/lupomontero/psl.svg?branch=master)](https://travis-ci.org/lupomontero/psl) -[![devDependency Status](https://david-dm.org/lupomontero/psl/dev-status.png)](https://david-dm.org/lupomontero/psl#info=devDependencies) - -`psl` is a `JavaScript` domain name parser based on the -[Public Suffix List](https://publicsuffix.org/). - -This implementation is tested against the -[test data hosted by Mozilla](http://mxr.mozilla.org/mozilla-central/source/netwerk/test/unit/data/test_psl.txt?raw=1) -and kindly provided by [Comodo](https://www.comodo.com/). - -Cross browser testing provided by -[<img alt="BrowserStack" width="160" src="./browserstack-logo.svg" />](https://www.browserstack.com/) - -## What is the Public Suffix List? - -The Public Suffix List is a cross-vendor initiative to provide an accurate list -of domain name suffixes. - -The Public Suffix List is an initiative of the Mozilla Project, but is -maintained as a community resource. It is available for use in any software, -but was originally created to meet the needs of browser manufacturers. - -A "public suffix" is one under which Internet users can directly register names. -Some examples of public suffixes are ".com", ".co.uk" and "pvt.k12.wy.us". The -Public Suffix List is a list of all known public suffixes. - -Source: http://publicsuffix.org - - -## Installation - -### Node.js - -```sh -npm install --save psl -``` - -### Browser - -Download [psl.min.js](https://raw.githubusercontent.com/lupomontero/psl/master/dist/psl.min.js) -and include it in a script tag. - -```html -<script src="psl.min.js"></script> -``` - -This script is browserified and wrapped in a [umd](https://github.com/umdjs/umd) -wrapper so you should be able to use it standalone or together with a module -loader. - -## API - -### `psl.parse(domain)` - -Parse domain based on Public Suffix List. Returns an `Object` with the following -properties: - -* `tld`: Top level domain (this is the _public suffix_). -* `sld`: Second level domain (the first private part of the domain name). -* `domain`: The domain name is the `sld` + `tld`. -* `subdomain`: Optional parts left of the domain. - -#### Example: - -```js -var psl = require('psl'); - -// Parse domain without subdomain -var parsed = psl.parse('google.com'); -console.log(parsed.tld); // 'com' -console.log(parsed.sld); // 'google' -console.log(parsed.domain); // 'google.com' -console.log(parsed.subdomain); // null - -// Parse domain with subdomain -var parsed = psl.parse('www.google.com'); -console.log(parsed.tld); // 'com' -console.log(parsed.sld); // 'google' -console.log(parsed.domain); // 'google.com' -console.log(parsed.subdomain); // 'www' - -// Parse domain with nested subdomains -var parsed = psl.parse('a.b.c.d.foo.com'); -console.log(parsed.tld); // 'com' -console.log(parsed.sld); // 'foo' -console.log(parsed.domain); // 'foo.com' -console.log(parsed.subdomain); // 'a.b.c.d' -``` - -### `psl.get(domain)` - -Get domain name, `sld` + `tld`. Returns `null` if not valid. - -#### Example: - -```js -var psl = require('psl'); - -// null input. 
-psl.get(null); // null - -// Mixed case. -psl.get('COM'); // null -psl.get('example.COM'); // 'example.com' -psl.get('WwW.example.COM'); // 'example.com' - -// Unlisted TLD. -psl.get('example'); // null -psl.get('example.example'); // 'example.example' -psl.get('b.example.example'); // 'example.example' -psl.get('a.b.example.example'); // 'example.example' - -// TLD with only 1 rule. -psl.get('biz'); // null -psl.get('domain.biz'); // 'domain.biz' -psl.get('b.domain.biz'); // 'domain.biz' -psl.get('a.b.domain.biz'); // 'domain.biz' - -// TLD with some 2-level rules. -psl.get('uk.com'); // null); -psl.get('example.uk.com'); // 'example.uk.com'); -psl.get('b.example.uk.com'); // 'example.uk.com'); - -// More complex TLD. -psl.get('c.kobe.jp'); // null -psl.get('b.c.kobe.jp'); // 'b.c.kobe.jp' -psl.get('a.b.c.kobe.jp'); // 'b.c.kobe.jp' -psl.get('city.kobe.jp'); // 'city.kobe.jp' -psl.get('www.city.kobe.jp'); // 'city.kobe.jp' - -// IDN labels. -psl.get('食狮.com.cn'); // '食狮.com.cn' -psl.get('食狮.公司.cn'); // '食狮.公司.cn' -psl.get('www.食狮.公司.cn'); // '食狮.公司.cn' - -// Same as above, but punycoded. -psl.get('xn--85x722f.com.cn'); // 'xn--85x722f.com.cn' -psl.get('xn--85x722f.xn--55qx5d.cn'); // 'xn--85x722f.xn--55qx5d.cn' -psl.get('www.xn--85x722f.xn--55qx5d.cn'); // 'xn--85x722f.xn--55qx5d.cn' -``` - -### `psl.isValid(domain)` - -Check whether a domain has a valid Public Suffix. Returns a `Boolean` indicating -whether the domain has a valid Public Suffix. - -#### Example - -```js -var psl = require('psl'); - -psl.isValid('google.com'); // true -psl.isValid('www.google.com'); // true -psl.isValid('x.yz'); // false -``` - - -## Testing and Building - -Test are written using [`mocha`](https://mochajs.org/) and can be -run in two different environments: `node` and `phantomjs`. - -```sh -# This will run `eslint`, `mocha` and `karma`. -npm test - -# Individual test environments -# Run tests in node only. -./node_modules/.bin/mocha test -# Run tests in phantomjs only. -./node_modules/.bin/karma start ./karma.conf.js --single-run - -# Build data (parse raw list) and create dist files -npm run build -``` - -Feel free to fork if you see possible improvements! - - -## Acknowledgements - -* Mozilla Foundation's [Public Suffix List](https://publicsuffix.org/) -* Thanks to Rob Stradling of [Comodo](https://www.comodo.com/) for providing - test data. -* Inspired by [weppos/publicsuffix-ruby](https://github.com/weppos/publicsuffix-ruby) - - -## License - -The MIT License (MIT) - -Copyright (c) 2017 Lupo Montero <lupomontero@gmail.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/puka/CHANGELOG.md b/node_modules/puka/CHANGELOG.md deleted file mode 100644 index 781b81295a4a7..0000000000000 --- a/node_modules/puka/CHANGELOG.md +++ /dev/null @@ -1,31 +0,0 @@ -# Changelog -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - -## [1.0.1](https://gitlab.com/rhendric/puka/-/compare/v1.0.0...v1.0.1) - 2020-05-16 - -### Fixed - -- Add more carets to win32 command arguments ([45965ca](https://gitlab.com/rhendric/puka/-/commit/45965ca60fcc518082e0b085d8e81f3f3279ffb4)) - - As previously documented and implemented, Puka assumed that all programs - are batch files for the purpose of multi-escaping commands that appear - in pipelines. However, regardless of whether a command is in a pipeline, - one extra layer of escaping is needed if the command invokes a batch - file, which Puka was not producing. This only applies to the arguments - to the command, not to the batch file path, nor to paths used in - redirects. (The property-based spawn test which was supposed to catch - such oversights missed this one because it was invoking the Node.js - executable directly, not, as recommended in the documentation, a batch - file.) - - Going forward, the caveats described in the documentation continue to - apply: if you are running programs on Windows with Puka, make sure they - are batch files, or you may find arguments are being escaped with too - many carets. As the documentation says, if this causes problems for you, - please open an issue so we can work out the details of what a good - workaround looks like. - -## [1.0.0](https://gitlab.com/rhendric/puka/-/tags/v1.0.0) - 2017-09-29 diff --git a/node_modules/puka/LICENSE.txt b/node_modules/puka/LICENSE.txt deleted file mode 100644 index 0141196a59337..0000000000000 --- a/node_modules/puka/LICENSE.txt +++ /dev/null @@ -1,18 +0,0 @@ -Copyright 2017 Ryan Hendrickson <ryan.hendrickson@alum.mit.edu> - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/puka/README.md b/node_modules/puka/README.md deleted file mode 100644 index 2670f742b3677..0000000000000 --- a/node_modules/puka/README.md +++ /dev/null @@ -1,411 +0,0 @@ -# Puka - -[![GitLab CI pipeline status](https://gitlab.com/rhendric/puka/badges/master/pipeline.svg)](https://gitlab.com/rhendric/puka/commits/master) [![AppVeyor build status](https://img.shields.io/appveyor/ci/rhendric/puka.svg?label=windows%20tests)](https://ci.appveyor.com/project/rhendric/puka) [![Codecov status](https://img.shields.io/codecov/c/gl/rhendric/puka.svg)](https://codecov.io/gl/rhendric/puka) - -Puka is a cross-platform library for safely passing strings through shells. - -#### Contents - -- [Introduction](#introduction) - - [Why would I use Puka?](#why-would-i-use-puka) - - [How do I use Puka?](#how-do-i-use-puka) - - [What's the catch?](#whats-the-catch) -- [API Documentation](#api-documentation) - - [Basic API](#basic-api) - - [sh](#sh) - - [unquoted](#unquoted) - - [Advanced API](#advanced-api) - - [quoteForShell](#quoteforshell) - - [quoteForCmd](#quoteforcmd) - - [quoteForSh](#quoteforsh) - - [ShellString](#shellstring) - - [Secret API](#secret-api) -- [The sh DSL](#the-sh-dsl) - - [Syntax](#syntax) - - [Semantics](#semantics) - - [Types of placeholders](#types-of-placeholders) - -## Introduction - -### Why would I use Puka? - -When launching a child process from Node, you have a choice between launching -directly from the operating system (as with [child_process.spawn](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options), -if you don't use the `{ shell: true }` option), and running the command through -a shell (as with [child_process.exec](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback)). -Using a shell gives you more power, such as the ability to chain multiple -commands together or use redirection, but you have to construct your command as -a single string instead of using an array of arguments. And doing that can be -buggy (if not dangerous) if you don't take care to quote any arguments -correctly for the shell you're targeting, _and_ the quoting has to be done -differently on Windows and non-Windows shells. - -Puka solves that problem by giving you a simple and platform-agnostic way to -build shell commands with arguments that pass through your shell unaltered and -with no unsafe side effects, **whether you are running on Windows or a -Unix-based OS**. - -### How do I use Puka? - -Puka gives you an `sh` function intended for tagging -[template literals](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals), -which quotes (if necessary) any values interpolated into the template. A simple -example: - -```javascript -const { sh } = require('puka'); -const { execSync } = require('child_process'); - -const arg = 'file with spaces.txt'; -execSync(sh`some-command ${arg}`); -``` - -But Puka supports more than this! See [the `sh` DSL documentation](#the-sh-dsl) -for a detailed description of all the features currently supported. - -### What's the catch? - -Here are the ones I know about: - -Puka does _not_ ensure that the actual commands you're running are -cross-platform. If you're running npm programs, you generally won't have a -problem with that, but if you want to run ``sh`cat file` `` on Windows, you'll -need to depend on something like -[cash-cat](https://www.npmjs.com/package/cash-cat). 
- -I searched for days for a way to quote or escape line breaks in arguments to -`cmd.exe`, but couldn't find one (regular `^`-prepending and quotation marks -don't seem to cut it). If you know of a way that works, please [open an -issue](https://gitlab.com/rhendric/puka/issues/new) to tell me about it! Until -then, any line break characters (`\r` or `\n`) in values being interpolated by -`sh` will cause an error to be thrown on Windows only. - -Also on Windows, you may notice quoting mistakes if you run commands that -involve invoking a native executable (not a batch file ending in `.cmd` or -`.bat`). Unfortunately, batch files require some extra escaping on Windows, and -Puka assumes all programs are batch files because npm creates batch file shims -for programs it installs (and, if you care about cross-platform, you'll be -using npm programs in your commands). If this causes problems for you, please -[open an issue](https://gitlab.com/rhendric/puka/issues/new); if your situation -is specific enough, there may be workarounds or improvements to Puka to be -found. - -## API Documentation - -### Basic API - - - - -#### sh - -A string template tag for safely constructing cross-platform shell commands. - -An `sh` template is not actually treated as a literal string to be -interpolated; instead, it is a tiny DSL designed to make working with shell -strings safe, simple, and straightforward. To get started quickly, see the -examples below. [More detailed documentation][1] is available -further down. - -##### Examples - -```javascript -const title = '"this" & "that"'; -sh`script --title=${title}`; // => "script '--title=\"this\" & \"that\"'" -// Note: these examples show results for non-Windows platforms. -// On Windows, the above would instead be -// 'script ^^^"--title=\\^^^"this\\^^^" ^^^& \\^^^"that\\^^^"^^^"'. - -const names = ['file1', 'file 2']; -sh`rimraf ${names}.txt`; // => "rimraf file1.txt 'file 2.txt'" - -const cmd1 = ['cat', 'file 1.txt', 'file 2.txt']; -const cmd2 = ['use-input', '-abc']; -sh`${cmd1}|${cmd2}`; // => "cat 'file 1.txt' 'file 2.txt'|use-input -abc" -``` - -Returns **[String][2]** a string formatted for the platform Node is currently -running on. - -#### unquoted - -This function permits raw strings to be interpolated into a `sh` template. - -**IMPORTANT**: If you're using Puka due to security concerns, make sure you -don't pass any untrusted content to `unquoted`. This may be obvious, but -stray punctuation in an `unquoted` section can compromise the safety of the -entire shell command. - -##### Parameters - -- `value` any value (it will be treated as a string) - -##### Examples - -```javascript -const both = true; -sh`foo ${unquoted(both ? '&&' : '||')} bar`; // => 'foo && bar' -``` - -### Advanced API - -If these functions make life easier for you, go ahead and use them; they -are just as well supported as the above. But if you aren't certain you -need them, you probably don't. - - -#### quoteForShell - -Quotes a string for injecting into a shell command. - -This function is exposed for some hypothetical case when the `sh` DSL simply -won't do; `sh` is expected to be the more convenient option almost always. 
-Compare: - -```javascript -console.log('cmd' + args.map(a => ' ' + quoteForShell(a)).join('')); -console.log(sh`cmd ${args}`); // same as above - -console.log('cmd' + args.map(a => ' ' + quoteForShell(a, true)).join('')); -console.log(sh`cmd "${args}"`); // same as above -``` - -Additionally, on Windows, `sh` checks the entire command string for pipes, -which subtly change how arguments need to be quoted. If your commands may -involve pipes, you are strongly encouraged to use `sh` and not try to roll -your own with `quoteForShell`. - -##### Parameters - -- `text` **[String][2]** to be quoted -- `forceQuote` **[Boolean][3]?** whether to always add quotes even if the string - is already safe. Defaults to `false`. -- `platform` **[String][2]?** a value that `process.platform` might take: - `'win32'`, `'linux'`, etc.; determines how the string is to be formatted. - When omitted, effectively the same as `process.platform`. - -Returns **[String][2]** a string that is safe for the current (or specified) -platform. - -#### quoteForCmd - -A Windows-specific version of [quoteForShell][4]. - -##### Parameters - -- `text` **[String][2]** to be quoted -- `forceQuote` **[Boolean][3]?** whether to always add quotes even if the string - is already safe. Defaults to `false`. - -#### quoteForSh - -A Unix-specific version of [quoteForShell][4]. - -##### Parameters - -- `text` **[String][2]** to be quoted -- `forceQuote` **[Boolean][3]?** whether to always add quotes even if the string - is already safe. Defaults to `false`. - -#### ShellString - -A ShellString represents a shell command after it has been interpolated, but -before it has been formatted for a particular platform. ShellStrings are -useful if you want to prepare a command for a different platform than the -current one, for instance. - -To create a ShellString, use `ShellString.sh` the same way you would use -top-level `sh`. - -##### toString - -A method to format a ShellString into a regular String formatted for a -particular platform. - -###### Parameters - -- `platform` **[String][2]?** a value that `process.platform` might take: - `'win32'`, `'linux'`, etc.; determines how the string is to be formatted. - When omitted, effectively the same as `process.platform`. - -Returns **[String][2]** - -##### sh - -`ShellString.sh` is a template tag just like `sh`; the only difference is -that this function returns a ShellString which has not yet been formatted -into a String. - -Returns **[ShellString][5]** - -### Secret API - -Some internals of string formatting have been exposed for the ambitious and -brave souls who want to try to extend Puka to handle more shells or custom -interpolated values. This ‘secret’ API is partially documented in the code -but not here, and the semantic versioning guarantees on this API are bumped -down by one level: in other words, minor version releases of Puka can change -the secret API in backward-incompatible ways, and patch releases can add or -deprecate functionality. - -If it's not even documented in the code, use at your own risk—no semver -guarantees apply. - - -[1]: #the-sh-dsl - -[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String - -[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean - -[4]: #quoteforshell - -[5]: #shellstring - -## The sh DSL - -### Syntax - -An `sh` template comprises words, separated by whitespace. 
Words can contain: - -- text, which is composed of any characters that are not whitespace, single or - double quotes, or any of the special characters - ``# $ & ( ) ; < > \ ` |``; -- quotations, which are matching single or double quotes surrounding any - characters other than the delimiting quote; and -- placeholders, using the standard JavaScript template syntax (`${}`). - (Placeholders may also appear inside quotations.) - -The special characters ``# $ & ( ) ; < > \ ` |``, if unquoted, form their own -words. - -Redirect operators (`<`, `>`, `>>`, `2>`, etc.) receive their own special -handling, as do semicolons. Other than these two exceptions, no attempt is made -to understand any more sophisticated features of shell syntax. - -Standard JavaScript escape sequences, such as `\t`, are honored in the template -literal, and are treated equivalently to the characters they represent. There -is no further mechanism for escaping within the `sh` DSL itself; in particular, -if you want to put quotes inside quotes, you have to use interpolation, like -this: - -```javascript -sh`echo "${'single = \', double = "'}"` // => "echo 'single = '\\'', double = \"'" -``` - -### Semantics - -Words that do not contain placeholders are emitted mostly verbatim to the -output string. Quotations are formatted in the expected style for the target -platform (single quotes for Unix, double quotes for Windows) regardless of the -quotes used in the template literal—as with JavaScript, single and double quotes -are interchangeable, except for the requirement to pair like with like. Unquoted -semicolons are translated to ampersands on Windows; all other special characters -(as enumerated above), when unquoted, are passed as-is to the output for the -shell to interpret. - -Puka may still quote words not containing the above special characters, if they -contain characters that need quoting on the target platform. For example, on -Windows, the character `%` is used for variable interpolation in `cmd.exe`, and -Puka quotes it on on that platform even if it appears unquoted in the template -literal. Consequently, there is no need to be paranoid about quoting anything -that doesn't look alphanumeric inside a `sh` template literal, for fear of being -burned on a different operating system; anything that matches the definition of -‘text’ above will never need manual quoting. - -#### Types of placeholders - -##### Strings - -If a word contains a string placeholder, then the value of the placeholder is -interpolated into the word and the entire word, if necessary, is quoted. If -the placeholder occurs within quotes, no further quoting is performed: - -```javascript -sh`script --file="${'herp derp'}.txt"`; // => "script --file='herp derp.txt'" -``` - -This behavior can be exploited to force consistent quoting, if desired; but -both of the examples below are safe on all platforms: - -```javascript -const words = ['oneword', 'two words']; -sh`minimal ${words[0]}`; // => "minimal oneword" -sh`minimal ${words[1]}`; // => "minimal 'two words'" -sh`consistent '${words[0]}'`; // => "consistent 'oneword'" -sh`consistent '${words[1]}'`; // => "consistent 'two words'" -``` - -##### Arrays and iterables - -If a word contains a placeholder for an array (or other iterable object), then -the entire word is repeated once for each value in the array, separated by -spaces. If the array is empty, then the word is not emitted at all, and neither -is any leading whitespace. 
- -```javascript -const files = ['foo', 'bar']; -sh`script ${files}`; // => "script foo bar" -sh`script --file=${files}`; // => "script --file=foo --file=bar" -sh`script --file=${[]}`; // => "script" -``` - -Note that, since special characters are their own words, the pipe operator here -is not repeated: - -```javascript -const cmd = ['script', 'foo', 'bar']; -sh`${cmd}|another-script`; // => "script foo bar|another-script" -``` - -Multiple arrays in the same word generate a Cartesian product: - -```javascript -const names = ['foo', 'bar'], exts = ['log', 'txt']; -// Same word -sh`... ${names}.${exts}`; // => "... foo.log foo.txt bar.log bar.txt" -sh`... "${names} ${exts}"`; // => "... 'foo log' 'foo txt' 'bar log' 'bar txt'" - -// Not the same word (extra space just for emphasis): -sh`... ${names} ${exts}`; // => "... foo bar log txt" -sh`... ${names};${exts}`; // => "... foo bar;log txt" -``` - -Finally, if a placeholder appears in the object of a redirect operator, the -entire redirect is repeated as necessary: - -```javascript -sh`script > ${['foo', 'bar']}.txt`; // => "script > foo.txt > bar.txt" -sh`script > ${[]}.txt`; // => "script" -``` - -##### unquoted - -The `unquoted` function returns a value that will skip being quoted when used -in a placeholder, alone or in an array. - -```javascript -const cmd = 'script < input.txt'; -const fields = ['foo', 'bar']; -sh`${unquoted(cmd)} | json ${fields}`; // => "script < input.txt | json foo bar" -``` - -##### ShellString - -If `ShellString.sh` is used to construct an unformatted ShellString, that value -can be used in a placeholder to insert the contents of the ShellString into the -outer template literal. This is safer than using `unquoted` as in the previous -example, but `unquoted` can be used when all you have is a string from another -(trusted!) source. - -```javascript -const url = 'http://example.com/data.json?x=1&y=2'; -const curl = ShellString.sh`curl -L ${url}`; -const fields = ['foo', 'bar']; -sh`${curl} | json ${fields}`; // => "curl -L 'http://example.com/data.json?x=1&y=2' | json foo bar" -``` - -##### Anything else - -... is treated like a string—namely, a value `x` is equivalent to `'' + x`, if -not in one of the above categories. diff --git a/node_modules/puka/index.js b/node_modules/puka/index.js deleted file mode 100644 index b69e47d7639db..0000000000000 --- a/node_modules/puka/index.js +++ /dev/null @@ -1,804 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, '__esModule', { value: true }); - -/** - * Key a method on your object with this symbol and you can get special - * formatting for that value! See ShellStringText, ShellStringUnquoted, or - * shellStringSemicolon for examples. - * @ignore - */ -const formatSymbol = Symbol('format'); -/** - * This symbol is for implementing advanced behaviors like the need for extra - * carets in Windows shell strings that use pipes. If present, it's called in - * an earlier phase than formatSymbol, and is passed a mutable context that can - * be read during the format phase to influence formatting. - * @ignore - */ -const preformatSymbol = Symbol('preformat'); - -// When minimum Node version becomes 6, replace calls to sticky with /.../y and -// inline execFrom. -let stickySupported = true; -try { - new RegExp('', 'y'); -} catch (e) { - stickySupported = false; -} -const sticky = stickySupported ? source => new RegExp(source, 'y') : source => new RegExp(`^(?:${source})`); -const execFrom = stickySupported ? 
(re, haystack, index) => (re.lastIndex = index, re.exec(haystack)) : (re, haystack, index) => re.exec(haystack.substr(index)); - -function quoteForCmd(text, forceQuote) { - let caretDepth = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0; - // See the below blog post for an explanation of this function and - // quoteForWin32: - // https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/ - if (!text.length) { - return '""'; - } - if (/[\n\r]/.test(text)) { - throw new Error("Line breaks can't be quoted on Windows"); - } - const caretEscape = /["%]/.test(text); - text = quoteForWin32(text, forceQuote || !caretEscape && /[&()<>^|]/.test(text)); - if (caretEscape) { - // See Win32Context for explanation of what caretDepth is for. - do { - text = text.replace(/[\t "%&()<>^|]/g, '^$&'); - } while (caretDepth--); - } - return text; -} -const quoteForWin32 = (text, forceQuote) => forceQuote || /[\t "]/.test(text) ? `"${text.replace(/\\+(?=$|")/g, '$&$&').replace(/"/g, '\\"')}"` : text; -const cmdMetaChars = /[\t\n\r "%&()<>^|]/; -class Win32Context { - constructor() { - this.currentScope = newScope(null); - this.scopesByObject = new Map(); - this.argDetectState = 0; - this.argSet = new Set(); - } - read(text) { - // When cmd.exe executes a batch file, or pipes to or from one, it spawns a - // second copy of itself to run the inner command. This necessitates - // doubling up on carets so that escaped characters survive both cmd.exe - // invocations. See: - // https://stackoverflow.com/questions/8192318/why-does-delayed-expansion-fail-when-inside-a-piped-block-of-code#8194279 - // https://ss64.com/nt/syntax-redirection.html - // - // Parentheses can create an additional subshell, requiring additional - // escaping... it's a mess. - // - // So here's what we do about it: we read all unquoted text in a shell - // string and put it through this tiny parser that looks for pipes, - // sequence operators (&, &&, ||), redirects, and parentheses. This can't - // be part of the main Puka parsing, because it can be affected by - // `unquoted(...)` values provided at evaluation time. - // - // Then, after associating each thing that needs to be quoted with a scope - // (via `mark()`), and identifying whether or not it's an argument to a - // command, we can determine the depth of caret escaping required in each - // scope and pass it (via `Formatter::quote()`) to `quoteForCmd()`. - // - // See also `ShellStringText`, which holds the logic for the previous - // paragraph. - const length = text.length; - for (let pos = 0, match; pos < length;) { - while (match = execFrom(reUnimportant, text, pos)) { - if (match[2] == null) { - // (not whitespace) - if (match[1] != null) { - // (>&) - this.argDetectState = this.argDetectState === 0 ? ADS_FLAG_INITIAL_REDIRECT : 0; - } else if (this.argDetectState !== ADS_FLAG_ARGS) { - this.argDetectState |= ADS_FLAG_WORD; - } - } else { - // (whitespace) - if ((this.argDetectState & ADS_FLAG_WORD) !== 0) { - this.argDetectState = ADS_FLAG_ARGS & ~this.argDetectState >> 1; - } - } - pos += match[0].length; - } - if (pos >= length) break; - if (match = execFrom(reSeqOp, text, pos)) { - this.seq(); - pos += match[0].length; - } else { - const char = text.charCodeAt(pos); - if (char === CARET) { - pos += 2; - } else if (char === QUOTE) { - // If you were foolish enough to leave a dangling quotation mark in - // an unquoted span... 
you're likely to have bigger problems than - // incorrect escaping. So we just do the simplest thing of looking for - // the end quote only in this piece of text. - pos += execFrom(reNotQuote, text, pos + 1)[0].length + 2; - } else { - if (char === OPEN_PAREN) { - this.enterScope(); - } else if (char === CLOSE_PAREN) { - this.exitScope(); - } else if (char === PIPE) { - this.pipe(); - } else { - // (char === '<' or '>') - this.argDetectState = this.argDetectState === 0 ? ADS_FLAG_INITIAL_REDIRECT : 0; - } - pos++; - } - } - } - } - enterScope() { - this.currentScope = newScope(this.currentScope); - this.argDetectState = 0; - } - exitScope() { - this.currentScope = this.currentScope.parent || (this.currentScope.parent = newScope(null)); - this.argDetectState = ADS_FLAG_ARGS; - } - seq() { - // | binds tighter than sequence operators, so the latter create new sibling - // scopes for future |s to mutate. - this.currentScope = newScope(this.currentScope.parent); - this.argDetectState = 0; - } - pipe() { - this.currentScope.depthDelta = 1; - this.argDetectState = 0; - } - mark(obj) { - this.scopesByObject.set(obj, this.currentScope); - if (this.argDetectState === ADS_FLAG_ARGS) { - this.argSet.add(obj); - } else { - this.argDetectState |= ADS_FLAG_WORD; - } - } - at(obj) { - const scope = this.scopesByObject.get(obj); - return { - depth: getDepth(scope), - isArgument: this.argSet.has(obj), - isNative: scope.isNative - }; - } -} -// These flags span the Win32Context's argument detection state machine. WORD -// is set when the context is inside a word that is not an argument (meaning it -// is either the first word in the command, or it is the object of a redirect). -// ARGS is set when the context has reached the arguments of a command. -// INITIAL_REDIRECT tracks the edge case when a redirect occurs before the -// first word of the command (if this flag is set, reaching the end of a word -// should take the state machine back to 0 instead of setting ADS_FLAG_ARGS). -const ADS_FLAG_WORD = 0x1; -const ADS_FLAG_ARGS = 0x2; -const ADS_FLAG_INITIAL_REDIRECT = 0x4; -const getDepth = scope => scope === null ? 0 : scope.depth !== -1 ? scope.depth : scope.depth = getDepth(scope.parent) + scope.depthDelta; -const newScope = parent => ({ - parent, - depthDelta: 0, - depth: -1, - isNative: false -}); -const CARET = '^'.charCodeAt(); -const QUOTE = '"'.charCodeAt(); -const OPEN_PAREN = '('.charCodeAt(); -const CLOSE_PAREN = ')'.charCodeAt(); -const PIPE = '|'.charCodeAt(); -const reNotQuote = sticky('[^"]*'); -const reSeqOp = sticky('&&?|\\|\\|'); -const reUnimportant = sticky('(\\d*>&)|[^\\s"$&()<>^|]+|(\\s+)'); - -const quoteForSh = (text, forceQuote) => text.length ? forceQuote || shMetaChars.test(text) ? `'${text.replace(/'/g, "'\\''")}'`.replace(/^(?:'')+(?!$)/, '').replace(/\\'''/g, "\\'") : text : "''"; -const shMetaChars = /[\t\n\r "#$&'()*;<>?\\`|~]/; - -/** - * To get a Formatter, call `Formatter.for`. - * - * To create a new Formatter, pass an object to `Formatter.declare`. - * - * To set the global default Formatter, assign to `Formatter.default`. - * - * @class - * @property {Formatter} default - The Formatter to be used when no platform - * is provided—for example, when creating strings with `sh`. 
- * @ignore - */ -function Formatter() {} -Object.assign(Formatter, -/** @lends Formatter */ -{ - /** - * Gets a Formatter that has been declared for the provided platform, or - * the base `'sh'` formatter if there is no Formatter specific to this - * platform, or the Formatter for the current platform if no specific platform - * is provided. - */ - for(platform) { - return platform == null ? Formatter.default || (Formatter.default = Formatter.for(process.platform)) : Formatter._registry.get(platform) || Formatter._registry.get('sh'); - }, - /** - * Creates a new Formatter or mutates the properties on an existing - * Formatter. The `platform` key on the provided properties object determines - * when the Formatter is retrieved. - */ - declare(props) { - const platform = props && props.platform || 'sh'; - const existingFormatter = Formatter._registry.get(platform); - const formatter = Object.assign(existingFormatter || new Formatter(), props); - formatter.emptyString === void 0 && (formatter.emptyString = formatter.quote('', true)); - existingFormatter || Formatter._registry.set(formatter.platform, formatter); - }, - _registry: new Map(), - prototype: { - platform: 'sh', - quote: quoteForSh, - metaChars: shMetaChars, - hasExtraMetaChars: false, - statementSeparator: ';', - createContext() { - return defaultContext; - } - } -}); -const defaultContext = { - at() {} -}; -Formatter.declare(); -Formatter.declare({ - platform: 'win32', - quote(text, forceQuote, opts) { - const caretDepth = opts ? (opts.depth || 0) + (opts.isArgument && !opts.isNative ? 1 : 0) : 0; - return quoteForCmd(text, forceQuote, caretDepth); - }, - metaChars: cmdMetaChars, - hasExtraMetaChars: true, - statementSeparator: '&', - createContext(root) { - const context = new this.Context(); - root[preformatSymbol](context); - return context; - }, - Context: Win32Context -}); - -const isObject = any => any === Object(any); -function memoize(f) { - const cache = new WeakMap(); - return arg => { - let result = cache.get(arg); - if (result === void 0) { - result = f(arg); - cache.set(arg, result); - } - return result; - }; -} - -/** - * Represents a contiguous span of text that may or must be quoted. The contents - * may already contain quoted segments, which will always be quoted. If unquoted - * segments also require quoting, the entire span will be quoted together. - * @ignore - */ -class ShellStringText { - constructor(contents, untested) { - this.contents = contents; - this.untested = untested; - } - [formatSymbol](formatter, context) { - const unformattedContents = this.contents; - const length = unformattedContents.length; - const contents = new Array(length); - for (let i = 0; i < length; i++) { - const c = unformattedContents[i]; - contents[i] = isObject(c) && formatSymbol in c ? c[formatSymbol](formatter) : c; - } - for (let unquoted = true, i = 0; i < length; i++) { - const content = contents[i]; - if (content === null) { - unquoted = !unquoted; - } else { - if (unquoted && (formatter.hasExtraMetaChars || this.untested && this.untested.has(i)) && formatter.metaChars.test(content)) { - return formatter.quote(contents.join(''), false, context.at(this)); - } - } - } - const parts = []; - for (let quoted = null, i = 0; i < length; i++) { - const content = contents[i]; - if (content === null) { - quoted = quoted ? (parts.push(formatter.quote(quoted.join(''), true, context.at(this))), null) : []; - } else { - (quoted || parts).push(content); - } - } - const result = parts.join(''); - return result.length ? 
result : formatter.emptyString; - } - [preformatSymbol](context) { - context.mark(this); - } -} - -/** - * Represents a contiguous span of text that will not be quoted. - * @ignore - */ -class ShellStringUnquoted { - constructor(value) { - this.value = value; - } - [formatSymbol]() { - return this.value; - } - [preformatSymbol](context) { - context.read(this.value); - } -} - -/** - * Represents a semicolon... or an ampersand, on Windows. - * @ignore - */ -const shellStringSemicolon = { - [formatSymbol](formatter) { - return formatter.statementSeparator; - }, - [preformatSymbol](context) { - context.seq(); - } -}; - -const PLACEHOLDER = {}; -const parse = memoize(templateSpans => { - // These are the token types our DSL can recognize. Their values won't escape - // this function. - const TOKEN_TEXT = 0; - const TOKEN_QUOTE = 1; - const TOKEN_SEMI = 2; - const TOKEN_UNQUOTED = 3; - const TOKEN_SPACE = 4; - const TOKEN_REDIRECT = 5; - const result = []; - let placeholderCount = 0; - let prefix = null; - let onlyPrefixOnce = false; - let contents = []; - let quote = 0; - const lastSpan = templateSpans.length - 1; - for (let spanIndex = 0; spanIndex <= lastSpan; spanIndex++) { - const templateSpan = templateSpans[spanIndex]; - const posEnd = templateSpan.length; - let tokenStart = 0; - if (spanIndex) { - placeholderCount++; - contents.push(PLACEHOLDER); - } - // For each span, we first do a recognizing pass in which we use regular - // expressions to identify the positions of tokens in the text, and then - // a second pass that actually splits the text into the minimum number of - // substrings necessary. - const recognized = []; // [type1, index1, type2, index2...] - let firstWordBreak = -1; - let lastWordBreak = -1; - { - let pos = 0, - match; - while (pos < posEnd) { - if (quote) { - if (match = execFrom(quote === CHAR_SQUO ? reQuotation1 : reQuotation2, templateSpan, pos)) { - recognized.push(TOKEN_TEXT, pos); - pos += match[0].length; - } - if (pos < posEnd) { - recognized.push(TOKEN_QUOTE, pos++); - quote = 0; - } - } else { - if (match = execFrom(reRedirectOrSpace, templateSpan, pos)) { - firstWordBreak < 0 && (firstWordBreak = pos); - lastWordBreak = pos; - recognized.push(match[1] ? TOKEN_REDIRECT : TOKEN_SPACE, pos); - pos += match[0].length; - } - if (match = execFrom(reText, templateSpan, pos)) { - const setBreaks = match[1] != null; - setBreaks && firstWordBreak < 0 && (firstWordBreak = pos); - recognized.push(setBreaks ? TOKEN_UNQUOTED : TOKEN_TEXT, pos); - pos += match[0].length; - setBreaks && (lastWordBreak = pos); - } - const char = templateSpan.charCodeAt(pos); - if (char === CHAR_SEMI) { - firstWordBreak < 0 && (firstWordBreak = pos); - recognized.push(TOKEN_SEMI, pos++); - lastWordBreak = pos; - } else if (char === CHAR_SQUO || char === CHAR_DQUO) { - recognized.push(TOKEN_QUOTE, pos++); - quote = char; - } - } - } - } - // Word breaks are only important if they separate words with placeholders, - // so we can ignore the first/last break if this is the first/last span. - spanIndex === 0 && (firstWordBreak = -1); - spanIndex === lastSpan && (lastWordBreak = posEnd); - // Here begins the second pass mentioned above. This loop runs one more - // iteration than there are tokens in recognized, because it handles tokens - // on a one-iteration delay; hence the i <= iEnd instead of i < iEnd. 
- const iEnd = recognized.length; - for (let i = 0, type = -1; i <= iEnd; i += 2) { - let typeNext = -1, - pos; - if (i === iEnd) { - pos = posEnd; - } else { - typeNext = recognized[i]; - pos = recognized[i + 1]; - // If the next token is space or redirect, but there's another word - // break in this span, then we can handle that token the same way we - // would handle unquoted text because it isn't being attached to a - // placeholder. - typeNext >= TOKEN_SPACE && pos !== lastWordBreak && (typeNext = TOKEN_UNQUOTED); - } - const breakHere = pos === firstWordBreak || pos === lastWordBreak; - if (pos && (breakHere || typeNext !== type)) { - let value = type === TOKEN_QUOTE ? null : type === TOKEN_SEMI ? shellStringSemicolon : templateSpan.substring(tokenStart, pos); - if (type >= TOKEN_SEMI) { - // This branch handles semicolons, unquoted text, spaces, and - // redirects. shellStringSemicolon is already a formatSymbol object; - // the rest need to be wrapped. - type === TOKEN_SEMI || (value = new ShellStringUnquoted(value)); - // We don't need to check placeholderCount here like we do below; - // that's only relevant during the first word break of the span, and - // because this iteration of the loop is processing the token that - // was checked for breaks in the previous iteration, it will have - // already been handled. For the same reason, prefix is guaranteed to - // be null. - if (contents.length) { - result.push(new ShellStringText(contents, null)); - contents = []; - } - // Only spaces and redirects become prefixes, but not if they've been - // rewritten to unquoted above. - if (type >= TOKEN_SPACE) { - prefix = value; - onlyPrefixOnce = type === TOKEN_SPACE; - } else { - result.push(value); - } - } else { - contents.push(value); - } - tokenStart = pos; - } - if (breakHere) { - if (placeholderCount) { - result.push({ - contents, - placeholderCount, - prefix, - onlyPrefixOnce - }); - } else { - // There's no prefix to handle in this branch; a prefix prior to this - // span would mean placeholderCount > 0, and a prefix in this span - // can't be created because spaces and redirects get rewritten to - // unquoted before the last word break. 
- contents.length && result.push(new ShellStringText(contents, null)); - } - placeholderCount = 0; - prefix = null; - onlyPrefixOnce = false; - contents = []; - } - type = typeNext; - } - } - if (quote) { - throw new SyntaxError(`String is missing a ${String.fromCharCode(quote)} character`); - } - return result; -}); -const CHAR_SEMI = ';'.charCodeAt(); -const CHAR_SQUO = "'".charCodeAt(); -const CHAR_DQUO = '"'.charCodeAt(); -const reQuotation1 = sticky("[^']+"); -const reQuotation2 = sticky('[^"]+'); -const reText = sticky('[^\\s"#$&\'();<>\\\\`|]+|([#$&()\\\\`|]+)'); -const reRedirectOrSpace = sticky('(\\s*\\d*[<>]+\\s*)|\\s+'); - -class BitSet { - constructor() { - this.vector = new Int32Array(1); - } - has(n) { - return (this.vector[n >>> 5] & 1 << n) !== 0; - } - add(n) { - const i = n >>> 5, - requiredLength = i + 1; - let vector = this.vector, - _vector = vector, - length = _vector.length; - if (requiredLength > length) { - while (requiredLength > (length *= 2)); - const oldValues = vector; - vector = new Int32Array(length); - vector.set(oldValues); - this.vector = vector; - } - vector[i] |= 1 << n; - } -} - -function evaluate(template, values) { - values = values.map(toStringishArray); - const children = []; - let valuesStart = 0; - for (let i = 0, iMax = template.length; i < iMax; i++) { - const word = template[i]; - if (formatSymbol in word) { - children.push(word); - continue; - } - const contents = word.contents, - placeholderCount = word.placeholderCount, - prefix = word.prefix, - onlyPrefixOnce = word.onlyPrefixOnce; - const kMax = contents.length; - const valuesEnd = valuesStart + placeholderCount; - const tuples = cartesianProduct(values, valuesStart, valuesEnd); - valuesStart = valuesEnd; - for (let j = 0, jMax = tuples.length; j < jMax; j++) { - const needSpace = j > 0; - const tuple = tuples[j]; - (needSpace || prefix) && children.push(needSpace && (onlyPrefixOnce || !prefix) ? unquotedSpace : prefix); - let interpolatedContents = []; - let untested = null; - let quoting = false; - let tupleIndex = 0; - for (let k = 0; k < kMax; k++) { - const content = contents[k]; - if (content === PLACEHOLDER) { - const value = tuple[tupleIndex++]; - if (quoting) { - interpolatedContents.push(value); - } else { - if (isObject(value) && formatSymbol in value) { - if (interpolatedContents.length) { - children.push(new ShellStringText(interpolatedContents, untested)); - interpolatedContents = []; - untested = null; - } - children.push(value); - } else { - (untested || (untested = new BitSet())).add(interpolatedContents.length); - interpolatedContents.push(value); - } - } - } else { - interpolatedContents.push(content); - content === null && (quoting = !quoting); - } - } - if (interpolatedContents.length) { - children.push(new ShellStringText(interpolatedContents, untested)); - } - } - } - return children; -} -const primToStringish = value => value == null ? 
'' + value : value; -function toStringishArray(value) { - let array; - switch (true) { - default: - if (isObject(value)) { - if (Array.isArray(value)) { - array = value; - break; - } - if (Symbol.iterator in value) { - array = Array.from(value); - break; - } - } - array = [value]; - } - return array.map(primToStringish); -} -function cartesianProduct(arrs, start, end) { - const size = end - start; - let resultLength = 1; - for (let i = start; i < end; i++) { - resultLength *= arrs[i].length; - } - if (resultLength > 1e6) { - throw new RangeError("Far too many elements to interpolate"); - } - const result = new Array(resultLength); - const indices = new Array(size).fill(0); - for (let i = 0; i < resultLength; i++) { - const value = result[i] = new Array(size); - for (let j = 0; j < size; j++) { - value[j] = arrs[j + start][indices[j]]; - } - for (let j = size - 1; j >= 0; j--) { - if (++indices[j] < arrs[j + start].length) break; - indices[j] = 0; - } - } - return result; -} -const unquotedSpace = new ShellStringUnquoted(' '); - -/** - * A ShellString represents a shell command after it has been interpolated, but - * before it has been formatted for a particular platform. ShellStrings are - * useful if you want to prepare a command for a different platform than the - * current one, for instance. - * - * To create a ShellString, use `ShellString.sh` the same way you would use - * top-level `sh`. - */ -class ShellString { - /** @hideconstructor */ - constructor(children) { - this.children = children; - } - /** - * `ShellString.sh` is a template tag just like `sh`; the only difference is - * that this function returns a ShellString which has not yet been formatted - * into a String. - * @returns {ShellString} - * @function sh - * @static - * @memberof ShellString - */ - static sh(templateSpans) { - for (var _len = arguments.length, values = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) { - values[_key - 1] = arguments[_key]; - } - return new ShellString(evaluate(parse(templateSpans), values)); - } - /** - * A method to format a ShellString into a regular String formatted for a - * particular platform. - * - * @param {String} [platform] a value that `process.platform` might take: - * `'win32'`, `'linux'`, etc.; determines how the string is to be formatted. - * When omitted, effectively the same as `process.platform`. - * @returns {String} - */ - toString(platform) { - return this[formatSymbol](Formatter.for(platform)); - } - [formatSymbol](formatter) { - let context = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : formatter.createContext(this); - return this.children.map(child => child[formatSymbol](formatter, context)).join(''); - } - [preformatSymbol](context) { - const children = this.children; - for (let i = 0, iMax = children.length; i < iMax; i++) { - const child = children[i]; - if (preformatSymbol in child) { - child[preformatSymbol](context); - } - } - } -} - -/** - * A Windows-specific version of {@link quoteForShell}. - * @param {String} text to be quoted - * @param {Boolean} [forceQuote] whether to always add quotes even if the string - * is already safe. Defaults to `false`. - */ - -/** - * A Unix-specific version of {@link quoteForShell}. - * @param {String} text to be quoted - * @param {Boolean} [forceQuote] whether to always add quotes even if the string - * is already safe. Defaults to `false`. - */ - -/** - * Quotes a string for injecting into a shell command. 
- * - * This function is exposed for some hypothetical case when the `sh` DSL simply - * won't do; `sh` is expected to be the more convenient option almost always. - * Compare: - * - * ```javascript - * console.log('cmd' + args.map(a => ' ' + quoteForShell(a)).join('')); - * console.log(sh`cmd ${args}`); // same as above - * - * console.log('cmd' + args.map(a => ' ' + quoteForShell(a, true)).join('')); - * console.log(sh`cmd "${args}"`); // same as above - * ``` - * - * Additionally, on Windows, `sh` checks the entire command string for pipes, - * which subtly change how arguments need to be quoted. If your commands may - * involve pipes, you are strongly encouraged to use `sh` and not try to roll - * your own with `quoteForShell`. - * - * @param {String} text to be quoted - * @param {Boolean} [forceQuote] whether to always add quotes even if the string - * is already safe. Defaults to `false`. - * @param {String} [platform] a value that `process.platform` might take: - * `'win32'`, `'linux'`, etc.; determines how the string is to be formatted. - * When omitted, effectively the same as `process.platform`. - * - * @returns {String} a string that is safe for the current (or specified) - * platform. - */ -function quoteForShell(text, forceQuote, platform) { - return Formatter.for(platform).quote(text, forceQuote); -} - -/** - * A string template tag for safely constructing cross-platform shell commands. - * - * An `sh` template is not actually treated as a literal string to be - * interpolated; instead, it is a tiny DSL designed to make working with shell - * strings safe, simple, and straightforward. To get started quickly, see the - * examples below. {@link #the-sh-dsl More detailed documentation} is available - * further down. - * - * @name sh - * @example - * const title = '"this" & "that"'; - * sh`script --title=${title}`; // => "script '--title=\"this\" & \"that\"'" - * // Note: these examples show results for non-Windows platforms. - * // On Windows, the above would instead be - * // 'script ^^^"--title=\\^^^"this\\^^^" ^^^& \\^^^"that\\^^^"^^^"'. - * - * const names = ['file1', 'file 2']; - * sh`rimraf ${names}.txt`; // => "rimraf file1.txt 'file 2.txt'" - * - * const cmd1 = ['cat', 'file 1.txt', 'file 2.txt']; - * const cmd2 = ['use-input', '-abc']; - * sh`${cmd1}|${cmd2}`; // => "cat 'file 1.txt' 'file 2.txt'|use-input -abc" - * - * @returns {String} - a string formatted for the platform Node is currently - * running on. - */ -const sh = function () { - return ShellString.sh.apply(ShellString, arguments).toString(); -}; - -/** - * This function permits raw strings to be interpolated into a `sh` template. - * - * **IMPORTANT**: If you're using Puka due to security concerns, make sure you - * don't pass any untrusted content to `unquoted`. This may be obvious, but - * stray punctuation in an `unquoted` section can compromise the safety of the - * entire shell command. - * - * @param value - any value (it will be treated as a string) - * - * @example - * const both = true; - * sh`foo ${unquoted(both ? 
'&&' : '||')} bar`; // => 'foo && bar' - */ -const unquoted = value => new ShellStringUnquoted(value); - -exports.Formatter = Formatter; -exports.ShellString = ShellString; -exports.ShellStringText = ShellStringText; -exports.ShellStringUnquoted = ShellStringUnquoted; -exports.quoteForCmd = quoteForCmd; -exports.quoteForSh = quoteForSh; -exports.quoteForShell = quoteForShell; -exports.sh = sh; -exports.shellStringSemicolon = shellStringSemicolon; -exports.formatSymbol = formatSymbol; -exports.preformatSymbol = preformatSymbol; -exports.unquoted = unquoted; diff --git a/node_modules/puka/package.json b/node_modules/puka/package.json deleted file mode 100644 index 41798dc2493b8..0000000000000 --- a/node_modules/puka/package.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "name": "puka", - "version": "1.0.1", - "description": "A cross-platform library for safely passing strings through shells", - "keywords": [ - "args", - "arguments", - "cmd", - "command", - "command-line", - "cross-platform", - "escape", - "escaping", - "exec", - "linux", - "mac", - "macos", - "osx", - "quote", - "quoting", - "sh", - "shell", - "spawn", - "unix", - "win", - "win32", - "windows" - ], - "homepage": "https://gitlab.com/rhendric/puka", - "bugs": "https://gitlab.com/rhendric/puka/issues", - "license": "MIT", - "author": "Ryan Hendrickson <ryan.hendrickson@alum.mit.edu>", - "repository": "gitlab:rhendric/puka", - "dependencies": {}, - "engines": { - "node": ">=4" - } -} \ No newline at end of file diff --git a/node_modules/punycode/README.md b/node_modules/punycode/README.md deleted file mode 100644 index ee2f9d63320c0..0000000000000 --- a/node_modules/punycode/README.md +++ /dev/null @@ -1,122 +0,0 @@ -# Punycode.js [![Build status](https://travis-ci.org/bestiejs/punycode.js.svg?branch=master)](https://travis-ci.org/bestiejs/punycode.js) [![Code coverage status](http://img.shields.io/codecov/c/github/bestiejs/punycode.js.svg)](https://codecov.io/gh/bestiejs/punycode.js) [![Dependency status](https://gemnasium.com/bestiejs/punycode.js.svg)](https://gemnasium.com/bestiejs/punycode.js) - -Punycode.js is a robust Punycode converter that fully complies to [RFC 3492](https://tools.ietf.org/html/rfc3492) and [RFC 5891](https://tools.ietf.org/html/rfc5891). - -This JavaScript library is the result of comparing, optimizing and documenting different open-source implementations of the Punycode algorithm: - -* [The C example code from RFC 3492](https://tools.ietf.org/html/rfc3492#appendix-C) -* [`punycode.c` by _Markus W. Scherer_ (IBM)](http://opensource.apple.com/source/ICU/ICU-400.42/icuSources/common/punycode.c) -* [`punycode.c` by _Ben Noordhuis_](https://github.com/bnoordhuis/punycode/blob/master/punycode.c) -* [JavaScript implementation by _some_](http://stackoverflow.com/questions/183485/can-anyone-recommend-a-good-free-javascript-for-punycode-to-unicode-conversion/301287#301287) -* [`punycode.js` by _Ben Noordhuis_](https://github.com/joyent/node/blob/426298c8c1c0d5b5224ac3658c41e7c2a3fe9377/lib/punycode.js) (note: [not fully compliant](https://github.com/joyent/node/issues/2072)) - -This project was [bundled](https://github.com/joyent/node/blob/master/lib/punycode.js) with Node.js from [v0.6.2+](https://github.com/joyent/node/compare/975f1930b1...61e796decc) until [v7](https://github.com/nodejs/node/pull/7941) (soft-deprecated). - -The current version supports recent versions of Node.js only. It provides a CommonJS module and an ES6 module. 
For the old version that offers the same functionality with broader support, including Rhino, Ringo, Narwhal, and web browsers, see [v1.4.1](https://github.com/bestiejs/punycode.js/releases/tag/v1.4.1). - -## Installation - -Via [npm](https://www.npmjs.com/): - -```bash -npm install punycode --save -``` - -In [Node.js](https://nodejs.org/): - -```js -const punycode = require('punycode'); -``` - -## API - -### `punycode.decode(string)` - -Converts a Punycode string of ASCII symbols to a string of Unicode symbols. - -```js -// decode domain name parts -punycode.decode('maana-pta'); // 'mañana' -punycode.decode('--dqo34k'); // '☃-⌘' -``` - -### `punycode.encode(string)` - -Converts a string of Unicode symbols to a Punycode string of ASCII symbols. - -```js -// encode domain name parts -punycode.encode('mañana'); // 'maana-pta' -punycode.encode('☃-⌘'); // '--dqo34k' -``` - -### `punycode.toUnicode(input)` - -Converts a Punycode string representing a domain name or an email address to Unicode. Only the Punycoded parts of the input will be converted, i.e. it doesn’t matter if you call it on a string that has already been converted to Unicode. - -```js -// decode domain names -punycode.toUnicode('xn--maana-pta.com'); -// → 'mañana.com' -punycode.toUnicode('xn----dqo34k.com'); -// → '☃-⌘.com' - -// decode email addresses -punycode.toUnicode('джумла@xn--p-8sbkgc5ag7bhce.xn--ba-lmcq'); -// → 'джумла@джpумлатест.bрфa' -``` - -### `punycode.toASCII(input)` - -Converts a lowercased Unicode string representing a domain name or an email address to Punycode. Only the non-ASCII parts of the input will be converted, i.e. it doesn’t matter if you call it with a domain that’s already in ASCII. - -```js -// encode domain names -punycode.toASCII('mañana.com'); -// → 'xn--maana-pta.com' -punycode.toASCII('☃-⌘.com'); -// → 'xn----dqo34k.com' - -// encode email addresses -punycode.toASCII('джумла@джpумлатест.bрфa'); -// → 'джумла@xn--p-8sbkgc5ag7bhce.xn--ba-lmcq' -``` - -### `punycode.ucs2` - -#### `punycode.ucs2.decode(string)` - -Creates an array containing the numeric code point values of each Unicode symbol in the string. While [JavaScript uses UCS-2 internally](https://mathiasbynens.be/notes/javascript-encoding), this function will convert a pair of surrogate halves (each of which UCS-2 exposes as separate characters) into a single code point, matching UTF-16. - -```js -punycode.ucs2.decode('abc'); -// → [0x61, 0x62, 0x63] -// surrogate pair for U+1D306 TETRAGRAM FOR CENTRE: -punycode.ucs2.decode('\uD834\uDF06'); -// → [0x1D306] -``` - -#### `punycode.ucs2.encode(codePoints)` - -Creates a string based on an array of numeric code point values. - -```js -punycode.ucs2.encode([0x61, 0x62, 0x63]); -// → 'abc' -punycode.ucs2.encode([0x1D306]); -// → '\uD834\uDF06' -``` - -### `punycode.version` - -A string representing the current Punycode.js version number. - -## Author - -| [![twitter/mathias](https://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") | -|---| -| [Mathias Bynens](https://mathiasbynens.be/) | - -## License - -Punycode.js is available under the [MIT](https://mths.be/mit) license. 
diff --git a/node_modules/qrcode-terminal/.travis.yml b/node_modules/qrcode-terminal/.travis.yml deleted file mode 100644 index 6e5919de39a31..0000000000000 --- a/node_modules/qrcode-terminal/.travis.yml +++ /dev/null @@ -1,3 +0,0 @@ -language: node_js -node_js: - - "0.10" diff --git a/node_modules/qrcode-terminal/README.md b/node_modules/qrcode-terminal/README.md deleted file mode 100644 index f5c830f20f45b..0000000000000 --- a/node_modules/qrcode-terminal/README.md +++ /dev/null @@ -1,82 +0,0 @@ -# QRCode Terminal Edition [![Build Status][travis-ci-img]][travis-ci-url] - -> Going where no QRCode has gone before. - -![Basic Example][basic-example-img] - -# Node Library - -## Install - -Can be installed with: - - $ npm install qrcode-terminal - -and used: - - var qrcode = require('qrcode-terminal'); - -## Usage - -To display some data to the terminal just call: - - qrcode.generate('This will be a QRCode, eh!'); - -You can even specify the error level (default is 'L'): - - qrcode.setErrorLevel('Q'); - qrcode.generate('This will be a QRCode with error level Q!'); - -If you don't want to display to the terminal but just want to string you can provide a callback: - - qrcode.generate('http://github.com', function (qrcode) { - console.log(qrcode); - }); - -If you want to display small output, provide `opts` with `small`: - - qrcode.generate('This will be a small QRCode, eh!', {small: true}); - - qrcode.generate('This will be a small QRCode, eh!', {small: true}, function (qrcode) { - console.log(qrcode) - }); - -# Command-Line - -## Install - - $ npm install -g qrcode-terminal - -## Usage - - $ qrcode-terminal --help - $ qrcode-terminal 'http://github.com' - $ echo 'http://github.com' | qrcode-terminal - -# Support - -- OS X -- Linux -- Windows - -# Server-side - -[node-qrcode][node-qrcode-url] is a popular server-side QRCode generator that -renders to a `canvas` object. 
- -# Developing - -To setup the development envrionment run `npm install` - -To run tests run `npm test` - -# Contributers - - Gord Tanner <gtanner@gmail.com> - Micheal Brooks <michael@michaelbrooks.ca> - -[travis-ci-img]: https://travis-ci.org/gtanner/qrcode-terminal.png -[travis-ci-url]: https://travis-ci.org/gtanner/qrcode-terminal -[basic-example-img]: https://raw.github.com/gtanner/qrcode-terminal/master/example/basic.png -[node-qrcode-url]: https://github.com/soldair/node-qrcode - diff --git a/node_modules/qs/.editorconfig b/node_modules/qs/.editorconfig deleted file mode 100644 index b2654e7ac5ca0..0000000000000 --- a/node_modules/qs/.editorconfig +++ /dev/null @@ -1,30 +0,0 @@ -root = true - -[*] -indent_style = space -indent_size = 4 -end_of_line = lf -charset = utf-8 -trim_trailing_whitespace = true -insert_final_newline = true -max_line_length = 140 - -[test/*] -max_line_length = off - -[*.md] -max_line_length = off - -[*.json] -max_line_length = off - -[Makefile] -max_line_length = off - -[CHANGELOG.md] -indent_style = space -indent_size = 2 - -[LICENSE] -indent_size = 2 -max_line_length = off diff --git a/node_modules/qs/.eslintignore b/node_modules/qs/.eslintignore deleted file mode 100644 index 1521c8b7652b1..0000000000000 --- a/node_modules/qs/.eslintignore +++ /dev/null @@ -1 +0,0 @@ -dist diff --git a/node_modules/qs/CHANGELOG.md b/node_modules/qs/CHANGELOG.md deleted file mode 100644 index fe52320912363..0000000000000 --- a/node_modules/qs/CHANGELOG.md +++ /dev/null @@ -1,226 +0,0 @@ -## **6.5.2** -- [Fix] use `safer-buffer` instead of `Buffer` constructor -- [Refactor] utils: `module.exports` one thing, instead of mutating `exports` (#230) -- [Dev Deps] update `browserify`, `eslint`, `iconv-lite`, `safer-buffer`, `tape`, `browserify` - -## **6.5.1** -- [Fix] Fix parsing & compacting very deep objects (#224) -- [Refactor] name utils functions -- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` -- [Tests] up to `node` `v8.4`; use `nvm install-latest-npm` so newer npm doesn’t break older node -- [Tests] Use precise dist for Node.js 0.6 runtime (#225) -- [Tests] make 0.6 required, now that it’s passing -- [Tests] on `node` `v8.2`; fix npm on node 0.6 - -## **6.5.0** -- [New] add `utils.assign` -- [New] pass default encoder/decoder to custom encoder/decoder functions (#206) -- [New] `parse`/`stringify`: add `ignoreQueryPrefix`/`addQueryPrefix` options, respectively (#213) -- [Fix] Handle stringifying empty objects with addQueryPrefix (#217) -- [Fix] do not mutate `options` argument (#207) -- [Refactor] `parse`: cache index to reuse in else statement (#182) -- [Docs] add various badges to readme (#208) -- [Dev Deps] update `eslint`, `browserify`, `iconv-lite`, `tape` -- [Tests] up to `node` `v8.1`, `v7.10`, `v6.11`; npm v4.6 breaks on node < v1; npm v5+ breaks on node < v4 -- [Tests] add `editorconfig-tools` - -## **6.4.0** -- [New] `qs.stringify`: add `encodeValuesOnly` option -- [Fix] follow `allowPrototypes` option during merge (#201, #201) -- [Fix] support keys starting with brackets (#202, #200) -- [Fix] chmod a-x -- [Dev Deps] update `eslint` -- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds -- [eslint] reduce warnings - -## **6.3.2** -- [Fix] follow `allowPrototypes` option during merge (#201, #200) -- [Dev Deps] update `eslint` -- [Fix] chmod a-x -- [Fix] support keys starting with brackets (#202, #200) -- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds - -## 
**6.3.1** -- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties (thanks, @snyk!) -- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `browserify`, `iconv-lite`, `qs-iconv`, `tape` -- [Tests] on all node minors; improve test matrix -- [Docs] document stringify option `allowDots` (#195) -- [Docs] add empty object and array values example (#195) -- [Docs] Fix minor inconsistency/typo (#192) -- [Docs] document stringify option `sort` (#191) -- [Refactor] `stringify`: throw faster with an invalid encoder -- [Refactor] remove unnecessary escapes (#184) -- Remove contributing.md, since `qs` is no longer part of `hapi` (#183) - -## **6.3.0** -- [New] Add support for RFC 1738 (#174, #173) -- [New] `stringify`: Add `serializeDate` option to customize Date serialization (#159) -- [Fix] ensure `utils.merge` handles merging two arrays -- [Refactor] only constructors should be capitalized -- [Refactor] capitalized var names are for constructors only -- [Refactor] avoid using a sparse array -- [Robustness] `formats`: cache `String#replace` -- [Dev Deps] update `browserify`, `eslint`, `@ljharb/eslint-config`; add `safe-publish-latest` -- [Tests] up to `node` `v6.8`, `v4.6`; improve test matrix -- [Tests] flesh out arrayLimit/arrayFormat tests (#107) -- [Tests] skip Object.create tests when null objects are not available -- [Tests] Turn on eslint for test files (#175) - -## **6.2.3** -- [Fix] follow `allowPrototypes` option during merge (#201, #200) -- [Fix] chmod a-x -- [Fix] support keys starting with brackets (#202, #200) -- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds - -## **6.2.2** -- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties - -## **6.2.1** -- [Fix] ensure `key[]=x&key[]&key[]=y` results in 3, not 2, values -- [Refactor] Be explicit and use `Object.prototype.hasOwnProperty.call` -- [Tests] remove `parallelshell` since it does not reliably report failures -- [Tests] up to `node` `v6.3`, `v5.12` -- [Dev Deps] update `tape`, `eslint`, `@ljharb/eslint-config`, `qs-iconv` - -## [**6.2.0**](https://github.com/ljharb/qs/issues?milestone=36&state=closed) -- [New] pass Buffers to the encoder/decoder directly (#161) -- [New] add "encoder" and "decoder" options, for custom param encoding/decoding (#160) -- [Fix] fix compacting of nested sparse arrays (#150) - -## **6.1.2 -- [Fix] follow `allowPrototypes` option during merge (#201, #200) -- [Fix] chmod a-x -- [Fix] support keys starting with brackets (#202, #200) -- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds - -## **6.1.1** -- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties - -## [**6.1.0**](https://github.com/ljharb/qs/issues?milestone=35&state=closed) -- [New] allowDots option for `stringify` (#151) -- [Fix] "sort" option should work at a depth of 3 or more (#151) -- [Fix] Restore `dist` directory; will be removed in v7 (#148) - -## **6.0.4** -- [Fix] follow `allowPrototypes` option during merge (#201, #200) -- [Fix] chmod a-x -- [Fix] support keys starting with brackets (#202, #200) -- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds - -## **6.0.3** -- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties -- [Fix] Restore `dist` directory; will be removed in v7 (#148) - -## 
[**6.0.2**](https://github.com/ljharb/qs/issues?milestone=33&state=closed) -- Revert ES6 requirement and restore support for node down to v0.8. - -## [**6.0.1**](https://github.com/ljharb/qs/issues?milestone=32&state=closed) -- [**#127**](https://github.com/ljharb/qs/pull/127) Fix engines definition in package.json - -## [**6.0.0**](https://github.com/ljharb/qs/issues?milestone=31&state=closed) -- [**#124**](https://github.com/ljharb/qs/issues/124) Use ES6 and drop support for node < v4 - -## **5.2.1** -- [Fix] ensure `key[]=x&key[]&key[]=y` results in 3, not 2, values - -## [**5.2.0**](https://github.com/ljharb/qs/issues?milestone=30&state=closed) -- [**#64**](https://github.com/ljharb/qs/issues/64) Add option to sort object keys in the query string - -## [**5.1.0**](https://github.com/ljharb/qs/issues?milestone=29&state=closed) -- [**#117**](https://github.com/ljharb/qs/issues/117) make URI encoding stringified results optional -- [**#106**](https://github.com/ljharb/qs/issues/106) Add flag `skipNulls` to optionally skip null values in stringify - -## [**5.0.0**](https://github.com/ljharb/qs/issues?milestone=28&state=closed) -- [**#114**](https://github.com/ljharb/qs/issues/114) default allowDots to false -- [**#100**](https://github.com/ljharb/qs/issues/100) include dist to npm - -## [**4.0.0**](https://github.com/ljharb/qs/issues?milestone=26&state=closed) -- [**#98**](https://github.com/ljharb/qs/issues/98) make returning plain objects and allowing prototype overwriting properties optional - -## [**3.1.0**](https://github.com/ljharb/qs/issues?milestone=24&state=closed) -- [**#89**](https://github.com/ljharb/qs/issues/89) Add option to disable "Transform dot notation to bracket notation" - -## [**3.0.0**](https://github.com/ljharb/qs/issues?milestone=23&state=closed) -- [**#80**](https://github.com/ljharb/qs/issues/80) qs.parse silently drops properties -- [**#77**](https://github.com/ljharb/qs/issues/77) Perf boost -- [**#60**](https://github.com/ljharb/qs/issues/60) Add explicit option to disable array parsing -- [**#74**](https://github.com/ljharb/qs/issues/74) Bad parse when turning array into object -- [**#81**](https://github.com/ljharb/qs/issues/81) Add a `filter` option -- [**#68**](https://github.com/ljharb/qs/issues/68) Fixed issue with recursion and passing strings into objects. 
-- [**#66**](https://github.com/ljharb/qs/issues/66) Add mixed array and object dot notation support Closes: #47 -- [**#76**](https://github.com/ljharb/qs/issues/76) RFC 3986 -- [**#85**](https://github.com/ljharb/qs/issues/85) No equal sign -- [**#84**](https://github.com/ljharb/qs/issues/84) update license attribute - -## [**2.4.1**](https://github.com/ljharb/qs/issues?milestone=20&state=closed) -- [**#73**](https://github.com/ljharb/qs/issues/73) Property 'hasOwnProperty' of object #<Object> is not a function - -## [**2.4.0**](https://github.com/ljharb/qs/issues?milestone=19&state=closed) -- [**#70**](https://github.com/ljharb/qs/issues/70) Add arrayFormat option - -## [**2.3.3**](https://github.com/ljharb/qs/issues?milestone=18&state=closed) -- [**#59**](https://github.com/ljharb/qs/issues/59) make sure array indexes are >= 0, closes #57 -- [**#58**](https://github.com/ljharb/qs/issues/58) make qs usable for browser loader - -## [**2.3.2**](https://github.com/ljharb/qs/issues?milestone=17&state=closed) -- [**#55**](https://github.com/ljharb/qs/issues/55) allow merging a string into an object - -## [**2.3.1**](https://github.com/ljharb/qs/issues?milestone=16&state=closed) -- [**#52**](https://github.com/ljharb/qs/issues/52) Return "undefined" and "false" instead of throwing "TypeError". - -## [**2.3.0**](https://github.com/ljharb/qs/issues?milestone=15&state=closed) -- [**#50**](https://github.com/ljharb/qs/issues/50) add option to omit array indices, closes #46 - -## [**2.2.5**](https://github.com/ljharb/qs/issues?milestone=14&state=closed) -- [**#39**](https://github.com/ljharb/qs/issues/39) Is there an alternative to Buffer.isBuffer? -- [**#49**](https://github.com/ljharb/qs/issues/49) refactor utils.merge, fixes #45 -- [**#41**](https://github.com/ljharb/qs/issues/41) avoid browserifying Buffer, for #39 - -## [**2.2.4**](https://github.com/ljharb/qs/issues?milestone=13&state=closed) -- [**#38**](https://github.com/ljharb/qs/issues/38) how to handle object keys beginning with a number - -## [**2.2.3**](https://github.com/ljharb/qs/issues?milestone=12&state=closed) -- [**#37**](https://github.com/ljharb/qs/issues/37) parser discards first empty value in array -- [**#36**](https://github.com/ljharb/qs/issues/36) Update to lab 4.x - -## [**2.2.2**](https://github.com/ljharb/qs/issues?milestone=11&state=closed) -- [**#33**](https://github.com/ljharb/qs/issues/33) Error when plain object in a value -- [**#34**](https://github.com/ljharb/qs/issues/34) use Object.prototype.hasOwnProperty.call instead of obj.hasOwnProperty -- [**#24**](https://github.com/ljharb/qs/issues/24) Changelog? Semver? - -## [**2.2.1**](https://github.com/ljharb/qs/issues?milestone=10&state=closed) -- [**#32**](https://github.com/ljharb/qs/issues/32) account for circular references properly, closes #31 -- [**#31**](https://github.com/ljharb/qs/issues/31) qs.parse stackoverflow on circular objects - -## [**2.2.0**](https://github.com/ljharb/qs/issues?milestone=9&state=closed) -- [**#26**](https://github.com/ljharb/qs/issues/26) Don't use Buffer global if it's not present -- [**#30**](https://github.com/ljharb/qs/issues/30) Bug when merging non-object values into arrays -- [**#29**](https://github.com/ljharb/qs/issues/29) Don't call Utils.clone at the top of Utils.merge -- [**#23**](https://github.com/ljharb/qs/issues/23) Ability to not limit parameters? 
- -## [**2.1.0**](https://github.com/ljharb/qs/issues?milestone=8&state=closed) -- [**#22**](https://github.com/ljharb/qs/issues/22) Enable using a RegExp as delimiter - -## [**2.0.0**](https://github.com/ljharb/qs/issues?milestone=7&state=closed) -- [**#18**](https://github.com/ljharb/qs/issues/18) Why is there arrayLimit? -- [**#20**](https://github.com/ljharb/qs/issues/20) Configurable parametersLimit -- [**#21**](https://github.com/ljharb/qs/issues/21) make all limits optional, for #18, for #20 - -## [**1.2.2**](https://github.com/ljharb/qs/issues?milestone=6&state=closed) -- [**#19**](https://github.com/ljharb/qs/issues/19) Don't overwrite null values - -## [**1.2.1**](https://github.com/ljharb/qs/issues?milestone=5&state=closed) -- [**#16**](https://github.com/ljharb/qs/issues/16) ignore non-string delimiters -- [**#15**](https://github.com/ljharb/qs/issues/15) Close code block - -## [**1.2.0**](https://github.com/ljharb/qs/issues?milestone=4&state=closed) -- [**#12**](https://github.com/ljharb/qs/issues/12) Add optional delim argument -- [**#13**](https://github.com/ljharb/qs/issues/13) fix #11: flattened keys in array are now correctly parsed - -## [**1.1.0**](https://github.com/ljharb/qs/issues?milestone=3&state=closed) -- [**#7**](https://github.com/ljharb/qs/issues/7) Empty values of a POST array disappear after being submitted -- [**#9**](https://github.com/ljharb/qs/issues/9) Should not omit equals signs (=) when value is null -- [**#6**](https://github.com/ljharb/qs/issues/6) Minor grammar fix in README - -## [**1.0.2**](https://github.com/ljharb/qs/issues?milestone=2&state=closed) -- [**#5**](https://github.com/ljharb/qs/issues/5) array holes incorrectly copied into object on large index diff --git a/node_modules/qs/README.md b/node_modules/qs/README.md deleted file mode 100644 index d81196662bc23..0000000000000 --- a/node_modules/qs/README.md +++ /dev/null @@ -1,475 +0,0 @@ -# qs <sup>[![Version Badge][2]][1]</sup> - -[![Build Status][3]][4] -[![dependency status][5]][6] -[![dev dependency status][7]][8] -[![License][license-image]][license-url] -[![Downloads][downloads-image]][downloads-url] - -[![npm badge][11]][1] - -A querystring parsing and stringifying library with some added security. - -Lead Maintainer: [Jordan Harband](https://github.com/ljharb) - -The **qs** module was originally created and maintained by [TJ Holowaychuk](https://github.com/visionmedia/node-querystring). - -## Usage - -```javascript -var qs = require('qs'); -var assert = require('assert'); - -var obj = qs.parse('a=c'); -assert.deepEqual(obj, { a: 'c' }); - -var str = qs.stringify(obj); -assert.equal(str, 'a=c'); -``` - -### Parsing Objects - -[](#preventEval) -```javascript -qs.parse(string, [options]); -``` - -**qs** allows you to create nested objects within your query strings, by surrounding the name of sub-keys with square brackets `[]`. 
-For example, the string `'foo[bar]=baz'` converts to: - -```javascript -assert.deepEqual(qs.parse('foo[bar]=baz'), { - foo: { - bar: 'baz' - } -}); -``` - -When using the `plainObjects` option the parsed value is returned as a null object, created via `Object.create(null)` and as such you should be aware that prototype methods will not exist on it and a user may set those names to whatever value they like: - -```javascript -var nullObject = qs.parse('a[hasOwnProperty]=b', { plainObjects: true }); -assert.deepEqual(nullObject, { a: { hasOwnProperty: 'b' } }); -``` - -By default parameters that would overwrite properties on the object prototype are ignored, if you wish to keep the data from those fields either use `plainObjects` as mentioned above, or set `allowPrototypes` to `true` which will allow user input to overwrite those properties. *WARNING* It is generally a bad idea to enable this option as it can cause problems when attempting to use the properties that have been overwritten. Always be careful with this option. - -```javascript -var protoObject = qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true }); -assert.deepEqual(protoObject, { a: { hasOwnProperty: 'b' } }); -``` - -URI encoded strings work too: - -```javascript -assert.deepEqual(qs.parse('a%5Bb%5D=c'), { - a: { b: 'c' } -}); -``` - -You can also nest your objects, like `'foo[bar][baz]=foobarbaz'`: - -```javascript -assert.deepEqual(qs.parse('foo[bar][baz]=foobarbaz'), { - foo: { - bar: { - baz: 'foobarbaz' - } - } -}); -``` - -By default, when nesting objects **qs** will only parse up to 5 children deep. This means if you attempt to parse a string like -`'a[b][c][d][e][f][g][h][i]=j'` your resulting object will be: - -```javascript -var expected = { - a: { - b: { - c: { - d: { - e: { - f: { - '[g][h][i]': 'j' - } - } - } - } - } - } -}; -var string = 'a[b][c][d][e][f][g][h][i]=j'; -assert.deepEqual(qs.parse(string), expected); -``` - -This depth can be overridden by passing a `depth` option to `qs.parse(string, [options])`: - -```javascript -var deep = qs.parse('a[b][c][d][e][f][g][h][i]=j', { depth: 1 }); -assert.deepEqual(deep, { a: { b: { '[c][d][e][f][g][h][i]': 'j' } } }); -``` - -The depth limit helps mitigate abuse when **qs** is used to parse user input, and it is recommended to keep it a reasonably small number. - -For similar reasons, by default **qs** will only parse up to 1000 parameters. 
This can be overridden by passing a `parameterLimit` option: - -```javascript -var limited = qs.parse('a=b&c=d', { parameterLimit: 1 }); -assert.deepEqual(limited, { a: 'b' }); -``` - -To bypass the leading question mark, use `ignoreQueryPrefix`: - -```javascript -var prefixed = qs.parse('?a=b&c=d', { ignoreQueryPrefix: true }); -assert.deepEqual(prefixed, { a: 'b', c: 'd' }); -``` - -An optional delimiter can also be passed: - -```javascript -var delimited = qs.parse('a=b;c=d', { delimiter: ';' }); -assert.deepEqual(delimited, { a: 'b', c: 'd' }); -``` - -Delimiters can be a regular expression too: - -```javascript -var regexed = qs.parse('a=b;c=d,e=f', { delimiter: /[;,]/ }); -assert.deepEqual(regexed, { a: 'b', c: 'd', e: 'f' }); -``` - -Option `allowDots` can be used to enable dot notation: - -```javascript -var withDots = qs.parse('a.b=c', { allowDots: true }); -assert.deepEqual(withDots, { a: { b: 'c' } }); -``` - -### Parsing Arrays - -**qs** can also parse arrays using a similar `[]` notation: - -```javascript -var withArray = qs.parse('a[]=b&a[]=c'); -assert.deepEqual(withArray, { a: ['b', 'c'] }); -``` - -You may specify an index as well: - -```javascript -var withIndexes = qs.parse('a[1]=c&a[0]=b'); -assert.deepEqual(withIndexes, { a: ['b', 'c'] }); -``` - -Note that the only difference between an index in an array and a key in an object is that the value between the brackets must be a number -to create an array. When creating arrays with specific indices, **qs** will compact a sparse array to only the existing values preserving -their order: - -```javascript -var noSparse = qs.parse('a[1]=b&a[15]=c'); -assert.deepEqual(noSparse, { a: ['b', 'c'] }); -``` - -Note that an empty string is also a value, and will be preserved: - -```javascript -var withEmptyString = qs.parse('a[]=&a[]=b'); -assert.deepEqual(withEmptyString, { a: ['', 'b'] }); - -var withIndexedEmptyString = qs.parse('a[0]=b&a[1]=&a[2]=c'); -assert.deepEqual(withIndexedEmptyString, { a: ['b', '', 'c'] }); -``` - -**qs** will also limit specifying indices in an array to a maximum index of `20`. Any array members with an index of greater than `20` will -instead be converted to an object with the index as the key: - -```javascript -var withMaxIndex = qs.parse('a[100]=b'); -assert.deepEqual(withMaxIndex, { a: { '100': 'b' } }); -``` - -This limit can be overridden by passing an `arrayLimit` option: - -```javascript -var withArrayLimit = qs.parse('a[1]=b', { arrayLimit: 0 }); -assert.deepEqual(withArrayLimit, { a: { '1': 'b' } }); -``` - -To disable array parsing entirely, set `parseArrays` to `false`. - -```javascript -var noParsingArrays = qs.parse('a[]=b', { parseArrays: false }); -assert.deepEqual(noParsingArrays, { a: { '0': 'b' } }); -``` - -If you mix notations, **qs** will merge the two items into an object: - -```javascript -var mixedNotation = qs.parse('a[0]=b&a[b]=c'); -assert.deepEqual(mixedNotation, { a: { '0': 'b', b: 'c' } }); -``` - -You can also create arrays of objects: - -```javascript -var arraysOfObjects = qs.parse('a[][b]=c'); -assert.deepEqual(arraysOfObjects, { a: [{ b: 'c' }] }); -``` - -### Stringifying - -[](#preventEval) -```javascript -qs.stringify(object, [options]); -``` - -When stringifying, **qs** by default URI encodes output. 
Objects are stringified as you would expect: - -```javascript -assert.equal(qs.stringify({ a: 'b' }), 'a=b'); -assert.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); -``` - -This encoding can be disabled by setting the `encode` option to `false`: - -```javascript -var unencoded = qs.stringify({ a: { b: 'c' } }, { encode: false }); -assert.equal(unencoded, 'a[b]=c'); -``` - -Encoding can be disabled for keys by setting the `encodeValuesOnly` option to `true`: -```javascript -var encodedValues = qs.stringify( - { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, - { encodeValuesOnly: true } -); -assert.equal(encodedValues,'a=b&c[0]=d&c[1]=e%3Df&f[0][0]=g&f[1][0]=h'); -``` - -This encoding can also be replaced by a custom encoding method set as `encoder` option: - -```javascript -var encoded = qs.stringify({ a: { b: 'c' } }, { encoder: function (str) { - // Passed in values `a`, `b`, `c` - return // Return encoded string -}}) -``` - -_(Note: the `encoder` option does not apply if `encode` is `false`)_ - -Analogue to the `encoder` there is a `decoder` option for `parse` to override decoding of properties and values: - -```javascript -var decoded = qs.parse('x=z', { decoder: function (str) { - // Passed in values `x`, `z` - return // Return decoded string -}}) -``` - -Examples beyond this point will be shown as though the output is not URI encoded for clarity. Please note that the return values in these cases *will* be URI encoded during real usage. - -When arrays are stringified, by default they are given explicit indices: - -```javascript -qs.stringify({ a: ['b', 'c', 'd'] }); -// 'a[0]=b&a[1]=c&a[2]=d' -``` - -You may override this by setting the `indices` option to `false`: - -```javascript -qs.stringify({ a: ['b', 'c', 'd'] }, { indices: false }); -// 'a=b&a=c&a=d' -``` - -You may use the `arrayFormat` option to specify the format of the output array: - -```javascript -qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }) -// 'a[0]=b&a[1]=c' -qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }) -// 'a[]=b&a[]=c' -qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }) -// 'a=b&a=c' -``` - -When objects are stringified, by default they use bracket notation: - -```javascript -qs.stringify({ a: { b: { c: 'd', e: 'f' } } }); -// 'a[b][c]=d&a[b][e]=f' -``` - -You may override this to use dot notation by setting the `allowDots` option to `true`: - -```javascript -qs.stringify({ a: { b: { c: 'd', e: 'f' } } }, { allowDots: true }); -// 'a.b.c=d&a.b.e=f' -``` - -Empty strings and null values will omit the value, but the equals sign (=) remains in place: - -```javascript -assert.equal(qs.stringify({ a: '' }), 'a='); -``` - -Key with no values (such as an empty object or array) will return nothing: - -```javascript -assert.equal(qs.stringify({ a: [] }), ''); -assert.equal(qs.stringify({ a: {} }), ''); -assert.equal(qs.stringify({ a: [{}] }), ''); -assert.equal(qs.stringify({ a: { b: []} }), ''); -assert.equal(qs.stringify({ a: { b: {}} }), ''); -``` - -Properties that are set to `undefined` will be omitted entirely: - -```javascript -assert.equal(qs.stringify({ a: null, b: undefined }), 'a='); -``` - -The query string may optionally be prepended with a question mark: - -```javascript -assert.equal(qs.stringify({ a: 'b', c: 'd' }, { addQueryPrefix: true }), '?a=b&c=d'); -``` - -The delimiter may be overridden with stringify as well: - -```javascript -assert.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d'); -``` - -If you only want to override the serialization 
of `Date` objects, you can provide a `serializeDate` option: - -```javascript -var date = new Date(7); -assert.equal(qs.stringify({ a: date }), 'a=1970-01-01T00:00:00.007Z'.replace(/:/g, '%3A')); -assert.equal( - qs.stringify({ a: date }, { serializeDate: function (d) { return d.getTime(); } }), - 'a=7' -); -``` - -You may use the `sort` option to affect the order of parameter keys: - -```javascript -function alphabeticalSort(a, b) { - return a.localeCompare(b); -} -assert.equal(qs.stringify({ a: 'c', z: 'y', b : 'f' }, { sort: alphabeticalSort }), 'a=c&b=f&z=y'); -``` - -Finally, you can use the `filter` option to restrict which keys will be included in the stringified output. -If you pass a function, it will be called for each key to obtain the replacement value. Otherwise, if you -pass an array, it will be used to select properties and array indices for stringification: - -```javascript -function filterFunc(prefix, value) { - if (prefix == 'b') { - // Return an `undefined` value to omit a property. - return; - } - if (prefix == 'e[f]') { - return value.getTime(); - } - if (prefix == 'e[g][0]') { - return value * 2; - } - return value; -} -qs.stringify({ a: 'b', c: 'd', e: { f: new Date(123), g: [2] } }, { filter: filterFunc }); -// 'a=b&c=d&e[f]=123&e[g][0]=4' -qs.stringify({ a: 'b', c: 'd', e: 'f' }, { filter: ['a', 'e'] }); -// 'a=b&e=f' -qs.stringify({ a: ['b', 'c', 'd'], e: 'f' }, { filter: ['a', 0, 2] }); -// 'a[0]=b&a[2]=d' -``` - -### Handling of `null` values - -By default, `null` values are treated like empty strings: - -```javascript -var withNull = qs.stringify({ a: null, b: '' }); -assert.equal(withNull, 'a=&b='); -``` - -Parsing does not distinguish between parameters with and without equal signs. Both are converted to empty strings. - -```javascript -var equalsInsensitive = qs.parse('a&b='); -assert.deepEqual(equalsInsensitive, { a: '', b: '' }); -``` - -To distinguish between `null` values and empty strings use the `strictNullHandling` flag. In the result string the `null` -values have no `=` sign: - -```javascript -var strictNull = qs.stringify({ a: null, b: '' }, { strictNullHandling: true }); -assert.equal(strictNull, 'a&b='); -``` - -To parse values without `=` back to `null` use the `strictNullHandling` flag: - -```javascript -var parsedStrictNull = qs.parse('a&b=', { strictNullHandling: true }); -assert.deepEqual(parsedStrictNull, { a: null, b: '' }); -``` - -To completely skip rendering keys with `null` values, use the `skipNulls` flag: - -```javascript -var nullsSkipped = qs.stringify({ a: 'b', c: null}, { skipNulls: true }); -assert.equal(nullsSkipped, 'a=b'); -``` - -### Dealing with special character sets - -By default the encoding and decoding of characters is done in `utf-8`. If you -wish to encode querystrings to a different character set (i.e. -[Shift JIS](https://en.wikipedia.org/wiki/Shift_JIS)) you can use the -[`qs-iconv`](https://github.com/martinheidegger/qs-iconv) library: - -```javascript -var encoder = require('qs-iconv/encoder')('shift_jis'); -var shiftJISEncoded = qs.stringify({ a: 'こんにちは!' }, { encoder: encoder }); -assert.equal(shiftJISEncoded, 'a=%82%B1%82%F1%82%C9%82%BF%82%CD%81I'); -``` - -This also works for decoding of query strings: - -```javascript -var decoder = require('qs-iconv/decoder')('shift_jis'); -var obj = qs.parse('a=%82%B1%82%F1%82%C9%82%BF%82%CD%81I', { decoder: decoder }); -assert.deepEqual(obj, { a: 'こんにちは!' 
}); -``` - -### RFC 3986 and RFC 1738 space encoding - -RFC3986 used as default option and encodes ' ' to *%20* which is backward compatible. -In the same time, output can be stringified as per RFC1738 with ' ' equal to '+'. - -``` -assert.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); -assert.equal(qs.stringify({ a: 'b c' }, { format : 'RFC3986' }), 'a=b%20c'); -assert.equal(qs.stringify({ a: 'b c' }, { format : 'RFC1738' }), 'a=b+c'); -``` - -[1]: https://npmjs.org/package/qs -[2]: http://versionbadg.es/ljharb/qs.svg -[3]: https://api.travis-ci.org/ljharb/qs.svg -[4]: https://travis-ci.org/ljharb/qs -[5]: https://david-dm.org/ljharb/qs.svg -[6]: https://david-dm.org/ljharb/qs -[7]: https://david-dm.org/ljharb/qs/dev-status.svg -[8]: https://david-dm.org/ljharb/qs?type=dev -[9]: https://ci.testling.com/ljharb/qs.png -[10]: https://ci.testling.com/ljharb/qs -[11]: https://nodei.co/npm/qs.png?downloads=true&stars=true -[license-image]: http://img.shields.io/npm/l/qs.svg -[license-url]: LICENSE -[downloads-image]: http://img.shields.io/npm/dm/qs.svg -[downloads-url]: http://npm-stat.com/charts.html?package=qs diff --git a/node_modules/read-cmd-shim/README.md b/node_modules/read-cmd-shim/README.md deleted file mode 100644 index 457e36e35fca5..0000000000000 --- a/node_modules/read-cmd-shim/README.md +++ /dev/null @@ -1,36 +0,0 @@ -# read-cmd-shim - -Figure out what a [`cmd-shim`](https://github.com/ForbesLindesay/cmd-shim) -is pointing at. This acts as the equivalent of -[`fs.readlink`](https://nodejs.org/api/fs.html#fs_fs_readlink_path_callback). - -### Usage - -``` -const readCmdShim = require('read-cmd-shim') - -readCmdShim('/path/to/shim.cmd').then(destination => { - … -}) - -const destination = readCmdShim.sync('/path/to/shim.cmd') -``` - -### readCmdShim(path) -> Promise - -Reads the `cmd-shim` located at `path` and resolves with the _relative_ -path that the shim points at. Consider this as roughly the equivalent of -`fs.readlink`. - -This can read both `.cmd` style that are run by the Windows Command Prompt -and Powershell, and the kind without any extension that are used by Cygwin. - -This can return errors that `fs.readFile` returns, except that they'll -include a stack trace from where `readCmdShim` was called. Plus it can -return a special `ENOTASHIM` exception, when it can't find a cmd-shim in the -file referenced by `path`. This should only happen if you pass in a -non-command shim. - -### readCmdShim.sync(path) - -Same as above but synchronous. Errors are thrown. diff --git a/node_modules/read-package-json-fast/README.md b/node_modules/read-package-json-fast/README.md deleted file mode 100644 index 5ab6adbece825..0000000000000 --- a/node_modules/read-package-json-fast/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# read-package-json-fast - -Like [`read-package-json`](http://npm.im/read-package-json), but faster and -more accepting of "missing" data. - -This is only suitable for reading package.json files in a node_modules -tree, since it doesn't do the various cleanups, normalization, and warnings -that are beneficial at the root level in a package being published. - -## USAGE - -```js -const rpj = require('read-package-json-fast') - -// typical promisey type API -rpj('/path/to/package.json') - .then(data => ...) - .catch(er => ...) 
- -// or just normalize a package manifest -const normalized = rpj.normalize(packageJsonObject) -``` - -Errors raised from parsing will use -[`json-parse-even-better-errors`](http://npm.im/json-parse-even-better-errors), -so they'll be of type `JSONParseError` and have a `code: 'EJSONPARSE'` -property. Errors will also always have a `path` member referring to the -path originally passed into the function. - -## Indentation - -To preserve indentation when the file is saved back to disk, use -`data[Symbol.for('indent')]` as the third argument to `JSON.stringify`, and -if you want to preserve windows `\r\n` newlines, replace the `\n` chars in -the string with `data[Symbol.for('newline')]`. - -For example: - -```js -const data = await readPackageJsonFast('./package.json') -const indent = Symbol.for('indent') -const newline = Symbol.for('newline') -// .. do some stuff to the data .. -const string = JSON.stringify(data, null, data[indent]) + '\n' -const eolFixed = data[newline] === '\n' ? string - : string.replace(/\n/g, data[newline]) -await writeFile('./package.json', eolFixed) -``` - -Indentation is determined by looking at the whitespace between the initial -`{` and the first `"` that follows it. If you have lots of weird -inconsistent indentation, then it won't track that or give you any way to -preserve it. Whether this is a bug or a feature is debatable ;) - -## WHAT THIS MODULE DOES - -- Parse JSON -- Normalize `bundledDependencies`/`bundleDependencies` naming to just - `bundleDependencies` (without the extra `d`) -- Handle `true`, `false`, or object values passed to `bundleDependencies` -- Normalize `funding: <string>` to `funding: { url: <string> }` -- Remove any `scripts` members that are not a string value. -- Normalize a string `bin` member to `{ [name]: bin }`. -- Fold `optionalDependencies` into `dependencies`. -- Set the `_id` property if name and version are set. (This is - load-bearing in a few places within the npm CLI.) - -## WHAT THIS MODULE DOES NOT DO - -- Warn about invalid/missing name, version, repository, etc. -- Extract a description from the `README.md` file, or attach the readme to - the parsed data object. -- Read the `HEAD` value out of the `.git` folder. -- Warn about potentially typo'ed scripts (eg, `tset` instead of `test`) -- Check to make sure that all the files in the `files` field exist and are - valid files. -- Fix bundleDependencies that are not listed in `dependencies`. -- Fix `dependencies` fields that are not strictly objects of string values. -- Anything involving the `directories` field (ie, bins, mans, and so on). 
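For reference, here is a minimal sketch (not part of the deleted README) of what the normalizations listed under "WHAT THIS MODULE DOES" look like when applied through the exported `rpj.normalize()` helper shown in the usage block above. The manifest fields are hypothetical, and the expected results are only what that list promises:

```js
// Hypothetical manifest run through the normalizations described above.
const rpj = require('read-package-json-fast')

const normalized = rpj.normalize({
  name: 'example-pkg',
  version: '1.0.0',
  bundledDependencies: ['foo'],            // renamed to bundleDependencies
  bin: './cli.js',                         // string bin -> { 'example-pkg': './cli.js' }
  funding: 'https://example.com/fund',     // string -> { url: 'https://example.com/fund' }
  optionalDependencies: { bar: '^1.0.0' }, // folded into dependencies
  scripts: { test: 'tap', broken: 42 }     // non-string script members removed
})

// name and version are both set, so _id should be 'example-pkg@1.0.0'
console.log(normalized._id)
```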
diff --git a/node_modules/read-package-json-fast/index.js b/node_modules/read-package-json-fast/index.js index bc1c059272c04..646ff7dfbbd76 100644 --- a/node_modules/read-package-json-fast/index.js +++ b/node_modules/read-package-json-fast/index.js @@ -1,15 +1,56 @@ const {promisify} = require('util') const fs = require('fs') const readFile = promisify(fs.readFile) +const lstat = promisify(fs.lstat) +const readdir = promisify(fs.readdir) const parse = require('json-parse-even-better-errors') + +const { resolve, dirname, join, relative } = require('path') + const rpj = path => readFile(path, 'utf8') - .then(data => normalize(stripUnderscores(parse(data)))) + .then(data => readBinDir(path, normalize(stripUnderscores(parse(data))))) .catch(er => { er.path = path throw er }) + const normalizePackageBin = require('npm-normalize-package-bin') +// load the directories.bin folder as a 'bin' object +const readBinDir = async (path, data) => { + if (data.bin) + return data + + const m = data.directories && data.directories.bin + if (!m || typeof m !== 'string') + return data + + // cut off any monkey business, like setting directories.bin + // to ../../../etc/passwd or /etc/passwd or something like that. + const root = dirname(path) + const dir = join('.', join('/', m)) + data.bin = await walkBinDir(root, dir, {}) + return data +} + +const walkBinDir = async (root, dir, obj) => { + const entries = await readdir(resolve(root, dir)).catch(() => []) + for (const entry of entries) { + if (entry.charAt(0) === '.') + continue + const f = resolve(root, dir, entry) + // ignore stat errors, weird file types, symlinks, etc. + const st = await lstat(f).catch(() => null) + if (!st) + continue + else if (st.isFile()) + obj[entry] = relative(root, f) + else if (st.isDirectory()) + await walkBinDir(root, join(dir, entry), obj) + } + return obj +} + // do not preserve _fields set in files, they are sus const stripUnderscores = data => { for (const key of Object.keys(data).filter(k => /^_/.test(k))) diff --git a/node_modules/read-package-json-fast/package.json b/node_modules/read-package-json-fast/package.json index 388e76595833e..c3a9f7dc5c37b 100644 --- a/node_modules/read-package-json-fast/package.json +++ b/node_modules/read-package-json-fast/package.json @@ -1,6 +1,6 @@ { "name": "read-package-json-fast", - "version": "2.0.2", + "version": "2.0.3", "description": "Like read-package-json, but faster", "author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)", "license": "ISC", @@ -18,7 +18,7 @@ "check-coverage": true }, "devDependencies": { - "tap": "^14.10.1" + "tap": "^15.0.9" }, "dependencies": { "json-parse-even-better-errors": "^2.3.0", diff --git a/node_modules/read-package-json/CHANGELOG.md b/node_modules/read-package-json/CHANGELOG.md deleted file mode 100644 index 929900482f110..0000000000000 --- a/node_modules/read-package-json/CHANGELOG.md +++ /dev/null @@ -1,61 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
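Relatedly, the `index.js` change above (new in read-package-json-fast 2.0.3) expands a string `directories.bin` into an explicit `bin` map by walking the directory. A rough sketch of the effect, using a hypothetical package layout:

```js
// Hypothetical package at ./node_modules/example containing:
//   package.json -> { "name": "example", "version": "1.0.0",
//                     "directories": { "bin": "bin" } }
//   bin/foo and bin/nested/bar (plus a dotfile bin/.hidden, which is skipped)
const rpj = require('read-package-json-fast')

rpj('./node_modules/example/package.json').then(data => {
  // readBinDir/walkBinDir fill in data.bin from the directory contents,
  // keyed by file name, with paths kept relative to the package root,
  // so this should log roughly: { foo: 'bin/foo', bar: 'bin/nested/bar' }
  console.log(data.bin)
})
```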
- -<a name="3.0.1"></a> -## [3.0.1](https://github.com/npm/read-package-json/compare/v3.0.0...v3.0.1) (2021-02-22) - - -### Bug Fixes - -* Strip underscore prefixed fields from file contents ([ac771d8](https://github.com/npm/read-package-json/commit/ac771d8)) - - - -<a name="3.0.0"></a> -# [3.0.0](https://github.com/npm/read-package-json/compare/v2.1.2...v3.0.0) (2020-10-13) - - -### Bug Fixes - -* check-in updated lockfile ([19d9fbe](https://github.com/npm/read-package-json/commit/19d9fbe)) - - - -<a name="2.1.2"></a> -## [2.1.2](https://github.com/npm/read-package-json/compare/v2.1.1...v2.1.2) (2020-08-20) - - -### Bug Fixes - -* even better json errors, remove graceful-fs ([fdbf082](https://github.com/npm/read-package-json/commit/fdbf082)) - - - -<a name="2.1.1"></a> -## [2.1.1](https://github.com/npm/read-package-json/compare/v2.1.0...v2.1.1) (2019-12-09) - - -### Bug Fixes - -* normalize and sanitize pkg bin entries ([b8cb5fa](https://github.com/npm/read-package-json/commit/b8cb5fa)) - - - -<a name="2.1.0"></a> -# [2.1.0](https://github.com/npm/read-package-json/compare/v2.0.13...v2.1.0) (2019-08-13) - - -### Features - -* support bundleDependencies: true ([76f6f42](https://github.com/npm/read-package-json/commit/76f6f42)) - - - -<a name="2.0.13"></a> -## [2.0.13](https://github.com/npm/read-package-json/compare/v2.0.12...v2.0.13) (2018-03-08) - - -### Bug Fixes - -* **git:** support git packed refs --all mode ([#77](https://github.com/npm/read-package-json/issues/77)) ([1869940](https://github.com/npm/read-package-json/commit/1869940)) diff --git a/node_modules/read-package-json/README.md b/node_modules/read-package-json/README.md deleted file mode 100644 index da1f63dc8828b..0000000000000 --- a/node_modules/read-package-json/README.md +++ /dev/null @@ -1,151 +0,0 @@ -# read-package-json - -This is the thing that npm uses to read package.json files. It -validates some stuff, and loads some default things. - -It keeps a cache of the files you've read, so that you don't end -up reading the same package.json file multiple times. - -Note that if you just want to see what's literally in the package.json -file, you can usually do `var data = require('some-module/package.json')`. - -This module is basically only needed by npm, but it's handy to see what -npm will see when it looks at your package. - -## Usage - -```javascript -var readJson = require('read-package-json') - -// readJson(filename, [logFunction=noop], [strict=false], cb) -readJson('/path/to/package.json', console.error, false, function (er, data) { - if (er) { - console.error("There was an error reading the file") - return - } - - console.error('the package data is', data) -}); -``` - -## readJson(file, [logFn = noop], [strict = false], cb) - -* `file` {String} The path to the package.json file -* `logFn` {Function} Function to handle logging. Defaults to a noop. -* `strict` {Boolean} True to enforce SemVer 2.0 version strings, and - other strict requirements. -* `cb` {Function} Gets called with `(er, data)`, as is The Node Way. - -Reads the JSON file and does the things. - -## `package.json` Fields - -See `man 5 package.json` or `npm help json`. - -## readJson.log - -By default this is a reference to the `npmlog` module. But if that -module can't be found, then it'll be set to just a dummy thing that does -nothing. - -Replace with your own `{log,warn,error}` object for fun loggy time. - -## readJson.extras(file, data, cb) - -Run all the extra stuff relative to the file, with the parsed data. 
- -Modifies the data as it does stuff. Calls the cb when it's done. - -## readJson.extraSet = [fn, fn, ...] - -Array of functions that are called by `extras`. Each one receives the -arguments `fn(file, data, cb)` and is expected to call `cb(er, data)` -when done or when an error occurs. - -Order is indeterminate, so each function should be completely -independent. - -Mix and match! - -## Other Relevant Files Besides `package.json` - -Some other files have an effect on the resulting data object, in the -following ways: - -### `README?(.*)` - -If there is a `README` or `README.*` file present, then npm will attach -a `readme` field to the data with the contents of this file. - -Owing to the fact that roughly 100% of existing node modules have -Markdown README files, it will generally be assumed to be Markdown, -regardless of the extension. Please plan accordingly. - -### `server.js` - -If there is a `server.js` file, and there is not already a -`scripts.start` field, then `scripts.start` will be set to `node -server.js`. - -### `AUTHORS` - -If there is not already a `contributors` field, then the `contributors` -field will be set to the contents of the `AUTHORS` file, split by lines, -and parsed. - -### `bindings.gyp` - -If a bindings.gyp file exists, and there is not already a -`scripts.install` field, then the `scripts.install` field will be set to -`node-gyp rebuild`. - -### `index.js` - -If the json file does not exist, but there is a `index.js` file -present instead, and that file has a package comment, then it will try -to parse the package comment, and use that as the data instead. - -A package comment looks like this: - -```javascript -/**package - * { "name": "my-bare-module" - * , "version": "1.2.3" - * , "description": "etc...." } - **/ - -// or... - -/**package -{ "name": "my-bare-module" -, "version": "1.2.3" -, "description": "etc...." } -**/ -``` - -The important thing is that it starts with `/**package`, and ends with -`**/`. If the package.json file exists, then the index.js is not -parsed. - -### `{directories.man}/*.[0-9]` - -If there is not already a `man` field defined as an array of files or a -single file, and -there is a `directories.man` field defined, then that directory will -be searched for manpages. - -Any valid manpages found in that directory will be assigned to the `man` -array, and installed in the appropriate man directory at package install -time, when installed globally on a Unix system. - -### `{directories.bin}/*` - -If there is not already a `bin` field defined as a string filename or a -hash of `<name> : <filename>` pairs, then the `directories.bin` -directory will be searched and all the files within it will be linked as -executables at install time. - -When installing locally, npm links bins into `node_modules/.bin`, which -is in the `PATH` environ when npm runs scripts. When -installing globally, they are linked into `{prefix}/bin`, which is -presumably in the `PATH` environment variable. diff --git a/node_modules/read/README.md b/node_modules/read/README.md deleted file mode 100644 index 5967fad118024..0000000000000 --- a/node_modules/read/README.md +++ /dev/null @@ -1,53 +0,0 @@ -## read - -For reading user input from stdin. - -Similar to the `readline` builtin's `question()` method, but with a -few more features. - -## USAGE - -```javascript -var read = require("read") -read(options, callback) -``` - -The callback gets called with either the user input, or the default -specified, or an error, as `callback(error, result, isDefault)` -node style. 
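A minimal sketch of that callback style, assuming the `read` package is installed; the `prompt` and `silent` options used here are described in the OPTIONS list below:

```javascript
var read = require("read")

// Prompt for a password without echoing the input back to the terminal.
read({ prompt: "Password: ", silent: true }, function (er, result, isDefault) {
  if (er) return console.error(er)
  console.log("read %d characters (default used: %s)", result.length, isDefault)
})
```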
- -## OPTIONS - -Every option is optional. - -* `prompt` What to write to stdout before reading input. -* `silent` Don't echo the output as the user types it. -* `replace` Replace silenced characters with the supplied character value. -* `timeout` Number of ms to wait for user input before giving up. -* `default` The default value if the user enters nothing. -* `edit` Allow the user to edit the default value. -* `terminal` Treat the output as a TTY, whether it is or not. -* `input` Readable stream to get input data from. (default `process.stdin`) -* `output` Writeable stream to write prompts to. (default: `process.stdout`) - -If silent is true, and the input is a TTY, then read will set raw -mode, and read character by character. - -## COMPATIBILITY - -This module works sort of with node 0.6. It does not work with node -versions less than 0.6. It is best on node 0.8. - -On node version 0.6, it will remove all listeners on the input -stream's `data` and `keypress` events, because the readline module did -not fully clean up after itself in that version of node, and did not -make it possible to clean up after it in a way that has no potential -for side effects. - -Additionally, some of the readline options (like `terminal`) will not -function in versions of node before 0.8, because they were not -implemented in the builtin readline module. - -## CONTRIBUTING - -Patches welcome. diff --git a/node_modules/readable-stream/.travis.yml b/node_modules/readable-stream/.travis.yml deleted file mode 100644 index f62cdac0686da..0000000000000 --- a/node_modules/readable-stream/.travis.yml +++ /dev/null @@ -1,34 +0,0 @@ -sudo: false -language: node_js -before_install: - - (test $NPM_LEGACY && npm install -g npm@2 && npm install -g npm@3) || true -notifications: - email: false -matrix: - fast_finish: true - include: - - node_js: '0.8' - env: NPM_LEGACY=true - - node_js: '0.10' - env: NPM_LEGACY=true - - node_js: '0.11' - env: NPM_LEGACY=true - - node_js: '0.12' - env: NPM_LEGACY=true - - node_js: 1 - env: NPM_LEGACY=true - - node_js: 2 - env: NPM_LEGACY=true - - node_js: 3 - env: NPM_LEGACY=true - - node_js: 4 - - node_js: 5 - - node_js: 6 - - node_js: 7 - - node_js: 8 - - node_js: 9 -script: "npm run test" -env: - global: - - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= - - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/readable-stream/README.md b/node_modules/readable-stream/README.md deleted file mode 100644 index 23fe3f3e3009a..0000000000000 --- a/node_modules/readable-stream/README.md +++ /dev/null @@ -1,58 +0,0 @@ -# readable-stream - -***Node-core v8.11.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) - - -[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) -[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) - - -[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) - -```bash -npm install --save readable-stream -``` - -***Node-core streams for userland*** - -This package is a mirror of the Streams2 and 
Streams3 implementations in -Node-core. - -Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.11.1/docs/api/stream.html). - -If you want to guarantee a stable streams base, regardless of what version of -Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). - -As of version 2.0.0 **readable-stream** uses semantic versioning. - -# Streams Working Group - -`readable-stream` is maintained by the Streams Working Group, which -oversees the development and maintenance of the Streams API within -Node.js. The responsibilities of the Streams Working Group include: - -* Addressing stream issues on the Node.js issue tracker. -* Authoring and editing stream documentation within the Node.js project. -* Reviewing changes to stream subclasses within the Node.js project. -* Redirecting changes to streams from the Node.js project to this - project. -* Assisting in the implementation of stream providers within Node.js. -* Recommending versions of `readable-stream` to be included in Node.js. -* Messaging about the future of streams to give the community advance - notice of changes. - -<a name="members"></a> -## Team Members - -* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> - - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B -* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> - - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 -* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> - - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D -* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> -* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> -* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> -* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> - - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E -* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/node_modules/readdir-scoped-modules/README.md b/node_modules/readdir-scoped-modules/README.md deleted file mode 100644 index ade57a186dc73..0000000000000 --- a/node_modules/readdir-scoped-modules/README.md +++ /dev/null @@ -1,17 +0,0 @@ -# readdir-scoped-modules - -Like `fs.readdir` but handling `@org/module` dirs as if they were -a single entry. - -Used by npm. - -## USAGE - -```javascript -var readdir = require('readdir-scoped-modules') - -readdir('node_modules', function (er, entries) { - // entries will be something like - // ['a', '@org/foo', '@org/bar'] -}) -``` diff --git a/node_modules/request/CHANGELOG.md b/node_modules/request/CHANGELOG.md deleted file mode 100644 index d3ffcd00d2e62..0000000000000 --- a/node_modules/request/CHANGELOG.md +++ /dev/null @@ -1,717 +0,0 @@ -## Change Log - -### v2.88.0 (2018/08/10) -- [#2996](https://github.com/request/request/pull/2996) fix(uuid): import versioned uuid (@kwonoj) -- [#2994](https://github.com/request/request/pull/2994) Update to oauth-sign 0.9.0 (@dlecocq) -- [#2993](https://github.com/request/request/pull/2993) Fix header tests (@simov) -- [#2904](https://github.com/request/request/pull/2904) #515, #2894 Strip port suffix from Host header if the protocol is known. 
(#2904) (@paambaati) -- [#2791](https://github.com/request/request/pull/2791) Improve AWS SigV4 support. (#2791) (@vikhyat) -- [#2977](https://github.com/request/request/pull/2977) Update test certificates (@simov) - -### v2.87.0 (2018/05/21) -- [#2943](https://github.com/request/request/pull/2943) Replace hawk dependency with a local implemenation (#2943) (@hueniverse) - -### v2.86.0 (2018/05/15) -- [#2885](https://github.com/request/request/pull/2885) Remove redundant code (for Node.js 0.9.4 and below) and dependency (@ChALkeR) -- [#2942](https://github.com/request/request/pull/2942) Make Test GREEN Again! (@simov) -- [#2923](https://github.com/request/request/pull/2923) Alterations for failing CI tests (@gareth-robinson) - -### v2.85.0 (2018/03/12) -- [#2880](https://github.com/request/request/pull/2880) Revert "Update hawk to 7.0.7 (#2880)" (@simov) - -### v2.84.0 (2018/03/12) -- [#2793](https://github.com/request/request/pull/2793) Fixed calculation of oauth_body_hash, issue #2792 (@dvishniakov) -- [#2880](https://github.com/request/request/pull/2880) Update hawk to 7.0.7 (#2880) (@kornel-kedzierski) - -### v2.83.0 (2017/09/27) -- [#2776](https://github.com/request/request/pull/2776) Updating tough-cookie due to security fix. (#2776) (@karlnorling) - -### v2.82.0 (2017/09/19) -- [#2703](https://github.com/request/request/pull/2703) Add Node.js v8 to Travis CI (@ryysud) -- [#2751](https://github.com/request/request/pull/2751) Update of hawk and qs to latest version (#2751) (@Olivier-Moreau) -- [#2658](https://github.com/request/request/pull/2658) Fixed some text in README.md (#2658) (@Marketionist) -- [#2635](https://github.com/request/request/pull/2635) chore(package): update aws-sign2 to version 0.7.0 (#2635) (@greenkeeperio-bot) -- [#2641](https://github.com/request/request/pull/2641) Update README to simplify & update convenience methods (#2641) (@FredKSchott) -- [#2541](https://github.com/request/request/pull/2541) Add convenience method for HTTP OPTIONS (#2541) (@jamesseanwright) -- [#2605](https://github.com/request/request/pull/2605) Add promise support section to README (#2605) (@FredKSchott) -- [#2579](https://github.com/request/request/pull/2579) refactor(lint): replace eslint with standard (#2579) (@ahmadnassri) -- [#2598](https://github.com/request/request/pull/2598) Update codecov to version 2.0.2 🚀 (@greenkeeperio-bot) -- [#2590](https://github.com/request/request/pull/2590) Adds test-timing keepAlive test (@nicjansma) -- [#2589](https://github.com/request/request/pull/2589) fix tabulation on request example README.MD (@odykyi) -- [#2594](https://github.com/request/request/pull/2594) chore(dependencies): har-validator to 5.x [removes babel dep] (@ahmadnassri) - -### v2.81.0 (2017/03/09) -- [#2584](https://github.com/request/request/pull/2584) Security issue: Upgrade qs to version 6.4.0 (@sergejmueller) -- [#2578](https://github.com/request/request/pull/2578) safe-buffer doesn't zero-fill by default, its just a polyfill. (#2578) (@mikeal) -- [#2566](https://github.com/request/request/pull/2566) Timings: Tracks 'lookup', adds 'wait' time, fixes connection re-use (#2566) (@nicjansma) -- [#2574](https://github.com/request/request/pull/2574) Migrating to safe-buffer for improved security. 
(@mikeal) -- [#2573](https://github.com/request/request/pull/2573) fixes #2572 (@ahmadnassri) - -### v2.80.0 (2017/03/04) -- [#2571](https://github.com/request/request/pull/2571) Correctly format the Host header for IPv6 addresses (@JamesMGreene) -- [#2558](https://github.com/request/request/pull/2558) Update README.md example snippet (@FredKSchott) -- [#2221](https://github.com/request/request/pull/2221) Adding a simple Response object reference in argument specification (@calamarico) -- [#2452](https://github.com/request/request/pull/2452) Adds .timings array with DNC, TCP, request and response times (@nicjansma) -- [#2553](https://github.com/request/request/pull/2553) add ISSUE_TEMPLATE, move PR template (@FredKSchott) -- [#2539](https://github.com/request/request/pull/2539) Create PULL_REQUEST_TEMPLATE.md (@FredKSchott) -- [#2524](https://github.com/request/request/pull/2524) Update caseless to version 0.12.0 🚀 (@greenkeeperio-bot) -- [#2460](https://github.com/request/request/pull/2460) Fix wrong MIME type in example (@OwnageIsMagic) -- [#2514](https://github.com/request/request/pull/2514) Change tags to keywords in package.json (@humphd) -- [#2492](https://github.com/request/request/pull/2492) More lenient gzip decompression (@addaleax) - -### v2.79.0 (2016/11/18) -- [#2368](https://github.com/request/request/pull/2368) Fix typeof check in test-pool.js (@forivall) -- [#2394](https://github.com/request/request/pull/2394) Use `files` in package.json (@SimenB) -- [#2463](https://github.com/request/request/pull/2463) AWS support for session tokens for temporary credentials (@simov) -- [#2467](https://github.com/request/request/pull/2467) Migrate to uuid (@simov, @antialias) -- [#2459](https://github.com/request/request/pull/2459) Update taper to version 0.5.0 🚀 (@greenkeeperio-bot) -- [#2448](https://github.com/request/request/pull/2448) Make other connect timeout test more reliable too (@mscdex) - -### v2.78.0 (2016/11/03) -- [#2447](https://github.com/request/request/pull/2447) Always set request timeout on keep-alive connections (@mscdex) - -### v2.77.0 (2016/11/03) -- [#2439](https://github.com/request/request/pull/2439) Fix socket 'connect' listener handling (@mscdex) -- [#2442](https://github.com/request/request/pull/2442) 👻😱 Node.js 0.10 is unmaintained 😱👻 (@greenkeeperio-bot) -- [#2435](https://github.com/request/request/pull/2435) Add followOriginalHttpMethod to redirect to original HTTP method (@kirrg001) -- [#2414](https://github.com/request/request/pull/2414) Improve test-timeout reliability (@mscdex) - -### v2.76.0 (2016/10/25) -- [#2424](https://github.com/request/request/pull/2424) Handle buffers directly instead of using "bl" (@zertosh) -- [#2415](https://github.com/request/request/pull/2415) Re-enable timeout tests on Travis + other fixes (@mscdex) -- [#2431](https://github.com/request/request/pull/2431) Improve timeouts accuracy and node v6.8.0+ compatibility (@mscdex, @greenkeeperio-bot) -- [#2428](https://github.com/request/request/pull/2428) Update qs to version 6.3.0 🚀 (@greenkeeperio-bot) -- [#2420](https://github.com/request/request/pull/2420) change .on to .once, remove possible memory leaks (@duereg) -- [#2426](https://github.com/request/request/pull/2426) Remove "isFunction" helper in favor of "typeof" check (@zertosh) -- [#2425](https://github.com/request/request/pull/2425) Simplify "defer" helper creation (@zertosh) -- [#2402](https://github.com/request/request/pull/2402) form-data@2.1.1 breaks build 🚨 (@greenkeeperio-bot) -- 
[#2393](https://github.com/request/request/pull/2393) Update form-data to version 2.1.0 🚀 (@greenkeeperio-bot) - -### v2.75.0 (2016/09/17) -- [#2381](https://github.com/request/request/pull/2381) Drop support for Node 0.10 (@simov) -- [#2377](https://github.com/request/request/pull/2377) Update form-data to version 2.0.0 🚀 (@greenkeeperio-bot) -- [#2353](https://github.com/request/request/pull/2353) Add greenkeeper ignored packages (@simov) -- [#2351](https://github.com/request/request/pull/2351) Update karma-tap to version 3.0.1 🚀 (@greenkeeperio-bot) -- [#2348](https://github.com/request/request/pull/2348) form-data@1.0.1 breaks build 🚨 (@greenkeeperio-bot) -- [#2349](https://github.com/request/request/pull/2349) Check error type instead of string (@scotttrinh) - -### v2.74.0 (2016/07/22) -- [#2295](https://github.com/request/request/pull/2295) Update tough-cookie to 2.3.0 (@stash-sfdc) -- [#2280](https://github.com/request/request/pull/2280) Update karma-tap to version 2.0.1 🚀 (@greenkeeperio-bot) - -### v2.73.0 (2016/07/09) -- [#2240](https://github.com/request/request/pull/2240) Remove connectionErrorHandler to fix #1903 (@zarenner) -- [#2251](https://github.com/request/request/pull/2251) tape@4.6.0 breaks build 🚨 (@greenkeeperio-bot) -- [#2225](https://github.com/request/request/pull/2225) Update docs (@ArtskydJ) -- [#2203](https://github.com/request/request/pull/2203) Update browserify to version 13.0.1 🚀 (@greenkeeperio-bot) -- [#2275](https://github.com/request/request/pull/2275) Update karma to version 1.1.1 🚀 (@greenkeeperio-bot) -- [#2204](https://github.com/request/request/pull/2204) Add codecov.yml and disable PR comments (@simov) -- [#2212](https://github.com/request/request/pull/2212) Fix link to http.IncomingMessage documentation (@nazieb) -- [#2208](https://github.com/request/request/pull/2208) Update to form-data RC4 and pass null values to it (@simov) -- [#2207](https://github.com/request/request/pull/2207) Move aws4 require statement to the top (@simov) -- [#2199](https://github.com/request/request/pull/2199) Update karma-coverage to version 1.0.0 🚀 (@greenkeeperio-bot) -- [#2206](https://github.com/request/request/pull/2206) Update qs to version 6.2.0 🚀 (@greenkeeperio-bot) -- [#2205](https://github.com/request/request/pull/2205) Use server-destory to close hanging sockets in tests (@simov) -- [#2200](https://github.com/request/request/pull/2200) Update karma-cli to version 1.0.0 🚀 (@greenkeeperio-bot) - -### v2.72.0 (2016/04/17) -- [#2176](https://github.com/request/request/pull/2176) Do not try to pipe Gzip responses with no body (@simov) -- [#2175](https://github.com/request/request/pull/2175) Add 'delete' alias for the 'del' API method (@simov, @MuhanZou) -- [#2172](https://github.com/request/request/pull/2172) Add support for deflate content encoding (@czardoz) -- [#2169](https://github.com/request/request/pull/2169) Add callback option (@simov) -- [#2165](https://github.com/request/request/pull/2165) Check for self.req existence inside the write method (@simov) -- [#2167](https://github.com/request/request/pull/2167) Fix TravisCI badge reference master branch (@a0viedo) - -### v2.71.0 (2016/04/12) -- [#2164](https://github.com/request/request/pull/2164) Catch errors from the underlying http module (@simov) - -### v2.70.0 (2016/04/05) -- [#2147](https://github.com/request/request/pull/2147) Update eslint to version 2.5.3 🚀 (@simov, @greenkeeperio-bot) -- [#2009](https://github.com/request/request/pull/2009) Support JSON stringify replacer argument. 
(@elyobo) -- [#2142](https://github.com/request/request/pull/2142) Update eslint to version 2.5.1 🚀 (@greenkeeperio-bot) -- [#2128](https://github.com/request/request/pull/2128) Update browserify-istanbul to version 2.0.0 🚀 (@greenkeeperio-bot) -- [#2115](https://github.com/request/request/pull/2115) Update eslint to version 2.3.0 🚀 (@simov, @greenkeeperio-bot) -- [#2089](https://github.com/request/request/pull/2089) Fix badges (@simov) -- [#2092](https://github.com/request/request/pull/2092) Update browserify-istanbul to version 1.0.0 🚀 (@greenkeeperio-bot) -- [#2079](https://github.com/request/request/pull/2079) Accept read stream as body option (@simov) -- [#2070](https://github.com/request/request/pull/2070) Update bl to version 1.1.2 🚀 (@greenkeeperio-bot) -- [#2063](https://github.com/request/request/pull/2063) Up bluebird and oauth-sign (@simov) -- [#2058](https://github.com/request/request/pull/2058) Karma fixes for latest versions (@eiriksm) -- [#2057](https://github.com/request/request/pull/2057) Update contributing guidelines (@simov) -- [#2054](https://github.com/request/request/pull/2054) Update qs to version 6.1.0 🚀 (@greenkeeperio-bot) - -### v2.69.0 (2016/01/27) -- [#2041](https://github.com/request/request/pull/2041) restore aws4 as regular dependency (@rmg) - -### v2.68.0 (2016/01/27) -- [#2036](https://github.com/request/request/pull/2036) Add AWS Signature Version 4 (@simov, @mirkods) -- [#2022](https://github.com/request/request/pull/2022) Convert numeric multipart bodies to string (@simov, @feross) -- [#2024](https://github.com/request/request/pull/2024) Update har-validator dependency for nsp advisory #76 (@TylerDixon) -- [#2016](https://github.com/request/request/pull/2016) Update qs to version 6.0.2 🚀 (@greenkeeperio-bot) -- [#2007](https://github.com/request/request/pull/2007) Use the `extend` module instead of util._extend (@simov) -- [#2003](https://github.com/request/request/pull/2003) Update browserify to version 13.0.0 🚀 (@greenkeeperio-bot) -- [#1989](https://github.com/request/request/pull/1989) Update buffer-equal to version 1.0.0 🚀 (@greenkeeperio-bot) -- [#1956](https://github.com/request/request/pull/1956) Check form-data content-length value before setting up the header (@jongyoonlee) -- [#1958](https://github.com/request/request/pull/1958) Use IncomingMessage.destroy method (@simov) -- [#1952](https://github.com/request/request/pull/1952) Adds example for Tor proxy (@prometheansacrifice) -- [#1943](https://github.com/request/request/pull/1943) Update eslint to version 1.10.3 🚀 (@simov, @greenkeeperio-bot) -- [#1924](https://github.com/request/request/pull/1924) Update eslint to version 1.10.1 🚀 (@greenkeeperio-bot) -- [#1915](https://github.com/request/request/pull/1915) Remove content-length and transfer-encoding headers from defaultProxyHeaderWhiteList (@yaxia) - -### v2.67.0 (2015/11/19) -- [#1913](https://github.com/request/request/pull/1913) Update http-signature to version 1.1.0 🚀 (@greenkeeperio-bot) - -### v2.66.0 (2015/11/18) -- [#1906](https://github.com/request/request/pull/1906) Update README URLs based on HTTP redirects (@ReadmeCritic) -- [#1905](https://github.com/request/request/pull/1905) Convert typed arrays into regular buffers (@simov) -- [#1902](https://github.com/request/request/pull/1902) node-uuid@1.4.7 breaks build 🚨 (@greenkeeperio-bot) -- [#1894](https://github.com/request/request/pull/1894) Fix tunneling after redirection from https (Original: #1881) (@simov, @falms) -- [#1893](https://github.com/request/request/pull/1893) 
Update eslint to version 1.9.0 🚀 (@greenkeeperio-bot) -- [#1852](https://github.com/request/request/pull/1852) Update eslint to version 1.7.3 🚀 (@simov, @greenkeeperio-bot, @paulomcnally, @michelsalib, @arbaaz, @nsklkn, @LoicMahieu, @JoshWillik, @jzaefferer, @ryanwholey, @djchie, @thisconnect, @mgenereu, @acroca, @Sebmaster, @KoltesDigital) -- [#1876](https://github.com/request/request/pull/1876) Implement loose matching for har mime types (@simov) -- [#1875](https://github.com/request/request/pull/1875) Update bluebird to version 3.0.2 🚀 (@simov, @greenkeeperio-bot) -- [#1871](https://github.com/request/request/pull/1871) Update browserify to version 12.0.1 🚀 (@greenkeeperio-bot) -- [#1866](https://github.com/request/request/pull/1866) Add missing quotes on x-token property in README (@miguelmota) -- [#1874](https://github.com/request/request/pull/1874) Fix typo in README.md (@gswalden) -- [#1860](https://github.com/request/request/pull/1860) Improve referer header tests and docs (@simov) -- [#1861](https://github.com/request/request/pull/1861) Remove redundant call to Stream constructor (@watson) -- [#1857](https://github.com/request/request/pull/1857) Fix Referer header to point to the original host name (@simov) -- [#1850](https://github.com/request/request/pull/1850) Update karma-coverage to version 0.5.3 🚀 (@greenkeeperio-bot) -- [#1847](https://github.com/request/request/pull/1847) Use node's latest version when building (@simov) -- [#1836](https://github.com/request/request/pull/1836) Tunnel: fix wrong property name (@KoltesDigital) -- [#1820](https://github.com/request/request/pull/1820) Set href as request.js uses it (@mgenereu) -- [#1840](https://github.com/request/request/pull/1840) Update http-signature to version 1.0.2 🚀 (@greenkeeperio-bot) -- [#1845](https://github.com/request/request/pull/1845) Update istanbul to version 0.4.0 🚀 (@greenkeeperio-bot) - -### v2.65.0 (2015/10/11) -- [#1833](https://github.com/request/request/pull/1833) Update aws-sign2 to version 0.6.0 🚀 (@greenkeeperio-bot) -- [#1811](https://github.com/request/request/pull/1811) Enable loose cookie parsing in tough-cookie (@Sebmaster) -- [#1830](https://github.com/request/request/pull/1830) Bring back tilde ranges for all dependencies (@simov) -- [#1821](https://github.com/request/request/pull/1821) Implement support for RFC 2617 MD5-sess algorithm. 
(@BigDSK) -- [#1828](https://github.com/request/request/pull/1828) Updated qs dependency to 5.2.0 (@acroca) -- [#1818](https://github.com/request/request/pull/1818) Extract `readResponseBody` method out of `onRequestResponse` (@pvoisin) -- [#1819](https://github.com/request/request/pull/1819) Run stringify once (@mgenereu) -- [#1814](https://github.com/request/request/pull/1814) Updated har-validator to version 2.0.2 (@greenkeeperio-bot) -- [#1807](https://github.com/request/request/pull/1807) Updated tough-cookie to version 2.1.0 (@greenkeeperio-bot) -- [#1800](https://github.com/request/request/pull/1800) Add caret ranges for devDependencies, except eslint (@simov) -- [#1799](https://github.com/request/request/pull/1799) Updated karma-browserify to version 4.4.0 (@greenkeeperio-bot) -- [#1797](https://github.com/request/request/pull/1797) Updated tape to version 4.2.0 (@greenkeeperio-bot) -- [#1788](https://github.com/request/request/pull/1788) Pinned all dependencies (@greenkeeperio-bot) - -### v2.64.0 (2015/09/25) -- [#1787](https://github.com/request/request/pull/1787) npm ignore examples, release.sh and disabled.appveyor.yml (@thisconnect) -- [#1775](https://github.com/request/request/pull/1775) Fix typo in README.md (@djchie) -- [#1776](https://github.com/request/request/pull/1776) Changed word 'conjuction' to read 'conjunction' in README.md (@ryanwholey) -- [#1785](https://github.com/request/request/pull/1785) Revert: Set default application/json content-type when using json option #1772 (@simov) - -### v2.63.0 (2015/09/21) -- [#1772](https://github.com/request/request/pull/1772) Set default application/json content-type when using json option (@jzaefferer) - -### v2.62.0 (2015/09/15) -- [#1768](https://github.com/request/request/pull/1768) Add node 4.0 to the list of build targets (@simov) -- [#1767](https://github.com/request/request/pull/1767) Query strings now cooperate with unix sockets (@JoshWillik) -- [#1750](https://github.com/request/request/pull/1750) Revert doc about installation of tough-cookie added in #884 (@LoicMahieu) -- [#1746](https://github.com/request/request/pull/1746) Missed comma in Readme (@nsklkn) -- [#1743](https://github.com/request/request/pull/1743) Fix options not being initialized in defaults method (@simov) - -### v2.61.0 (2015/08/19) -- [#1721](https://github.com/request/request/pull/1721) Minor fix in README.md (@arbaaz) -- [#1733](https://github.com/request/request/pull/1733) Avoid useless Buffer transformation (@michelsalib) -- [#1726](https://github.com/request/request/pull/1726) Update README.md (@paulomcnally) -- [#1715](https://github.com/request/request/pull/1715) Fix forever option in node > 0.10 #1709 (@calibr) -- [#1716](https://github.com/request/request/pull/1716) Do not create Buffer from Object in setContentLength(iojs v3.0 issue) (@calibr) -- [#1711](https://github.com/request/request/pull/1711) Add ability to detect connect timeouts (@kevinburke) -- [#1712](https://github.com/request/request/pull/1712) Set certificate expiration to August 2, 2018 (@kevinburke) -- [#1700](https://github.com/request/request/pull/1700) debug() when JSON.parse() on a response body fails (@phillipj) - -### v2.60.0 (2015/07/21) -- [#1687](https://github.com/request/request/pull/1687) Fix caseless bug - content-type not being set for multipart/form-data (@simov, @garymathews) - -### v2.59.0 (2015/07/20) -- [#1671](https://github.com/request/request/pull/1671) Add tests and docs for using the agent, agentClass, agentOptions and forever options. 
- Forever option defaults to using http(s).Agent in node 0.12+ (@simov) -- [#1679](https://github.com/request/request/pull/1679) Fix - do not remove OAuth param when using OAuth realm (@simov, @jhalickman) -- [#1668](https://github.com/request/request/pull/1668) updated dependencies (@deamme) -- [#1656](https://github.com/request/request/pull/1656) Fix form method (@simov) -- [#1651](https://github.com/request/request/pull/1651) Preserve HEAD method when using followAllRedirects (@simov) -- [#1652](https://github.com/request/request/pull/1652) Update `encoding` option documentation in README.md (@daniel347x) -- [#1650](https://github.com/request/request/pull/1650) Allow content-type overriding when using the `form` option (@simov) -- [#1646](https://github.com/request/request/pull/1646) Clarify the nature of setting `ca` in `agentOptions` (@jeffcharles) - -### v2.58.0 (2015/06/16) -- [#1638](https://github.com/request/request/pull/1638) Use the `extend` module to deep extend in the defaults method (@simov) -- [#1631](https://github.com/request/request/pull/1631) Move tunnel logic into separate module (@simov) -- [#1634](https://github.com/request/request/pull/1634) Fix OAuth query transport_method (@simov) -- [#1603](https://github.com/request/request/pull/1603) Add codecov (@simov) - -### v2.57.0 (2015/05/31) -- [#1615](https://github.com/request/request/pull/1615) Replace '.client' with '.socket' as the former was deprecated in 2.2.0. (@ChALkeR) - -### v2.56.0 (2015/05/28) -- [#1610](https://github.com/request/request/pull/1610) Bump module dependencies (@simov) -- [#1600](https://github.com/request/request/pull/1600) Extract the querystring logic into separate module (@simov) -- [#1607](https://github.com/request/request/pull/1607) Re-generate certificates (@simov) -- [#1599](https://github.com/request/request/pull/1599) Move getProxyFromURI logic below the check for Invaild URI (#1595) (@simov) -- [#1598](https://github.com/request/request/pull/1598) Fix the way http verbs are defined in order to please intellisense IDEs (@simov, @flannelJesus) -- [#1591](https://github.com/request/request/pull/1591) A few minor fixes: (@simov) -- [#1584](https://github.com/request/request/pull/1584) Refactor test-default tests (according to comments in #1430) (@simov) -- [#1585](https://github.com/request/request/pull/1585) Fixing documentation regarding TLS options (#1583) (@mainakae) -- [#1574](https://github.com/request/request/pull/1574) Refresh the oauth_nonce on redirect (#1573) (@simov) -- [#1570](https://github.com/request/request/pull/1570) Discovered tests that weren't properly running (@seanstrom) -- [#1569](https://github.com/request/request/pull/1569) Fix pause before response arrives (@kevinoid) -- [#1558](https://github.com/request/request/pull/1558) Emit error instead of throw (@simov) -- [#1568](https://github.com/request/request/pull/1568) Fix stall when piping gzipped response (@kevinoid) -- [#1560](https://github.com/request/request/pull/1560) Update combined-stream (@apechimp) -- [#1543](https://github.com/request/request/pull/1543) Initial support for oauth_body_hash on json payloads (@simov, @aesopwolf) -- [#1541](https://github.com/request/request/pull/1541) Fix coveralls (@simov) -- [#1540](https://github.com/request/request/pull/1540) Fix recursive defaults for convenience methods (@simov) -- [#1536](https://github.com/request/request/pull/1536) More eslint style rules (@froatsnook) -- [#1533](https://github.com/request/request/pull/1533) Adding dependency status bar to 
README.md (@YasharF) -- [#1539](https://github.com/request/request/pull/1539) ensure the latest version of har-validator is included (@ahmadnassri) -- [#1516](https://github.com/request/request/pull/1516) forever+pool test (@devTristan) - -### v2.55.0 (2015/04/05) -- [#1520](https://github.com/request/request/pull/1520) Refactor defaults (@simov) -- [#1525](https://github.com/request/request/pull/1525) Delete request headers with undefined value. (@froatsnook) -- [#1521](https://github.com/request/request/pull/1521) Add promise tests (@simov) -- [#1518](https://github.com/request/request/pull/1518) Fix defaults (@simov) -- [#1515](https://github.com/request/request/pull/1515) Allow static invoking of convenience methods (@simov) -- [#1505](https://github.com/request/request/pull/1505) Fix multipart boundary extraction regexp (@simov) -- [#1510](https://github.com/request/request/pull/1510) Fix basic auth form data (@simov) - -### v2.54.0 (2015/03/24) -- [#1501](https://github.com/request/request/pull/1501) HTTP Archive 1.2 support (@ahmadnassri) -- [#1486](https://github.com/request/request/pull/1486) Add a test for the forever agent (@akshayp) -- [#1500](https://github.com/request/request/pull/1500) Adding handling for no auth method and null bearer (@philberg) -- [#1498](https://github.com/request/request/pull/1498) Add table of contents in readme (@simov) -- [#1477](https://github.com/request/request/pull/1477) Add support for qs options via qsOptions key (@simov) -- [#1496](https://github.com/request/request/pull/1496) Parameters encoded to base 64 should be decoded as UTF-8, not ASCII. (@albanm) -- [#1494](https://github.com/request/request/pull/1494) Update eslint (@froatsnook) -- [#1474](https://github.com/request/request/pull/1474) Require Colon in Basic Auth (@erykwalder) -- [#1481](https://github.com/request/request/pull/1481) Fix baseUrl and redirections. (@burningtree) -- [#1469](https://github.com/request/request/pull/1469) Feature/base url (@froatsnook) -- [#1459](https://github.com/request/request/pull/1459) Add option to time request/response cycle (including rollup of redirects) (@aaron-em) -- [#1468](https://github.com/request/request/pull/1468) Re-enable io.js/node 0.12 build (@simov, @mikeal, @BBB) -- [#1442](https://github.com/request/request/pull/1442) Fixed the issue with strictSSL tests on 0.12 & io.js by explicitly setting a cipher that matches the cert. 
(@BBB, @nickmccurdy, @demohi, @simov, @0x4139) -- [#1460](https://github.com/request/request/pull/1460) localAddress or proxy config is lost when redirecting (@simov, @0x4139) -- [#1453](https://github.com/request/request/pull/1453) Test on Node.js 0.12 and io.js with allowed failures (@nickmccurdy, @demohi) -- [#1426](https://github.com/request/request/pull/1426) Fixing tests to pass on io.js and node 0.12 (only test-https.js stiff failing) (@mikeal) -- [#1446](https://github.com/request/request/pull/1446) Missing HTTP referer header with redirects Fixes #1038 (@simov, @guimon) -- [#1428](https://github.com/request/request/pull/1428) Deprecate Node v0.8.x (@nylen) -- [#1436](https://github.com/request/request/pull/1436) Add ability to set a requester without setting default options (@tikotzky) -- [#1435](https://github.com/request/request/pull/1435) dry up verb methods (@sethpollack) -- [#1423](https://github.com/request/request/pull/1423) Allow fully qualified multipart content-type header (@simov) -- [#1430](https://github.com/request/request/pull/1430) Fix recursive requester (@tikotzky) -- [#1429](https://github.com/request/request/pull/1429) Throw error when making HEAD request with a body (@tikotzky) -- [#1419](https://github.com/request/request/pull/1419) Add note that the project is broken in 0.12.x (@nylen) -- [#1413](https://github.com/request/request/pull/1413) Fix basic auth (@simov) -- [#1397](https://github.com/request/request/pull/1397) Improve pipe-from-file tests (@nylen) - -### v2.53.0 (2015/02/02) -- [#1396](https://github.com/request/request/pull/1396) Do not rfc3986 escape JSON bodies (@nylen, @simov) -- [#1392](https://github.com/request/request/pull/1392) Improve `timeout` option description (@watson) - -### v2.52.0 (2015/02/02) -- [#1383](https://github.com/request/request/pull/1383) Add missing HTTPS options that were not being passed to tunnel (@brichard19) (@nylen) -- [#1388](https://github.com/request/request/pull/1388) Upgrade mime-types package version (@roderickhsiao) -- [#1389](https://github.com/request/request/pull/1389) Revise Setup Tunnel Function (@seanstrom) -- [#1374](https://github.com/request/request/pull/1374) Allow explicitly disabling tunneling for proxied https destinations (@nylen) -- [#1376](https://github.com/request/request/pull/1376) Use karma-browserify for tests. Add browser test coverage reporter. (@eiriksm) -- [#1366](https://github.com/request/request/pull/1366) Refactor OAuth into separate module (@simov) -- [#1373](https://github.com/request/request/pull/1373) Rewrite tunnel test to be pure Node.js (@nylen) -- [#1371](https://github.com/request/request/pull/1371) Upgrade test reporter (@nylen) -- [#1360](https://github.com/request/request/pull/1360) Refactor basic, bearer, digest auth logic into separate class (@simov) -- [#1354](https://github.com/request/request/pull/1354) Remove circular dependency from debugging code (@nylen) -- [#1351](https://github.com/request/request/pull/1351) Move digest auth into private prototype method (@simov) -- [#1352](https://github.com/request/request/pull/1352) Update hawk dependency to ~2.3.0 (@mridgway) -- [#1353](https://github.com/request/request/pull/1353) Correct travis-ci badge (@dogancelik) -- [#1349](https://github.com/request/request/pull/1349) Make sure we return on errored browser requests. 
(@eiriksm) -- [#1346](https://github.com/request/request/pull/1346) getProxyFromURI Extraction Refactor (@seanstrom) -- [#1337](https://github.com/request/request/pull/1337) Standardize test ports on 6767 (@nylen) -- [#1341](https://github.com/request/request/pull/1341) Emit FormData error events as Request error events (@nylen, @rwky) -- [#1343](https://github.com/request/request/pull/1343) Clean up readme badges, and add Travis and Coveralls badges (@nylen) -- [#1345](https://github.com/request/request/pull/1345) Update README.md (@Aaron-Hartwig) -- [#1338](https://github.com/request/request/pull/1338) Always wait for server.close() callback in tests (@nylen) -- [#1342](https://github.com/request/request/pull/1342) Add mock https server and redo start of browser tests for this purpose. (@eiriksm) -- [#1339](https://github.com/request/request/pull/1339) Improve auth docs (@nylen) -- [#1335](https://github.com/request/request/pull/1335) Add support for OAuth plaintext signature method (@simov) -- [#1332](https://github.com/request/request/pull/1332) Add clean script to remove test-browser.js after the tests run (@seanstrom) -- [#1327](https://github.com/request/request/pull/1327) Fix errors generating coverage reports. (@nylen) -- [#1330](https://github.com/request/request/pull/1330) Return empty buffer upon empty response body and encoding is set to null (@seanstrom) -- [#1326](https://github.com/request/request/pull/1326) Use faster container-based infrastructure on Travis (@nylen) -- [#1315](https://github.com/request/request/pull/1315) Implement rfc3986 option (@simov, @nylen, @apoco, @DullReferenceException, @mmalecki, @oliamb, @cliffcrosland, @LewisJEllis, @eiriksm, @poislagarde) -- [#1314](https://github.com/request/request/pull/1314) Detect urlencoded form data header via regex (@simov) -- [#1317](https://github.com/request/request/pull/1317) Improve OAuth1.0 server side flow example (@simov) - -### v2.51.0 (2014/12/10) -- [#1310](https://github.com/request/request/pull/1310) Revert changes introduced in https://github.com/request/request/pull/1282 (@simov) - -### v2.50.0 (2014/12/09) -- [#1308](https://github.com/request/request/pull/1308) Add browser test to keep track of browserify compability. (@eiriksm) -- [#1299](https://github.com/request/request/pull/1299) Add optional support for jsonReviver (@poislagarde) -- [#1277](https://github.com/request/request/pull/1277) Add Coveralls configuration (@simov) -- [#1307](https://github.com/request/request/pull/1307) Upgrade form-data, add back browserify compability. Fixes #455. 
(@eiriksm) -- [#1305](https://github.com/request/request/pull/1305) Fix typo in README.md (@LewisJEllis) -- [#1288](https://github.com/request/request/pull/1288) Update README.md to explain custom file use case (@cliffcrosland) - -### v2.49.0 (2014/11/28) -- [#1295](https://github.com/request/request/pull/1295) fix(proxy): no-proxy false positive (@oliamb) -- [#1292](https://github.com/request/request/pull/1292) Upgrade `caseless` to 0.8.1 (@mmalecki) -- [#1276](https://github.com/request/request/pull/1276) Set transfer encoding for multipart/related to chunked by default (@simov) -- [#1275](https://github.com/request/request/pull/1275) Fix multipart content-type headers detection (@simov) -- [#1269](https://github.com/request/request/pull/1269) adds streams example for review (@tbuchok) -- [#1238](https://github.com/request/request/pull/1238) Add examples README.md (@simov) - -### v2.48.0 (2014/11/12) -- [#1263](https://github.com/request/request/pull/1263) Fixed a syntax error / typo in README.md (@xna2) -- [#1253](https://github.com/request/request/pull/1253) Add multipart chunked flag (@simov, @nylen) -- [#1251](https://github.com/request/request/pull/1251) Clarify that defaults() does not modify global defaults (@nylen) -- [#1250](https://github.com/request/request/pull/1250) Improve documentation for pool and maxSockets options (@nylen) -- [#1237](https://github.com/request/request/pull/1237) Documenting error handling when using streams (@vmattos) -- [#1244](https://github.com/request/request/pull/1244) Finalize changelog command (@nylen) -- [#1241](https://github.com/request/request/pull/1241) Fix typo (@alexanderGugel) -- [#1223](https://github.com/request/request/pull/1223) Show latest version number instead of "upcoming" in changelog (@nylen) -- [#1236](https://github.com/request/request/pull/1236) Document how to use custom CA in README (#1229) (@hypesystem) -- [#1228](https://github.com/request/request/pull/1228) Support for oauth with RSA-SHA1 signing (@nylen) -- [#1216](https://github.com/request/request/pull/1216) Made json and multipart options coexist (@nylen, @simov) -- [#1225](https://github.com/request/request/pull/1225) Allow header white/exclusive lists in any case. (@RReverser) - -### v2.47.0 (2014/10/26) -- [#1222](https://github.com/request/request/pull/1222) Move from mikeal/request to request/request (@nylen) -- [#1220](https://github.com/request/request/pull/1220) update qs dependency to 2.3.1 (@FredKSchott) -- [#1212](https://github.com/request/request/pull/1212) Improve tests/test-timeout.js (@nylen) -- [#1219](https://github.com/request/request/pull/1219) remove old globalAgent workaround for node 0.4 (@request) -- [#1214](https://github.com/request/request/pull/1214) Remove cruft left over from optional dependencies (@nylen) -- [#1215](https://github.com/request/request/pull/1215) Add proxyHeaderExclusiveList option for proxy-only headers. 
(@RReverser) -- [#1211](https://github.com/request/request/pull/1211) Allow 'Host' header instead of 'host' and remember case across redirects (@nylen) -- [#1208](https://github.com/request/request/pull/1208) Improve release script (@nylen) -- [#1213](https://github.com/request/request/pull/1213) Support for custom cookie store (@nylen, @mitsuru) -- [#1197](https://github.com/request/request/pull/1197) Clean up some code around setting the agent (@FredKSchott) -- [#1209](https://github.com/request/request/pull/1209) Improve multipart form append test (@simov) -- [#1207](https://github.com/request/request/pull/1207) Update changelog (@nylen) -- [#1185](https://github.com/request/request/pull/1185) Stream multipart/related bodies (@simov) - -### v2.46.0 (2014/10/23) -- [#1198](https://github.com/request/request/pull/1198) doc for TLS/SSL protocol options (@shawnzhu) -- [#1200](https://github.com/request/request/pull/1200) Add a Gitter chat badge to README.md (@gitter-badger) -- [#1196](https://github.com/request/request/pull/1196) Upgrade taper test reporter to v0.3.0 (@nylen) -- [#1199](https://github.com/request/request/pull/1199) Fix lint error: undeclared var i (@nylen) -- [#1191](https://github.com/request/request/pull/1191) Move self.proxy decision logic out of init and into a helper (@FredKSchott) -- [#1190](https://github.com/request/request/pull/1190) Move _buildRequest() logic back into init (@FredKSchott) -- [#1186](https://github.com/request/request/pull/1186) Support Smarter Unix URL Scheme (@FredKSchott) -- [#1178](https://github.com/request/request/pull/1178) update form documentation for new usage (@FredKSchott) -- [#1180](https://github.com/request/request/pull/1180) Enable no-mixed-requires linting rule (@nylen) -- [#1184](https://github.com/request/request/pull/1184) Don't forward authorization header across redirects to different hosts (@nylen) -- [#1183](https://github.com/request/request/pull/1183) Correct README about pre and postamble CRLF using multipart and not mult... 
(@netpoetica) -- [#1179](https://github.com/request/request/pull/1179) Lint tests directory (@nylen) -- [#1169](https://github.com/request/request/pull/1169) add metadata for form-data file field (@dotcypress) -- [#1173](https://github.com/request/request/pull/1173) remove optional dependencies (@seanstrom) -- [#1165](https://github.com/request/request/pull/1165) Cleanup event listeners and remove function creation from init (@FredKSchott) -- [#1174](https://github.com/request/request/pull/1174) update the request.cookie docs to have a valid cookie example (@seanstrom) -- [#1168](https://github.com/request/request/pull/1168) create a detach helper and use detach helper in replace of nextTick (@seanstrom) -- [#1171](https://github.com/request/request/pull/1171) in post can send form data and use callback (@MiroRadenovic) -- [#1159](https://github.com/request/request/pull/1159) accept charset for x-www-form-urlencoded content-type (@seanstrom) -- [#1157](https://github.com/request/request/pull/1157) Update README.md: body with json=true (@Rob--W) -- [#1164](https://github.com/request/request/pull/1164) Disable tests/test-timeout.js on Travis (@nylen) -- [#1153](https://github.com/request/request/pull/1153) Document how to run a single test (@nylen) -- [#1144](https://github.com/request/request/pull/1144) adds documentation for the "response" event within the streaming section (@tbuchok) -- [#1162](https://github.com/request/request/pull/1162) Update eslintrc file to no longer allow past errors (@FredKSchott) -- [#1155](https://github.com/request/request/pull/1155) Support/use self everywhere (@seanstrom) -- [#1161](https://github.com/request/request/pull/1161) fix no-use-before-define lint warnings (@emkay) -- [#1156](https://github.com/request/request/pull/1156) adding curly brackets to get rid of lint errors (@emkay) -- [#1151](https://github.com/request/request/pull/1151) Fix localAddress test on OS X (@nylen) -- [#1145](https://github.com/request/request/pull/1145) documentation: fix outdated reference to setCookieSync old name in README (@FredKSchott) -- [#1131](https://github.com/request/request/pull/1131) Update pool documentation (@FredKSchott) -- [#1143](https://github.com/request/request/pull/1143) Rewrite all tests to use tape (@nylen) -- [#1137](https://github.com/request/request/pull/1137) Add ability to specifiy querystring lib in options. 
(@jgrund) -- [#1138](https://github.com/request/request/pull/1138) allow hostname and port in place of host on uri (@cappslock) -- [#1134](https://github.com/request/request/pull/1134) Fix multiple redirects and `self.followRedirect` (@blakeembrey) -- [#1130](https://github.com/request/request/pull/1130) documentation fix: add note about npm test for contributing (@FredKSchott) -- [#1120](https://github.com/request/request/pull/1120) Support/refactor request setup tunnel (@seanstrom) -- [#1129](https://github.com/request/request/pull/1129) linting fix: convert double quote strings to use single quotes (@FredKSchott) -- [#1124](https://github.com/request/request/pull/1124) linting fix: remove unneccesary semi-colons (@FredKSchott) - -### v2.45.0 (2014/10/06) -- [#1128](https://github.com/request/request/pull/1128) Add test for setCookie regression (@nylen) -- [#1127](https://github.com/request/request/pull/1127) added tests around using objects as values in a query string (@bcoe) -- [#1103](https://github.com/request/request/pull/1103) Support/refactor request constructor (@nylen, @seanstrom) -- [#1119](https://github.com/request/request/pull/1119) add basic linting to request library (@FredKSchott) -- [#1121](https://github.com/request/request/pull/1121) Revert "Explicitly use sync versions of cookie functions" (@nylen) -- [#1118](https://github.com/request/request/pull/1118) linting fix: Restructure bad empty if statement (@FredKSchott) -- [#1117](https://github.com/request/request/pull/1117) Fix a bad check for valid URIs (@FredKSchott) -- [#1113](https://github.com/request/request/pull/1113) linting fix: space out operators (@FredKSchott) -- [#1116](https://github.com/request/request/pull/1116) Fix typo in `noProxyHost` definition (@FredKSchott) -- [#1114](https://github.com/request/request/pull/1114) linting fix: Added a `new` operator that was missing when creating and throwing a new error (@FredKSchott) -- [#1096](https://github.com/request/request/pull/1096) No_proxy support (@samcday) -- [#1107](https://github.com/request/request/pull/1107) linting-fix: remove unused variables (@FredKSchott) -- [#1112](https://github.com/request/request/pull/1112) linting fix: Make return values consistent and more straitforward (@FredKSchott) -- [#1111](https://github.com/request/request/pull/1111) linting fix: authPieces was getting redeclared (@FredKSchott) -- [#1105](https://github.com/request/request/pull/1105) Use strict mode in request (@FredKSchott) -- [#1110](https://github.com/request/request/pull/1110) linting fix: replace lazy '==' with more strict '===' (@FredKSchott) -- [#1109](https://github.com/request/request/pull/1109) linting fix: remove function call from if-else conditional statement (@FredKSchott) -- [#1102](https://github.com/request/request/pull/1102) Fix to allow setting a `requester` on recursive calls to `request.defaults` (@tikotzky) -- [#1095](https://github.com/request/request/pull/1095) Tweaking engines in package.json (@pdehaan) -- [#1082](https://github.com/request/request/pull/1082) Forward the socket event from the httpModule request (@seanstrom) -- [#972](https://github.com/request/request/pull/972) Clarify gzip handling in the README (@kevinoid) -- [#1089](https://github.com/request/request/pull/1089) Mention that encoding defaults to utf8, not Buffer (@stuartpb) -- [#1088](https://github.com/request/request/pull/1088) Fix cookie example in README.md and make it more clear (@pipi32167) -- [#1027](https://github.com/request/request/pull/1027) Add support for 
multipart form data in request options. (@crocket) -- [#1076](https://github.com/request/request/pull/1076) use Request.abort() to abort the request when the request has timed-out (@seanstrom) -- [#1068](https://github.com/request/request/pull/1068) add optional postamble required by .NET multipart requests (@netpoetica) - -### v2.43.0 (2014/09/18) -- [#1057](https://github.com/request/request/pull/1057) Defaults should not overwrite defined options (@davidwood) -- [#1046](https://github.com/request/request/pull/1046) Propagate datastream errors, useful in case gzip fails. (@ZJONSSON, @Janpot) -- [#1063](https://github.com/request/request/pull/1063) copy the input headers object #1060 (@finnp) -- [#1031](https://github.com/request/request/pull/1031) Explicitly use sync versions of cookie functions (@ZJONSSON) -- [#1056](https://github.com/request/request/pull/1056) Fix redirects when passing url.parse(x) as URL to convenience method (@nylen) - -### v2.42.0 (2014/09/04) -- [#1053](https://github.com/request/request/pull/1053) Fix #1051 Parse auth properly when using non-tunneling proxy (@isaacs) - -### v2.41.0 (2014/09/04) -- [#1050](https://github.com/request/request/pull/1050) Pass whitelisted headers to tunneling proxy. Organize all tunneling logic. (@isaacs, @Feldhacker) -- [#1035](https://github.com/request/request/pull/1035) souped up nodei.co badge (@rvagg) -- [#1048](https://github.com/request/request/pull/1048) Aws is now possible over a proxy (@steven-aerts) -- [#1039](https://github.com/request/request/pull/1039) extract out helper functions to a helper file (@seanstrom) -- [#1021](https://github.com/request/request/pull/1021) Support/refactor indexjs (@seanstrom) -- [#1033](https://github.com/request/request/pull/1033) Improve and document debug options (@nylen) -- [#1034](https://github.com/request/request/pull/1034) Fix readme headings (@nylen) -- [#1030](https://github.com/request/request/pull/1030) Allow recursive request.defaults (@tikotzky) -- [#1029](https://github.com/request/request/pull/1029) Fix a couple of typos (@nylen) -- [#675](https://github.com/request/request/pull/675) Checking for SSL fault on connection before reading SSL properties (@VRMink) -- [#989](https://github.com/request/request/pull/989) Added allowRedirect function. Should return true if redirect is allowed or false otherwise (@doronin) -- [#1025](https://github.com/request/request/pull/1025) [fixes #1023] Set self._ended to true once response has ended (@mridgway) -- [#1020](https://github.com/request/request/pull/1020) Add back removed debug metadata (@FredKSchott) -- [#1008](https://github.com/request/request/pull/1008) Moving to module instead of cutomer buffer concatenation. (@mikeal) -- [#770](https://github.com/request/request/pull/770) Added dependency badge for README file; (@timgluz, @mafintosh, @lalitkapoor, @stash, @bobyrizov) -- [#1016](https://github.com/request/request/pull/1016) toJSON no longer results in an infinite loop, returns simple objects (@FredKSchott) -- [#1018](https://github.com/request/request/pull/1018) Remove pre-0.4.4 HTTPS fix (@mmalecki) -- [#1006](https://github.com/request/request/pull/1006) Migrate to caseless, fixes #1001 (@mikeal) -- [#995](https://github.com/request/request/pull/995) Fix parsing array of objects (@sjonnet19) -- [#999](https://github.com/request/request/pull/999) Fix fallback for browserify for optional modules. 
(@eiriksm) -- [#996](https://github.com/request/request/pull/996) Wrong oauth signature when multiple same param keys exist [updated] (@bengl) - -### v2.40.0 (2014/08/06) -- [#992](https://github.com/request/request/pull/992) Fix security vulnerability. Update qs (@poeticninja) -- [#988](https://github.com/request/request/pull/988) “--” -> “—” (@upisfree) -- [#987](https://github.com/request/request/pull/987) Show optional modules as being loaded by the module that reqeusted them (@iarna) - -### v2.39.0 (2014/07/24) -- [#976](https://github.com/request/request/pull/976) Update README.md (@pvoznenko) - -### v2.38.0 (2014/07/22) -- [#952](https://github.com/request/request/pull/952) Adding support to client certificate with proxy use case (@ofirshaked) -- [#884](https://github.com/request/request/pull/884) Documented tough-cookie installation. (@wbyoung) -- [#935](https://github.com/request/request/pull/935) Correct repository url (@fritx) -- [#963](https://github.com/request/request/pull/963) Update changelog (@nylen) -- [#960](https://github.com/request/request/pull/960) Support gzip with encoding on node pre-v0.9.4 (@kevinoid) -- [#953](https://github.com/request/request/pull/953) Add async Content-Length computation when using form-data (@LoicMahieu) -- [#844](https://github.com/request/request/pull/844) Add support for HTTP[S]_PROXY environment variables. Fixes #595. (@jvmccarthy) -- [#946](https://github.com/request/request/pull/946) defaults: merge headers (@aj0strow) - -### v2.37.0 (2014/07/07) -- [#957](https://github.com/request/request/pull/957) Silence EventEmitter memory leak warning #311 (@watson) -- [#955](https://github.com/request/request/pull/955) check for content-length header before setting it in nextTick (@camilleanne) -- [#951](https://github.com/request/request/pull/951) Add support for gzip content decoding (@kevinoid) -- [#949](https://github.com/request/request/pull/949) Manually enter querystring in form option (@charlespwd) -- [#944](https://github.com/request/request/pull/944) Make request work with browserify (@eiriksm) -- [#943](https://github.com/request/request/pull/943) New mime module (@eiriksm) -- [#927](https://github.com/request/request/pull/927) Bump version of hawk dep. 
(@samccone) -- [#907](https://github.com/request/request/pull/907) append secureOptions to poolKey (@medovob) - -### v2.35.0 (2014/05/17) -- [#901](https://github.com/request/request/pull/901) Fixes #555 (@pigulla) -- [#897](https://github.com/request/request/pull/897) merge with default options (@vohof) -- [#891](https://github.com/request/request/pull/891) fixes 857 - options object is mutated by calling request (@lalitkapoor) -- [#869](https://github.com/request/request/pull/869) Pipefilter test (@tgohn) -- [#866](https://github.com/request/request/pull/866) Fix typo (@dandv) -- [#861](https://github.com/request/request/pull/861) Add support for RFC 6750 Bearer Tokens (@phedny) -- [#809](https://github.com/request/request/pull/809) upgrade tunnel-proxy to 0.4.0 (@ksato9700) -- [#850](https://github.com/request/request/pull/850) Fix word consistency in readme (@0xNobody) -- [#810](https://github.com/request/request/pull/810) add some exposition to mpu example in README.md (@mikermcneil) -- [#840](https://github.com/request/request/pull/840) improve error reporting for invalid protocols (@FND) -- [#821](https://github.com/request/request/pull/821) added secureOptions back (@nw) -- [#815](https://github.com/request/request/pull/815) Create changelog based on pull requests (@lalitkapoor) - -### v2.34.0 (2014/02/18) -- [#516](https://github.com/request/request/pull/516) UNIX Socket URL Support (@lyuzashi) -- [#801](https://github.com/request/request/pull/801) 794 ignore cookie parsing and domain errors (@lalitkapoor) -- [#802](https://github.com/request/request/pull/802) Added the Apache license to the package.json. (@keskival) -- [#793](https://github.com/request/request/pull/793) Adds content-length calculation when submitting forms using form-data li... (@Juul) -- [#785](https://github.com/request/request/pull/785) Provide ability to override content-type when `json` option used (@vvo) -- [#781](https://github.com/request/request/pull/781) simpler isReadStream function (@joaojeronimo) - -### v2.32.0 (2014/01/16) -- [#767](https://github.com/request/request/pull/767) Use tough-cookie CookieJar sync API (@stash) -- [#764](https://github.com/request/request/pull/764) Case-insensitive authentication scheme (@bobyrizov) -- [#763](https://github.com/request/request/pull/763) Upgrade tough-cookie to 0.10.0 (@stash) -- [#744](https://github.com/request/request/pull/744) Use Cookie.parse (@lalitkapoor) -- [#757](https://github.com/request/request/pull/757) require aws-sign2 (@mafintosh) - -### v2.31.0 (2014/01/08) -- [#645](https://github.com/request/request/pull/645) update twitter api url to v1.1 (@mick) -- [#746](https://github.com/request/request/pull/746) README: Markdown code highlight (@weakish) -- [#745](https://github.com/request/request/pull/745) updating setCookie example to make it clear that the callback is required (@emkay) -- [#742](https://github.com/request/request/pull/742) Add note about JSON output body type (@iansltx) -- [#741](https://github.com/request/request/pull/741) README example is using old cookie jar api (@emkay) -- [#736](https://github.com/request/request/pull/736) Fix callback arguments documentation (@mmalecki) -- [#732](https://github.com/request/request/pull/732) JSHINT: Creating global 'for' variable. Should be 'for (var ...'. 
(@Fritz-Lium) -- [#730](https://github.com/request/request/pull/730) better HTTP DIGEST support (@dai-shi) -- [#728](https://github.com/request/request/pull/728) Fix TypeError when calling request.cookie (@scarletmeow) -- [#727](https://github.com/request/request/pull/727) fix requester bug (@jchris) -- [#724](https://github.com/request/request/pull/724) README.md: add custom HTTP Headers example. (@tcort) -- [#719](https://github.com/request/request/pull/719) Made a comment gender neutral. (@unsetbit) -- [#715](https://github.com/request/request/pull/715) Request.multipart no longer crashes when header 'Content-type' present (@pastaclub) -- [#710](https://github.com/request/request/pull/710) Fixing listing in callback part of docs. (@lukasz-zak) -- [#696](https://github.com/request/request/pull/696) Edited README.md for formatting and clarity of phrasing (@Zearin) -- [#694](https://github.com/request/request/pull/694) Typo in README (@VRMink) -- [#690](https://github.com/request/request/pull/690) Handle blank password in basic auth. (@diversario) -- [#682](https://github.com/request/request/pull/682) Optional dependencies (@Turbo87) -- [#683](https://github.com/request/request/pull/683) Travis CI support (@Turbo87) -- [#674](https://github.com/request/request/pull/674) change cookie module,to tough-cookie.please check it . (@sxyizhiren) -- [#666](https://github.com/request/request/pull/666) make `ciphers` and `secureProtocol` to work in https request (@richarddong) -- [#656](https://github.com/request/request/pull/656) Test case for #304. (@diversario) -- [#662](https://github.com/request/request/pull/662) option.tunnel to explicitly disable tunneling (@seanmonstar) -- [#659](https://github.com/request/request/pull/659) fix failure when running with NODE_DEBUG=request, and a test for that (@jrgm) -- [#630](https://github.com/request/request/pull/630) Send random cnonce for HTTP Digest requests (@wprl) -- [#619](https://github.com/request/request/pull/619) decouple things a bit (@joaojeronimo) -- [#613](https://github.com/request/request/pull/613) Fixes #583, moved initialization of self.uri.pathname (@lexander) -- [#605](https://github.com/request/request/pull/605) Only include ":" + pass in Basic Auth if it's defined (fixes #602) (@bendrucker) -- [#596](https://github.com/request/request/pull/596) Global agent is being used when pool is specified (@Cauldrath) -- [#594](https://github.com/request/request/pull/594) Emit complete event when there is no callback (@RomainLK) -- [#601](https://github.com/request/request/pull/601) Fixed a small typo (@michalstanko) -- [#589](https://github.com/request/request/pull/589) Prevent setting headers after they are sent (@geek) -- [#587](https://github.com/request/request/pull/587) Global cookie jar disabled by default (@threepointone) -- [#544](https://github.com/request/request/pull/544) Update http-signature version. (@davidlehn) -- [#581](https://github.com/request/request/pull/581) Fix spelling of "ignoring." (@bigeasy) -- [#568](https://github.com/request/request/pull/568) use agentOptions to create agent when specified in request (@SamPlacette) -- [#564](https://github.com/request/request/pull/564) Fix redirections (@criloz) -- [#541](https://github.com/request/request/pull/541) The exported request function doesn't have an auth method (@tschaub) -- [#542](https://github.com/request/request/pull/542) Expose Request class (@regality) -- [#536](https://github.com/request/request/pull/536) Allow explicitly empty user field for basic authentication. 
(@mikeando) -- [#532](https://github.com/request/request/pull/532) fix typo (@fredericosilva) -- [#497](https://github.com/request/request/pull/497) Added redirect event (@Cauldrath) -- [#503](https://github.com/request/request/pull/503) Fix basic auth for passwords that contain colons (@tonistiigi) -- [#521](https://github.com/request/request/pull/521) Improving test-localAddress.js (@noway) -- [#529](https://github.com/request/request/pull/529) dependencies versions bump (@jodaka) -- [#523](https://github.com/request/request/pull/523) Updating dependencies (@noway) -- [#520](https://github.com/request/request/pull/520) Fixing test-tunnel.js (@noway) -- [#519](https://github.com/request/request/pull/519) Update internal path state on post-creation QS changes (@jblebrun) -- [#510](https://github.com/request/request/pull/510) Add HTTP Signature support. (@davidlehn) -- [#502](https://github.com/request/request/pull/502) Fix POST (and probably other) requests that are retried after 401 Unauthorized (@nylen) -- [#508](https://github.com/request/request/pull/508) Honor the .strictSSL option when using proxies (tunnel-agent) (@jhs) -- [#512](https://github.com/request/request/pull/512) Make password optional to support the format: http://username@hostname/ (@pajato1) -- [#513](https://github.com/request/request/pull/513) add 'localAddress' support (@yyfrankyy) -- [#498](https://github.com/request/request/pull/498) Moving response emit above setHeaders on destination streams (@kenperkins) -- [#490](https://github.com/request/request/pull/490) Empty response body (3-rd argument) must be passed to callback as an empty string (@Olegas) -- [#479](https://github.com/request/request/pull/479) Changing so if Accept header is explicitly set, sending json does not ov... (@RoryH) -- [#475](https://github.com/request/request/pull/475) Use `unescape` from `querystring` (@shimaore) -- [#473](https://github.com/request/request/pull/473) V0.10 compat (@isaacs) -- [#471](https://github.com/request/request/pull/471) Using querystring library from visionmedia (@kbackowski) -- [#461](https://github.com/request/request/pull/461) Strip the UTF8 BOM from a UTF encoded response (@kppullin) -- [#460](https://github.com/request/request/pull/460) hawk 0.10.0 (@hueniverse) -- [#462](https://github.com/request/request/pull/462) if query params are empty, then request path shouldn't end with a '?' (merges cleanly now) (@jaipandya) -- [#456](https://github.com/request/request/pull/456) hawk 0.9.0 (@hueniverse) -- [#429](https://github.com/request/request/pull/429) Copy options before adding callback. 
(@nrn, @nfriedly, @youurayy, @jplock, @kapetan, @landeiro, @othiym23, @mmalecki) -- [#454](https://github.com/request/request/pull/454) Destroy the response if present when destroying the request (clean merge) (@mafintosh) -- [#310](https://github.com/request/request/pull/310) Twitter Oauth Stuff Out of Date; Now Updated (@joemccann, @isaacs, @mscdex) -- [#413](https://github.com/request/request/pull/413) rename googledoodle.png to .jpg (@nfriedly, @youurayy, @jplock, @kapetan, @landeiro, @othiym23, @mmalecki) -- [#448](https://github.com/request/request/pull/448) Convenience method for PATCH (@mloar) -- [#444](https://github.com/request/request/pull/444) protect against double callbacks on error path (@spollack) -- [#433](https://github.com/request/request/pull/433) Added support for HTTPS cert & key (@mmalecki) -- [#430](https://github.com/request/request/pull/430) Respect specified {Host,host} headers, not just {host} (@andrewschaaf) -- [#415](https://github.com/request/request/pull/415) Fixed a typo. (@jerem) -- [#338](https://github.com/request/request/pull/338) Add more auth options, including digest support (@nylen) -- [#403](https://github.com/request/request/pull/403) Optimize environment lookup to happen once only (@mmalecki) -- [#398](https://github.com/request/request/pull/398) Add more reporting to tests (@mmalecki) -- [#388](https://github.com/request/request/pull/388) Ensure "safe" toJSON doesn't break EventEmitters (@othiym23) -- [#381](https://github.com/request/request/pull/381) Resolving "Invalid signature. Expected signature base string: " (@landeiro) -- [#380](https://github.com/request/request/pull/380) Fixes missing host header on retried request when using forever agent (@mac-) -- [#376](https://github.com/request/request/pull/376) Headers lost on redirect (@kapetan) -- [#375](https://github.com/request/request/pull/375) Fix for missing oauth_timestamp parameter (@jplock) -- [#374](https://github.com/request/request/pull/374) Correct Host header for proxy tunnel CONNECT (@youurayy) -- [#370](https://github.com/request/request/pull/370) Twitter reverse auth uses x_auth_mode not x_auth_type (@drudge) -- [#369](https://github.com/request/request/pull/369) Don't remove x_auth_mode for Twitter reverse auth (@drudge) -- [#344](https://github.com/request/request/pull/344) Make AWS auth signing find headers correctly (@nlf) -- [#363](https://github.com/request/request/pull/363) rfc3986 on base_uri, now passes tests (@jeffmarshall) -- [#362](https://github.com/request/request/pull/362) Running `rfc3986` on `base_uri` in `oauth.hmacsign` instead of just `encodeURIComponent` (@jeffmarshall) -- [#361](https://github.com/request/request/pull/361) Don't create a Content-Length header if we already have it set (@danjenkins) -- [#360](https://github.com/request/request/pull/360) Delete self._form along with everything else on redirect (@jgautier) -- [#355](https://github.com/request/request/pull/355) stop sending erroneous headers on redirected requests (@azylman) -- [#332](https://github.com/request/request/pull/332) Fix #296 - Only set Content-Type if body exists (@Marsup) -- [#343](https://github.com/request/request/pull/343) Allow AWS to work in more situations, added a note in the README on its usage (@nlf) -- [#320](https://github.com/request/request/pull/320) request.defaults() doesn't need to wrap jar() (@StuartHarris) -- [#322](https://github.com/request/request/pull/322) Fix + test for piped into request bumped into redirect. 
#321 (@alexindigo) -- [#326](https://github.com/request/request/pull/326) Do not try to remove listener from an undefined connection (@CartoDB) -- [#318](https://github.com/request/request/pull/318) Pass servername to tunneling secure socket creation (@isaacs) -- [#317](https://github.com/request/request/pull/317) Workaround for #313 (@isaacs) -- [#293](https://github.com/request/request/pull/293) Allow parser errors to bubble up to request (@mscdex) -- [#290](https://github.com/request/request/pull/290) A test for #289 (@isaacs) -- [#280](https://github.com/request/request/pull/280) Like in node.js print options if NODE_DEBUG contains the word request (@Filirom1) -- [#207](https://github.com/request/request/pull/207) Fix #206 Change HTTP/HTTPS agent when redirecting between protocols (@isaacs) -- [#214](https://github.com/request/request/pull/214) documenting additional behavior of json option (@jphaas, @vpulim) -- [#272](https://github.com/request/request/pull/272) Boundary begins with CRLF? (@elspoono, @timshadel, @naholyr, @nanodocumet, @TehShrike) -- [#284](https://github.com/request/request/pull/284) Remove stray `console.log()` call in multipart generator. (@bcherry) -- [#241](https://github.com/request/request/pull/241) Composability updates suggested by issue #239 (@polotek) -- [#282](https://github.com/request/request/pull/282) OAuth Authorization header contains non-"oauth_" parameters (@jplock) -- [#279](https://github.com/request/request/pull/279) fix tests with boundary by injecting boundry from header (@benatkin) -- [#273](https://github.com/request/request/pull/273) Pipe back pressure issue (@mafintosh) -- [#268](https://github.com/request/request/pull/268) I'm not OCD seriously (@TehShrike) -- [#263](https://github.com/request/request/pull/263) Bug in OAuth key generation for sha1 (@nanodocumet) -- [#265](https://github.com/request/request/pull/265) uncaughtException when redirected to invalid URI (@naholyr) -- [#262](https://github.com/request/request/pull/262) JSON test should check for equality (@timshadel) -- [#261](https://github.com/request/request/pull/261) Setting 'pool' to 'false' does NOT disable Agent pooling (@timshadel) -- [#249](https://github.com/request/request/pull/249) Fix for the fix of your (closed) issue #89 where self.headers[content-length] is set to 0 for all methods (@sethbridges, @polotek, @zephrax, @jeromegn) -- [#255](https://github.com/request/request/pull/255) multipart allow body === '' ( the empty string ) (@Filirom1) -- [#260](https://github.com/request/request/pull/260) fixed just another leak of 'i' (@sreuter) -- [#246](https://github.com/request/request/pull/246) Fixing the set-cookie header (@jeromegn) -- [#243](https://github.com/request/request/pull/243) Dynamic boundary (@zephrax) -- [#240](https://github.com/request/request/pull/240) don't error when null is passed for options (@polotek) -- [#211](https://github.com/request/request/pull/211) Replace all occurrences of special chars in RFC3986 (@chriso, @vpulim) -- [#224](https://github.com/request/request/pull/224) Multipart content-type change (@janjongboom) -- [#217](https://github.com/request/request/pull/217) need to use Authorization (titlecase) header with Tumblr OAuth (@visnup) -- [#203](https://github.com/request/request/pull/203) Fix cookie and redirect bugs and add auth support for HTTPS tunnel (@vpulim) -- [#199](https://github.com/request/request/pull/199) Tunnel (@isaacs) -- [#198](https://github.com/request/request/pull/198) Bugfix on forever usage of util.inherits 
(@isaacs) -- [#197](https://github.com/request/request/pull/197) Make ForeverAgent work with HTTPS (@isaacs) -- [#193](https://github.com/request/request/pull/193) Fixes GH-119 (@goatslacker) -- [#188](https://github.com/request/request/pull/188) Add abort support to the returned request (@itay) -- [#176](https://github.com/request/request/pull/176) Querystring option (@csainty) -- [#182](https://github.com/request/request/pull/182) Fix request.defaults to support (uri, options, callback) api (@twilson63) -- [#180](https://github.com/request/request/pull/180) Modified the post, put, head and del shortcuts to support uri optional param (@twilson63) -- [#179](https://github.com/request/request/pull/179) fix to add opts in .pipe(stream, opts) (@substack) -- [#177](https://github.com/request/request/pull/177) Issue #173 Support uri as first and optional config as second argument (@twilson63) -- [#170](https://github.com/request/request/pull/170) can't create a cookie in a wrapped request (defaults) (@fabianonunes) -- [#168](https://github.com/request/request/pull/168) Picking off an EasyFix by adding some missing mimetypes. (@serby) -- [#161](https://github.com/request/request/pull/161) Fix cookie jar/headers.cookie collision (#125) (@papandreou) -- [#162](https://github.com/request/request/pull/162) Fix issue #159 (@dpetukhov) -- [#90](https://github.com/request/request/pull/90) add option followAllRedirects to follow post/put redirects (@jroes) -- [#148](https://github.com/request/request/pull/148) Retry Agent (@thejh) -- [#146](https://github.com/request/request/pull/146) Multipart should respect content-type if previously set (@apeace) -- [#144](https://github.com/request/request/pull/144) added "form" option to readme (@petejkim) -- [#133](https://github.com/request/request/pull/133) Fixed cookies parsing (@afanasy) -- [#135](https://github.com/request/request/pull/135) host vs hostname (@iangreenleaf) -- [#132](https://github.com/request/request/pull/132) return the body as a Buffer when encoding is set to null (@jahewson) -- [#112](https://github.com/request/request/pull/112) Support using a custom http-like module (@jhs) -- [#104](https://github.com/request/request/pull/104) Cookie handling contains bugs (@janjongboom) -- [#121](https://github.com/request/request/pull/121) Another patch for cookie handling regression (@jhurliman) -- [#117](https://github.com/request/request/pull/117) Remove the global `i` (@3rd-Eden) -- [#110](https://github.com/request/request/pull/110) Update to Iris Couch URL (@jhs) -- [#86](https://github.com/request/request/pull/86) Can't post binary to multipart requests (@kkaefer) -- [#105](https://github.com/request/request/pull/105) added test for proxy option. 
(@dominictarr) -- [#102](https://github.com/request/request/pull/102) Implemented cookies - closes issue 82: https://github.com/mikeal/request/issues/82 (@alessioalex) -- [#97](https://github.com/request/request/pull/97) Typo in previous pull causes TypeError in non-0.5.11 versions (@isaacs) -- [#96](https://github.com/request/request/pull/96) Authless parsed url host support (@isaacs) -- [#81](https://github.com/request/request/pull/81) Enhance redirect handling (@danmactough) -- [#78](https://github.com/request/request/pull/78) Don't try to do strictSSL for non-ssl connections (@isaacs) -- [#76](https://github.com/request/request/pull/76) Bug when a request fails and a timeout is set (@Marsup) -- [#70](https://github.com/request/request/pull/70) add test script to package.json (@isaacs, @aheckmann) -- [#73](https://github.com/request/request/pull/73) Fix #71 Respect the strictSSL flag (@isaacs) -- [#69](https://github.com/request/request/pull/69) Flatten chunked requests properly (@isaacs) -- [#67](https://github.com/request/request/pull/67) fixed global variable leaks (@aheckmann) -- [#66](https://github.com/request/request/pull/66) Do not overwrite established content-type headers for read stream deliver (@voodootikigod) -- [#53](https://github.com/request/request/pull/53) Parse json: Issue #51 (@benatkin) -- [#45](https://github.com/request/request/pull/45) Added timeout option (@mbrevoort) -- [#35](https://github.com/request/request/pull/35) The "end" event isn't emitted for some responses (@voxpelli) -- [#31](https://github.com/request/request/pull/31) Error on piping a request to a destination (@tobowers) \ No newline at end of file diff --git a/node_modules/request/README.md b/node_modules/request/README.md deleted file mode 100644 index 9da0eb7d893a3..0000000000000 --- a/node_modules/request/README.md +++ /dev/null @@ -1,1133 +0,0 @@ -# Deprecated! - -As of Feb 11th 2020, request is fully deprecated. No new changes are expected to land. In fact, none have landed for some time. - -For more information about why request is deprecated and possible alternatives, refer to -[this issue](https://github.com/request/request/issues/3142). - -# Request - Simplified HTTP client - -[![npm package](https://nodei.co/npm/request.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/request/) - -[![Build status](https://img.shields.io/travis/request/request/master.svg?style=flat-square)](https://travis-ci.org/request/request) -[![Coverage](https://img.shields.io/codecov/c/github/request/request.svg?style=flat-square)](https://codecov.io/github/request/request?branch=master) -[![Coverage](https://img.shields.io/coveralls/request/request.svg?style=flat-square)](https://coveralls.io/r/request/request) -[![Dependency Status](https://img.shields.io/david/request/request.svg?style=flat-square)](https://david-dm.org/request/request) -[![Known Vulnerabilities](https://snyk.io/test/npm/request/badge.svg?style=flat-square)](https://snyk.io/test/npm/request) -[![Gitter](https://img.shields.io/badge/gitter-join_chat-blue.svg?style=flat-square)](https://gitter.im/request/request?utm_source=badge) - - -## Super simple to use - -Request is designed to be the simplest way possible to make http calls. It supports HTTPS and follows redirects by default.
- -```js -const request = require('request'); -request('http://www.google.com', function (error, response, body) { - console.error('error:', error); // Print the error if one occurred - console.log('statusCode:', response && response.statusCode); // Print the response status code if a response was received - console.log('body:', body); // Print the HTML for the Google homepage. -}); -``` - - -## Table of contents - -- [Streaming](#streaming) -- [Promises & Async/Await](#promises--asyncawait) -- [Forms](#forms) -- [HTTP Authentication](#http-authentication) -- [Custom HTTP Headers](#custom-http-headers) -- [OAuth Signing](#oauth-signing) -- [Proxies](#proxies) -- [Unix Domain Sockets](#unix-domain-sockets) -- [TLS/SSL Protocol](#tlsssl-protocol) -- [Support for HAR 1.2](#support-for-har-12) -- [**All Available Options**](#requestoptions-callback) - -Request also offers [convenience methods](#convenience-methods) like -`request.defaults` and `request.post`, and there are -lots of [usage examples](#examples) and several -[debugging techniques](#debugging). - - ---- - - -## Streaming - -You can stream any response to a file stream. - -```js -request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png')) -``` - -You can also stream a file to a PUT or POST request. This method will also check the file extension against a mapping of file extensions to content-types (in this case `application/json`) and use the proper `content-type` in the PUT request (if the headers don’t already provide one). - -```js -fs.createReadStream('file.json').pipe(request.put('http://mysite.com/obj.json')) -``` - -Request can also `pipe` to itself. When doing so, `content-type` and `content-length` are preserved in the PUT headers. - -```js -request.get('http://google.com/img.png').pipe(request.put('http://mysite.com/img.png')) -``` - -Request emits a "response" event when a response is received. The `response` argument will be an instance of [http.IncomingMessage](https://nodejs.org/api/http.html#http_class_http_incomingmessage). - -```js -request - .get('http://google.com/img.png') - .on('response', function(response) { - console.log(response.statusCode) // 200 - console.log(response.headers['content-type']) // 'image/png' - }) - .pipe(request.put('http://mysite.com/img.png')) -``` - -To easily handle errors when streaming requests, listen to the `error` event before piping: - -```js -request - .get('http://mysite.com/doodle.png') - .on('error', function(err) { - console.error(err) - }) - .pipe(fs.createWriteStream('doodle.png')) -``` - -Now let’s get fancy. - -```js -http.createServer(function (req, resp) { - if (req.url === '/doodle.png') { - if (req.method === 'PUT') { - req.pipe(request.put('http://mysite.com/doodle.png')) - } else if (req.method === 'GET' || req.method === 'HEAD') { - request.get('http://mysite.com/doodle.png').pipe(resp) - } - } -}) -``` - -You can also `pipe()` from `http.ServerRequest` instances, as well as to `http.ServerResponse` instances. The HTTP method, headers, and entity-body data will be sent. Which means that, if you don't really care about security, you can do: - -```js -http.createServer(function (req, resp) { - if (req.url === '/doodle.png') { - const x = request('http://mysite.com/doodle.png') - req.pipe(x) - x.pipe(resp) - } -}) -``` - -And since `pipe()` returns the destination stream in ≥ Node 0.5.x you can do one line proxying. 
:) - -```js -req.pipe(request('http://mysite.com/doodle.png')).pipe(resp) -``` - -Also, none of this new functionality conflicts with request's previous features; it just expands them. - -```js -const r = request.defaults({'proxy':'http://localproxy.com'}) - -http.createServer(function (req, resp) { - if (req.url === '/doodle.png') { - r.get('http://google.com/doodle.png').pipe(resp) - } -}) -``` - -You can still use intermediate proxies; the requests will still follow HTTP redirects, etc. - -[back to top](#table-of-contents) - - ---- - - -## Promises & Async/Await - -`request` supports both streaming and callback interfaces natively. If you'd like `request` to return a Promise instead, you can use an alternative interface wrapper for `request`. These wrappers can be useful if you prefer to work with Promises, or if you'd like to use `async`/`await` in ES2017. - -Several alternative interfaces are provided by the request team, including: -- [`request-promise`](https://github.com/request/request-promise) (uses [Bluebird](https://github.com/petkaantonov/bluebird) Promises) -- [`request-promise-native`](https://github.com/request/request-promise-native) (uses native Promises) -- [`request-promise-any`](https://github.com/request/request-promise-any) (uses [any-promise](https://www.npmjs.com/package/any-promise) Promises) - -Also, [`util.promisify`](https://nodejs.org/api/util.html#util_util_promisify_original), which is available from Node.js v8.0, can be used to convert a regular function that takes a callback to return a promise instead. - - -[back to top](#table-of-contents) - - ---- - - -## Forms - -`request` supports `application/x-www-form-urlencoded` and `multipart/form-data` form uploads. For `multipart/related` refer to the `multipart` API. - - -#### application/x-www-form-urlencoded (URL-Encoded Forms) - -URL-encoded forms are simple. - -```js -request.post('http://service.com/upload', {form:{key:'value'}}) -// or -request.post('http://service.com/upload').form({key:'value'}) -// or -request.post({url:'http://service.com/upload', form: {key:'value'}}, function(err,httpResponse,body){ /* ... */ }) -``` - - -#### multipart/form-data (Multipart Form Uploads) - -For `multipart/form-data` we use the [form-data](https://github.com/form-data/form-data) library by [@felixge](https://github.com/felixge). In most cases, you can pass your upload form data via the `formData` option. - - -```js -const formData = { - // Pass a simple key-value pair - my_field: 'my_value', - // Pass data via Buffers - my_buffer: Buffer.from([1, 2, 3]), - // Pass data via Streams - my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), - // Pass multiple values with an Array - attachments: [ - fs.createReadStream(__dirname + '/attachment1.jpg'), - fs.createReadStream(__dirname + '/attachment2.jpg') - ], - // Pass optional meta-data with an 'options' object with style: {value: DATA, options: OPTIONS} - // Use case: for some types of streams, you'll need to provide "file"-related information manually. - // See the `form-data` README for more information about options: https://github.com/form-data/form-data - custom_file: { - value: fs.createReadStream('/dev/urandom'), - options: { - filename: 'topsecret.jpg', - contentType: 'image/jpeg' - } - } -}; -request.post({url:'http://service.com/upload', formData: formData}, function optionalCallback(err, httpResponse, body) { - if (err) { - return console.error('upload failed:', err); - } - console.log('Upload successful! 
Server responded with:', body); -}); -``` - -For advanced cases, you can access the form-data object itself via `r.form()`. This can be modified until the request is fired on the next cycle of the event-loop. (Note that calling `form()` will clear the currently set form data for that request.) - -```js -// NOTE: Advanced use-case, for normal use see 'formData' usage above -const r = request.post('http://service.com/upload', function optionalCallback(err, httpResponse, body) {...}) -const form = r.form(); -form.append('my_field', 'my_value'); -form.append('my_buffer', Buffer.from([1, 2, 3])); -form.append('custom_file', fs.createReadStream(__dirname + '/unicycle.jpg'), {filename: 'unicycle.jpg'}); -``` -See the [form-data README](https://github.com/form-data/form-data) for more information & examples. - - -#### multipart/related - -Some variations in different HTTP implementations require a newline/CRLF before, after, or both before and after the boundary of a `multipart/related` request (using the multipart option). This has been observed in the .NET WebAPI version 4.0. You can turn on a boundary preamble or postamble CRLF by passing `preambleCRLF` or `postambleCRLF` as `true` in your request options. - -```js - request({ - method: 'PUT', - preambleCRLF: true, - postambleCRLF: true, - uri: 'http://service.com/upload', - multipart: [ - { - 'content-type': 'application/json', - body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}}) - }, - { body: 'I am an attachment' }, - { body: fs.createReadStream('image.png') } - ], - // alternatively pass an object containing additional options - multipart: { - chunked: false, - data: [ - { - 'content-type': 'application/json', - body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}}) - }, - { body: 'I am an attachment' } - ] - } - }, - function (error, response, body) { - if (error) { - return console.error('upload failed:', error); - } - console.log('Upload successful! Server responded with:', body); - }) -``` - -[back to top](#table-of-contents) - - ---- - - -## HTTP Authentication - -```js -request.get('http://some.server.com/').auth('username', 'password', false); -// or -request.get('http://some.server.com/', { - 'auth': { - 'user': 'username', - 'pass': 'password', - 'sendImmediately': false - } -}); -// or -request.get('http://some.server.com/').auth(null, null, true, 'bearerToken'); -// or -request.get('http://some.server.com/', { - 'auth': { - 'bearer': 'bearerToken' - } -}); -``` - -If passed as an option, `auth` should be a hash containing values: - -- `user` || `username` -- `pass` || `password` -- `sendImmediately` (optional) -- `bearer` (optional) - -The method form takes parameters -`auth(username, password, sendImmediately, bearer)`. - -`sendImmediately` defaults to `true`, which causes a basic or bearer -authentication header to be sent. If `sendImmediately` is `false`, then -`request` will retry with a proper authentication header after receiving a -`401` response from the server (which must contain a `WWW-Authenticate` header -indicating the required authentication method). - -Note that you can also specify basic authentication using the URL itself, as -detailed in [RFC 1738](http://www.ietf.org/rfc/rfc1738.txt). 
Simply pass the -`user:password` before the host with an `@` sign: - -```js -const username = 'username', - password = 'password', - url = 'http://' + username + ':' + password + '@some.server.com'; - -request({url}, function (error, response, body) { - // Do more stuff with 'body' here -}); -``` - -Digest authentication is supported, but it only works with `sendImmediately` -set to `false`; otherwise `request` will send basic authentication on the -initial request, which will probably cause the request to fail. - -Bearer authentication is supported, and is activated when the `bearer` value is -available. The value may be either a `String` or a `Function` returning a -`String`. Using a function to supply the bearer token is particularly useful if -used in conjunction with `defaults` to allow a single function to supply the -last known token at the time of sending a request, or to compute one on the fly. - -[back to top](#table-of-contents) - - ---- - - -## Custom HTTP Headers - -HTTP Headers, such as `User-Agent`, can be set in the `options` object. -In the example below, we call the github API to find out the number -of stars and forks for the request repository. This requires a -custom `User-Agent` header as well as https. - -```js -const request = require('request'); - -const options = { - url: 'https://api.github.com/repos/request/request', - headers: { - 'User-Agent': 'request' - } -}; - -function callback(error, response, body) { - if (!error && response.statusCode == 200) { - const info = JSON.parse(body); - console.log(info.stargazers_count + " Stars"); - console.log(info.forks_count + " Forks"); - } -} - -request(options, callback); -``` - -[back to top](#table-of-contents) - - ---- - - -## OAuth Signing - -[OAuth version 1.0](https://tools.ietf.org/html/rfc5849) is supported. The -default signing algorithm is -[HMAC-SHA1](https://tools.ietf.org/html/rfc5849#section-3.4.2): - -```js -// OAuth1.0 - 3-legged server side flow (Twitter example) -// step 1 -const qs = require('querystring') - , oauth = - { callback: 'http://mysite.com/callback/' - , consumer_key: CONSUMER_KEY - , consumer_secret: CONSUMER_SECRET - } - , url = 'https://api.twitter.com/oauth/request_token' - ; -request.post({url:url, oauth:oauth}, function (e, r, body) { - // Ideally, you would take the body in the response - // and construct a URL that a user clicks on (like a sign in button). - // The verifier is only available in the response after a user has - // verified with twitter that they are authorizing your app. - - // step 2 - const req_data = qs.parse(body) - const uri = 'https://api.twitter.com/oauth/authenticate' - + '?' 
+ qs.stringify({oauth_token: req_data.oauth_token}) - // redirect the user to the authorize uri - - // step 3 - // after the user is redirected back to your server - const auth_data = qs.parse(body) - , oauth = - { consumer_key: CONSUMER_KEY - , consumer_secret: CONSUMER_SECRET - , token: auth_data.oauth_token - , token_secret: req_data.oauth_token_secret - , verifier: auth_data.oauth_verifier - } - , url = 'https://api.twitter.com/oauth/access_token' - ; - request.post({url:url, oauth:oauth}, function (e, r, body) { - // ready to make signed requests on behalf of the user - const perm_data = qs.parse(body) - , oauth = - { consumer_key: CONSUMER_KEY - , consumer_secret: CONSUMER_SECRET - , token: perm_data.oauth_token - , token_secret: perm_data.oauth_token_secret - } - , url = 'https://api.twitter.com/1.1/users/show.json' - , qs = - { screen_name: perm_data.screen_name - , user_id: perm_data.user_id - } - ; - request.get({url:url, oauth:oauth, qs:qs, json:true}, function (e, r, user) { - console.log(user) - }) - }) -}) -``` - -For [RSA-SHA1 signing](https://tools.ietf.org/html/rfc5849#section-3.4.3), make -the following changes to the OAuth options object: -* Pass `signature_method : 'RSA-SHA1'` -* Instead of `consumer_secret`, specify a `private_key` string in - [PEM format](http://how2ssl.com/articles/working_with_pem_files/) - -For [PLAINTEXT signing](http://oauth.net/core/1.0/#anchor22), make -the following changes to the OAuth options object: -* Pass `signature_method : 'PLAINTEXT'` - -To send OAuth parameters via query params or in a post body as described in the -[Consumer Request Parameters](http://oauth.net/core/1.0/#consumer_req_param) -section of the oauth1 spec: -* Pass `transport_method : 'query'` or `transport_method : 'body'` in the OAuth - options object. -* `transport_method` defaults to `'header'` - -To use [Request Body Hash](https://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html) you can either -* Manually generate the body hash and pass it as a string `body_hash: '...'` -* Automatically generate the body hash by passing `body_hash: true` - -[back to top](#table-of-contents) - - ---- - - -## Proxies - -If you specify a `proxy` option, then the request (and any subsequent -redirects) will be sent via a connection to the proxy server. - -If your endpoint is an `https` url, and you are using a proxy, then -request will send a `CONNECT` request to the proxy server *first*, and -then use the supplied connection to connect to the endpoint. - -That is, first it will make a request like: - -``` -HTTP/1.1 CONNECT endpoint-server.com:80 -Host: proxy-server.com -User-Agent: whatever user agent you specify -``` - -and then the proxy server makes a TCP connection to `endpoint-server` -on port `80`, and returns a response that looks like: - -``` -HTTP/1.1 200 OK -``` - -At this point, the connection is left open, and the client is -communicating directly with the `endpoint-server.com` machine. - -See [the Wikipedia page on HTTP tunneling](https://en.wikipedia.org/wiki/HTTP_tunnel) -for more information. - -By default, when proxying `http` traffic, request will simply make a -standard proxied `http` request. This is done by making the `url` -section of the initial line of the request a fully qualified url to -the endpoint. 
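As an illustrative sketch only (reusing the placeholder hosts from this section; the proxy port is an arbitrary assumption), a plain `http` request routed through a proxy is configured with the `proxy` option:

```js
const request = require('request');

// Route a plain http request through a forward proxy.
// endpoint-server.com and proxy-server.com are placeholder hosts.
request({
  url: 'http://endpoint-server.com/some-url',
  proxy: 'http://proxy-server.com:8080'
}, function (error, response, body) {
  if (error) {
    return console.error('proxied request failed:', error);
  }
  console.log('statusCode:', response && response.statusCode);
});
```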
- -For example, it will make a single request that looks like: - -``` -HTTP/1.1 GET http://endpoint-server.com/some-url -Host: proxy-server.com -Other-Headers: all go here - -request body or whatever -``` - -Because a pure "http over http" tunnel offers no additional security -or other features, it is generally simpler to go with a -straightforward HTTP proxy in this case. However, if you would like -to force a tunneling proxy, you may set the `tunnel` option to `true`. - -You can also make a standard proxied `http` request by explicitly setting -`tunnel : false`, but **note that this will allow the proxy to see the traffic -to/from the destination server**. - -If you are using a tunneling proxy, you may set the -`proxyHeaderWhiteList` to share certain headers with the proxy. - -You can also set the `proxyHeaderExclusiveList` to share certain -headers only with the proxy and not with the destination host. - -By default, this set is: - -``` -accept -accept-charset -accept-encoding -accept-language -accept-ranges -cache-control -content-encoding -content-language -content-length -content-location -content-md5 -content-range -content-type -connection -date -expect -max-forwards -pragma -proxy-authorization -referer -te -transfer-encoding -user-agent -via -``` - -Note that, when using a tunneling proxy, the `proxy-authorization` -header and any headers from custom `proxyHeaderExclusiveList` are -*never* sent to the endpoint server, but only to the proxy server. - - -### Controlling proxy behaviour using environment variables - -The following environment variables are respected by `request`: - - * `HTTP_PROXY` / `http_proxy` - * `HTTPS_PROXY` / `https_proxy` - * `NO_PROXY` / `no_proxy` - -When `HTTP_PROXY` / `http_proxy` are set, they will be used to proxy non-SSL requests that do not have an explicit `proxy` configuration option present. Similarly, `HTTPS_PROXY` / `https_proxy` will be respected for SSL requests that do not have an explicit `proxy` configuration option. It is valid to define a proxy in one of the environment variables, but then override it for a specific request, using the `proxy` configuration option. Furthermore, the `proxy` configuration option can be explicitly set to false / null to opt out of proxying altogether for that request, as sketched after the examples below. - -`request` is also aware of the `NO_PROXY`/`no_proxy` environment variables. These variables provide a granular way to opt out of proxying, on a per-host basis. The variable should contain a comma-separated list of hosts to opt out of proxying. It is also possible to opt out of proxying when a particular destination port is used. Finally, the variable may be set to `*` to opt out of the implicit proxy configuration of the other environment variables. - -Here are some examples of valid `no_proxy` values: - - * `google.com` - don't proxy HTTP/HTTPS requests to Google. - * `google.com:443` - don't proxy HTTPS requests to Google, but *do* proxy HTTP requests to Google. - * `google.com:443, yahoo.com:80` - don't proxy HTTPS requests to Google, and don't proxy HTTP requests to Yahoo! - * `*` - ignore `https_proxy`/`http_proxy` environment variables altogether.
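A minimal sketch of the per-request override described above (the hostnames and proxy below are placeholders introduced here for illustration, not from the original documentation):

```js
const request = require('request');

// Assume HTTP_PROXY / NO_PROXY are set in the environment.
// An explicit `proxy` option overrides HTTP_PROXY for this request only.
request({
  url: 'http://internal.example.com/status',
  proxy: 'http://other-proxy.example.com:3128'
}, function (error, response, body) { /* ... */ });

// Setting `proxy` to false opts this request out of proxying altogether,
// even when HTTP_PROXY is set.
request({
  url: 'http://internal.example.com/status',
  proxy: false
}, function (error, response, body) { /* ... */ });
```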
- -[back to top](#table-of-contents) - - ---- - - -## UNIX Domain Sockets - -`request` supports making requests to [UNIX Domain Sockets](https://en.wikipedia.org/wiki/Unix_domain_socket). To make one, use the following URL scheme: - -```js -/* Pattern */ 'http://unix:SOCKET:PATH' -/* Example */ request.get('http://unix:/absolute/path/to/unix.socket:/request/path') -``` - -Note: The `SOCKET` path is assumed to be absolute to the root of the host file system. - -[back to top](#table-of-contents) - - ---- - - -## TLS/SSL Protocol - -TLS/SSL Protocol options, such as `cert`, `key` and `passphrase`, can be -set directly in the `options` object, in the `agentOptions` property of the `options` object, or even in `https.globalAgent.options`. Keep in mind that, although `agentOptions` allows for a slightly wider range of configurations, the recommended way is via the `options` object directly, as using `agentOptions` or `https.globalAgent.options` would not be applied in the same way in proxied environments (as data travels through a TLS connection instead of an http/https agent). - -```js -const fs = require('fs') - , path = require('path') - , certFile = path.resolve(__dirname, 'ssl/client.crt') - , keyFile = path.resolve(__dirname, 'ssl/client.key') - , caFile = path.resolve(__dirname, 'ssl/ca.cert.pem') - , request = require('request'); - -const options = { - url: 'https://api.some-server.com/', - cert: fs.readFileSync(certFile), - key: fs.readFileSync(keyFile), - passphrase: 'password', - ca: fs.readFileSync(caFile) -}; - -request.get(options); -``` - -### Using `options.agentOptions` - -In the example below, we call an API that requires a client-side SSL certificate -(in PEM format) with a passphrase-protected private key (in PEM format) and disables the SSLv3 protocol: - -```js -const fs = require('fs') - , path = require('path') - , certFile = path.resolve(__dirname, 'ssl/client.crt') - , keyFile = path.resolve(__dirname, 'ssl/client.key') - , request = require('request'); - -const options = { - url: 'https://api.some-server.com/', - agentOptions: { - cert: fs.readFileSync(certFile), - key: fs.readFileSync(keyFile), - // Or use `pfx` property replacing `cert` and `key` when using private key, certificate and CA certs in PFX or PKCS12 format: - // pfx: fs.readFileSync(pfxFilePath), - passphrase: 'password', - securityOptions: 'SSL_OP_NO_SSLv3' - } -}; - -request.get(options); -``` - -It is also possible to force the use of SSLv3 only by specifying `secureProtocol`: - -```js -request.get({ - url: 'https://api.some-server.com/', - agentOptions: { - secureProtocol: 'SSLv3_method' - } -}); -``` - -It is possible to accept certificates other than those signed by generally allowed Certificate Authorities (CAs). -This can be useful, for example, when using self-signed certificates. -To require a different root certificate, you can specify the signing CA by adding the contents of the CA's certificate file to the `agentOptions`. -The certificate the domain presents must be signed by the root certificate specified: - -```js -request.get({ - url: 'https://api.some-server.com/', - agentOptions: { - ca: fs.readFileSync('ca.cert.pem') - } -}); -``` - -The `ca` value can be an array of certificates, in the event you have a private or internal corporate public-key infrastructure hierarchy. For example, if you want to connect to https://api.some-server.com which presents a key chain consisting of: -1. its own public key, which is signed by: -2. an intermediate "Corp Issuing Server", that is in turn signed by: -3. 
a root CA "Corp Root CA"; - -you can configure your request as follows: - -```js -request.get({ - url: 'https://api.some-server.com/', - agentOptions: { - ca: [ - fs.readFileSync('Corp Issuing Server.pem'), - fs.readFileSync('Corp Root CA.pem') - ] - } -}); -``` - -[back to top](#table-of-contents) - - ---- - -## Support for HAR 1.2 - -The `options.har` property will override the values: `url`, `method`, `qs`, `headers`, `form`, `formData`, `body`, `json`, as well as construct multipart data and read files from disk when `request.postData.params[].fileName` is present without a matching `value`. - -A validation step will check if the HAR Request format matches the latest spec (v1.2) and will skip parsing if not matching. - -```js - const request = require('request') - request({ - // will be ignored - method: 'GET', - uri: 'http://www.google.com', - - // HTTP Archive Request Object - har: { - url: 'http://www.mockbin.com/har', - method: 'POST', - headers: [ - { - name: 'content-type', - value: 'application/x-www-form-urlencoded' - } - ], - postData: { - mimeType: 'application/x-www-form-urlencoded', - params: [ - { - name: 'foo', - value: 'bar' - }, - { - name: 'hello', - value: 'world' - } - ] - } - } - }) - - // a POST request will be sent to http://www.mockbin.com - // with body an application/x-www-form-urlencoded body: - // foo=bar&hello=world -``` - -[back to top](#table-of-contents) - - ---- - -## request(options, callback) - -The first argument can be either a `url` or an `options` object. The only required option is `uri`; all others are optional. - -- `uri` || `url` - fully qualified uri or a parsed url object from `url.parse()` -- `baseUrl` - fully qualified uri string used as the base url. Most useful with `request.defaults`, for example when you want to do many requests to the same domain. If `baseUrl` is `https://example.com/api/`, then requesting `/end/point?test=true` will fetch `https://example.com/api/end/point?test=true`. When `baseUrl` is given, `uri` must also be a string. -- `method` - http method (default: `"GET"`) -- `headers` - http headers (default: `{}`) - ---- - -- `qs` - object containing querystring values to be appended to the `uri` -- `qsParseOptions` - object containing options to pass to the [qs.parse](https://github.com/hapijs/qs#parsing-objects) method. Alternatively pass options to the [querystring.parse](https://nodejs.org/docs/v0.12.0/api/querystring.html#querystring_querystring_parse_str_sep_eq_options) method using this format `{sep:';', eq:':', options:{}}` -- `qsStringifyOptions` - object containing options to pass to the [qs.stringify](https://github.com/hapijs/qs#stringifying) method. Alternatively pass options to the [querystring.stringify](https://nodejs.org/docs/v0.12.0/api/querystring.html#querystring_querystring_stringify_obj_sep_eq_options) method using this format `{sep:';', eq:':', options:{}}`. For example, to change the way arrays are converted to query strings using the `qs` module pass the `arrayFormat` option with one of `indices|brackets|repeat` -- `useQuerystring` - if true, use `querystring` to stringify and parse - querystrings, otherwise use `qs` (default: `false`). Set this option to - `true` if you need arrays to be serialized as `foo=bar&foo=baz` instead of the - default `foo[0]=bar&foo[1]=baz`. - ---- - -- `body` - entity body for PATCH, POST and PUT requests. Must be a `Buffer`, `String` or `ReadStream`. If `json` is `true`, then `body` must be a JSON-serializable object. 
-- `form` - when passed an object or a querystring, this sets `body` to a querystring representation of value, and adds `Content-type: application/x-www-form-urlencoded` header. When passed no options, a `FormData` instance is returned (and is piped to request). See "Forms" section above. -- `formData` - data to pass for a `multipart/form-data` request. See - [Forms](#forms) section above. -- `multipart` - array of objects which contain their own headers and `body` - attributes. Sends a `multipart/related` request. See [Forms](#forms) section - above. - - Alternatively you can pass in an object `{chunked: false, data: []}` where - `chunked` is used to specify whether the request is sent in - [chunked transfer encoding](https://en.wikipedia.org/wiki/Chunked_transfer_encoding) - In non-chunked requests, data items with body streams are not allowed. -- `preambleCRLF` - append a newline/CRLF before the boundary of your `multipart/form-data` request. -- `postambleCRLF` - append a newline/CRLF at the end of the boundary of your `multipart/form-data` request. -- `json` - sets `body` to JSON representation of value and adds `Content-type: application/json` header. Additionally, parses the response body as JSON. -- `jsonReviver` - a [reviver function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) that will be passed to `JSON.parse()` when parsing a JSON response body. -- `jsonReplacer` - a [replacer function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify) that will be passed to `JSON.stringify()` when stringifying a JSON request body. - ---- - -- `auth` - a hash containing values `user` || `username`, `pass` || `password`, and `sendImmediately` (optional). See documentation above. -- `oauth` - options for OAuth HMAC-SHA1 signing. See documentation above. -- `hawk` - options for [Hawk signing](https://github.com/hueniverse/hawk). The `credentials` key must contain the necessary signing info, [see hawk docs for details](https://github.com/hueniverse/hawk#usage-example). -- `aws` - `object` containing AWS signing information. Should have the properties `key`, `secret`, and optionally `session` (note that this only works for services that require session as part of the canonical string). Also requires the property `bucket`, unless you’re specifying your `bucket` as part of the path, or the request doesn’t use a bucket (i.e. GET Services). If you want to use AWS sign version 4 use the parameter `sign_version` with value `4` otherwise the default is version 2. If you are using SigV4, you can also include a `service` property that specifies the service name. **Note:** you need to `npm install aws4` first. -- `httpSignature` - options for the [HTTP Signature Scheme](https://github.com/joyent/node-http-signature/blob/master/http_signing.md) using [Joyent's library](https://github.com/joyent/node-http-signature). The `keyId` and `key` properties must be specified. See the docs for other options. - ---- - -- `followRedirect` - follow HTTP 3xx responses as redirects (default: `true`). This property can also be implemented as function which gets `response` object as a single argument and should return `true` if redirects should continue or `false` otherwise. -- `followAllRedirects` - follow non-GET HTTP 3xx responses as redirects (default: `false`) -- `followOriginalHttpMethod` - by default we redirect to HTTP method GET. 
you can enable this property to redirect to the original HTTP method (default: `false`) -- `maxRedirects` - the maximum number of redirects to follow (default: `10`) -- `removeRefererHeader` - removes the referer header when a redirect happens (default: `false`). **Note:** if true, referer header set in the initial request is preserved during redirect chain. - ---- - -- `encoding` - encoding to be used on `setEncoding` of response data. If `null`, the `body` is returned as a `Buffer`. Anything else **(including the default value of `undefined`)** will be passed as the [encoding](http://nodejs.org/api/buffer.html#buffer_buffer) parameter to `toString()` (meaning this is effectively `utf8` by default). (**Note:** if you expect binary data, you should set `encoding: null`.) -- `gzip` - if `true`, add an `Accept-Encoding` header to request compressed content encodings from the server (if not already present) and decode supported content encodings in the response. **Note:** Automatic decoding of the response content is performed on the body data returned through `request` (both through the `request` stream and passed to the callback function) but is not performed on the `response` stream (available from the `response` event) which is the unmodified `http.IncomingMessage` object which may contain compressed data. See example below. -- `jar` - if `true`, remember cookies for future use (or define your custom cookie jar; see examples section) - ---- - -- `agent` - `http(s).Agent` instance to use -- `agentClass` - alternatively specify your agent's class name -- `agentOptions` - and pass its options. **Note:** for HTTPS see [tls API doc for TLS/SSL options](http://nodejs.org/api/tls.html#tls_tls_connect_options_callback) and the [documentation above](#using-optionsagentoptions). -- `forever` - set to `true` to use the [forever-agent](https://github.com/request/forever-agent) **Note:** Defaults to `http(s).Agent({keepAlive:true})` in node 0.12+ -- `pool` - an object describing which agents to use for the request. If this option is omitted the request will use the global agent (as long as your options allow for it). Otherwise, request will search the pool for your custom agent. If no custom agent is found, a new agent will be created and added to the pool. **Note:** `pool` is used only when the `agent` option is not specified. - - A `maxSockets` property can also be provided on the `pool` object to set the max number of sockets for all agents created (ex: `pool: {maxSockets: Infinity}`). - - Note that if you are sending multiple requests in a loop and creating - multiple new `pool` objects, `maxSockets` will not work as intended. To - work around this, either use [`request.defaults`](#requestdefaultsoptions) - with your pool options or create the pool object with the `maxSockets` - property outside of the loop. -- `timeout` - integer containing number of milliseconds, controls two timeouts. - - **Read timeout**: Time to wait for a server to send response headers (and start the response body) before aborting the request. - - **Connection timeout**: Sets the socket to timeout after `timeout` milliseconds of inactivity. Note that increasing the timeout beyond the OS-wide TCP connection timeout will not have any effect ([the default in Linux can be anywhere from 20-120 seconds][linux-timeout]) - -[linux-timeout]: http://www.sekuda.com/overriding_the_default_linux_kernel_20_second_tcp_socket_connect_timeout - ---- - -- `localAddress` - local interface to bind for network connections. 
-- `proxy` - an HTTP proxy to be used. Supports proxy Auth with Basic Auth, identical to support for the `url` parameter (by embedding the auth info in the `uri`) -- `strictSSL` - if `true`, requires SSL certificates be valid. **Note:** to use your own certificate authority, you need to specify an agent that was created with that CA as an option. -- `tunnel` - controls the behavior of - [HTTP `CONNECT` tunneling](https://en.wikipedia.org/wiki/HTTP_tunnel#HTTP_CONNECT_tunneling) - as follows: - - `undefined` (default) - `true` if the destination is `https`, `false` otherwise - - `true` - always tunnel to the destination by making a `CONNECT` request to - the proxy - - `false` - request the destination as a `GET` request. -- `proxyHeaderWhiteList` - a whitelist of headers to send to a - tunneling proxy. -- `proxyHeaderExclusiveList` - a whitelist of headers to send - exclusively to a tunneling proxy and not to destination. - ---- - -- `time` - if `true`, the request-response cycle (including all redirects) is timed at millisecond resolution. When set, the following properties are added to the response object: - - `elapsedTime` Duration of the entire request/response in milliseconds (*deprecated*). - - `responseStartTime` Timestamp when the response began (in Unix Epoch milliseconds) (*deprecated*). - - `timingStart` Timestamp of the start of the request (in Unix Epoch milliseconds). - - `timings` Contains event timestamps in millisecond resolution relative to `timingStart`. If there were redirects, the properties reflect the timings of the final request in the redirect chain: - - `socket` Relative timestamp when the [`http`](https://nodejs.org/api/http.html#http_event_socket) module's `socket` event fires. This happens when the socket is assigned to the request. - - `lookup` Relative timestamp when the [`net`](https://nodejs.org/api/net.html#net_event_lookup) module's `lookup` event fires. This happens when the DNS has been resolved. - - `connect`: Relative timestamp when the [`net`](https://nodejs.org/api/net.html#net_event_connect) module's `connect` event fires. This happens when the server acknowledges the TCP connection. - - `response`: Relative timestamp when the [`http`](https://nodejs.org/api/http.html#http_event_response) module's `response` event fires. This happens when the first bytes are received from the server. - - `end`: Relative timestamp when the last bytes of the response are received. - - `timingPhases` Contains the durations of each request phase. If there were redirects, the properties reflect the timings of the final request in the redirect chain: - - `wait`: Duration of socket initialization (`timings.socket`) - - `dns`: Duration of DNS lookup (`timings.lookup` - `timings.socket`) - - `tcp`: Duration of TCP connection (`timings.connect` - `timings.socket`) - - `firstByte`: Duration of HTTP server response (`timings.response` - `timings.connect`) - - `download`: Duration of HTTP download (`timings.end` - `timings.response`) - - `total`: Duration entire HTTP round-trip (`timings.end`) - -- `har` - a [HAR 1.2 Request Object](http://www.softwareishard.com/blog/har-12-spec/#request), will be processed from HAR format into options overwriting matching values *(see the [HAR 1.2 section](#support-for-har-12) for details)* -- `callback` - alternatively pass the request's callback in the options object - -The callback argument gets 3 arguments: - -1. 
An `error` when applicable (usually from [`http.ClientRequest`](http://nodejs.org/api/http.html#http_class_http_clientrequest) object) -2. An [`http.IncomingMessage`](https://nodejs.org/api/http.html#http_class_http_incomingmessage) object (Response object) -3. The third is the `response` body (`String` or `Buffer`, or JSON object if the `json` option is supplied) - -[back to top](#table-of-contents) - - ---- - -## Convenience methods - -There are also shorthand methods for different HTTP METHODs and some other conveniences. - - -### request.defaults(options) - -This method **returns a wrapper** around the normal request API that defaults -to whatever options you pass to it. - -**Note:** `request.defaults()` **does not** modify the global request API; -instead, it **returns a wrapper** that has your default settings applied to it. - -**Note:** You can call `.defaults()` on the wrapper that is returned from -`request.defaults` to add/override defaults that were previously defaulted. - -For example: -```js -//requests using baseRequest() will set the 'x-token' header -const baseRequest = request.defaults({ - headers: {'x-token': 'my-token'} -}) - -//requests using specialRequest() will include the 'x-token' header set in -//baseRequest and will also include the 'special' header -const specialRequest = baseRequest.defaults({ - headers: {special: 'special value'} -}) -``` - -### request.METHOD() - -These HTTP method convenience functions act just like `request()` but with a default method already set for you: - -- *request.get()*: Defaults to `method: "GET"`. -- *request.post()*: Defaults to `method: "POST"`. -- *request.put()*: Defaults to `method: "PUT"`. -- *request.patch()*: Defaults to `method: "PATCH"`. -- *request.del() / request.delete()*: Defaults to `method: "DELETE"`. -- *request.head()*: Defaults to `method: "HEAD"`. -- *request.options()*: Defaults to `method: "OPTIONS"`. - -### request.cookie() - -Function that creates a new cookie. - -```js -request.cookie('key1=value1') -``` -### request.jar() - -Function that creates a new cookie jar. - -```js -request.jar() -``` - -### response.caseless.get('header-name') - -Function that returns the specified response header field using a [case-insensitive match](https://tools.ietf.org/html/rfc7230#section-3.2) - -```js -request('http://www.google.com', function (error, response, body) { - // print the Content-Type header even if the server returned it as 'content-type' (lowercase) - console.log('Content-Type is:', response.caseless.get('Content-Type')); -}); -``` - -[back to top](#table-of-contents) - - ---- - - -## Debugging - -There are at least three ways to debug the operation of `request`: - -1. Launch the node process like `NODE_DEBUG=request node script.js` - (`lib,request,otherlib` works too). - -2. Set `require('request').debug = true` at any time (this does the same thing - as #1). - -3. Use the [request-debug module](https://github.com/request/request-debug) to - view request and response headers and bodies. - -[back to top](#table-of-contents) - - ---- - -## Timeouts - -Most requests to external servers should have a timeout attached, in case the -server is not responding in a timely manner. Without a timeout, your code may -have a socket open/consume resources for minutes or more. - -There are two main types of timeouts: **connection timeouts** and **read -timeouts**. 
A connect timeout occurs if the timeout is hit while your client is -attempting to establish a connection to a remote machine (corresponding to the -[connect() call][connect] on the socket). A read timeout occurs any time the -server is too slow to send back a part of the response. - -These two situations have widely different implications for what went wrong -with the request, so it's useful to be able to distinguish them. You can detect -timeout errors by checking `err.code` for an 'ETIMEDOUT' value. Further, you -can detect whether the timeout was a connection timeout by checking if the -`err.connect` property is set to `true`. - -```js -request.get('http://10.255.255.1', {timeout: 1500}, function(err) { - console.log(err.code === 'ETIMEDOUT'); - // Set to `true` if the timeout was a connection timeout, `false` or - // `undefined` otherwise. - console.log(err.connect === true); - process.exit(0); -}); -``` - -[connect]: http://linux.die.net/man/2/connect - -## Examples: - -```js - const request = require('request') - , rand = Math.floor(Math.random()*100000000).toString() - ; - request( - { method: 'PUT' - , uri: 'http://mikeal.iriscouch.com/testjs/' + rand - , multipart: - [ { 'content-type': 'application/json' - , body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}}) - } - , { body: 'I am an attachment' } - ] - } - , function (error, response, body) { - if(response.statusCode == 201){ - console.log('document saved as: http://mikeal.iriscouch.com/testjs/'+ rand) - } else { - console.log('error: '+ response.statusCode) - console.log(body) - } - } - ) -``` - -For backwards-compatibility, response compression is not supported by default. -To accept gzip-compressed responses, set the `gzip` option to `true`. Note -that the body data passed through `request` is automatically decompressed -while the response object is unmodified and will contain compressed data if -the server sent a compressed response. - -```js - const request = require('request') - request( - { method: 'GET' - , uri: 'http://www.google.com' - , gzip: true - } - , function (error, response, body) { - // body is the decompressed response body - console.log('server encoded the data as: ' + (response.headers['content-encoding'] || 'identity')) - console.log('the decoded data is: ' + body) - } - ) - .on('data', function(data) { - // decompressed data as it is received - console.log('decoded chunk: ' + data) - }) - .on('response', function(response) { - // unmodified http.IncomingMessage object - response.on('data', function(data) { - // compressed data as it is received - console.log('received ' + data.length + ' bytes of compressed data') - }) - }) -``` - -Cookies are disabled by default (else, they would be used in subsequent requests). To enable cookies, set `jar` to `true` (either in `defaults` or `options`). 
-
-```js
-const request = require('request').defaults({jar: true})
-request('http://www.google.com', function () {
-  request('http://images.google.com')
-})
-```
-
-To use a custom cookie jar (instead of `request`’s global cookie jar), set `jar` to an instance of `request.jar()` (either in `defaults` or `options`).
-
-```js
-const j = require('request').jar()
-const request = require('request').defaults({jar: j})
-request('http://www.google.com', function () {
-  request('http://images.google.com')
-})
-```
-
-OR
-
-```js
-const j = request.jar();
-const cookie = request.cookie('key1=value1');
-const url = 'http://www.google.com';
-j.setCookie(cookie, url);
-request({url: url, jar: j}, function () {
-  request('http://images.google.com')
-})
-```
-
-To use a custom cookie store (such as a
-[`FileCookieStore`](https://github.com/mitsuru/tough-cookie-filestore)
-which supports saving to and restoring from JSON files), pass it as a parameter
-to `request.jar()`:
-
-```js
-const FileCookieStore = require('tough-cookie-filestore');
-// NOTE - currently the 'cookies.json' file must already exist!
-const j = require('request').jar(new FileCookieStore('cookies.json'));
-const request = require('request').defaults({ jar: j })
-request('http://www.google.com', function() {
-  request('http://images.google.com')
-})
-```
-
-The cookie store must be a
-[`tough-cookie`](https://github.com/SalesforceEng/tough-cookie)
-store and it must support synchronous operations; see the
-[`CookieStore` API docs](https://github.com/SalesforceEng/tough-cookie#api)
-for details.
-
-To inspect your cookie jar after a request:
-
-```js
-const j = request.jar()
-const url = 'http://www.google.com'
-request({url: url, jar: j}, function () {
-  const cookie_string = j.getCookieString(url); // "key1=value1; key2=value2; ..."
-  const cookies = j.getCookies(url);
-  // [{key: 'key1', value: 'value1', domain: "www.google.com", ...}, ...]
-}) -``` - -[back to top](#table-of-contents) diff --git a/node_modules/form-data/License b/node_modules/request/node_modules/form-data/License similarity index 100% rename from node_modules/form-data/License rename to node_modules/request/node_modules/form-data/License diff --git a/node_modules/form-data/lib/browser.js b/node_modules/request/node_modules/form-data/lib/browser.js similarity index 100% rename from node_modules/form-data/lib/browser.js rename to node_modules/request/node_modules/form-data/lib/browser.js diff --git a/node_modules/form-data/lib/form_data.js b/node_modules/request/node_modules/form-data/lib/form_data.js similarity index 100% rename from node_modules/form-data/lib/form_data.js rename to node_modules/request/node_modules/form-data/lib/form_data.js diff --git a/node_modules/form-data/lib/populate.js b/node_modules/request/node_modules/form-data/lib/populate.js similarity index 100% rename from node_modules/form-data/lib/populate.js rename to node_modules/request/node_modules/form-data/lib/populate.js diff --git a/node_modules/form-data/package.json b/node_modules/request/node_modules/form-data/package.json similarity index 100% rename from node_modules/form-data/package.json rename to node_modules/request/node_modules/form-data/package.json diff --git a/node_modules/form-data/yarn.lock b/node_modules/request/node_modules/form-data/yarn.lock similarity index 100% rename from node_modules/form-data/yarn.lock rename to node_modules/request/node_modules/form-data/yarn.lock diff --git a/node_modules/request/node_modules/tough-cookie/README.md b/node_modules/request/node_modules/tough-cookie/README.md deleted file mode 100644 index 656a25556c3c5..0000000000000 --- a/node_modules/request/node_modules/tough-cookie/README.md +++ /dev/null @@ -1,527 +0,0 @@ -[RFC6265](https://tools.ietf.org/html/rfc6265) Cookies and CookieJar for Node.js - -[![npm package](https://nodei.co/npm/tough-cookie.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/tough-cookie/) - -[![Build Status](https://travis-ci.org/salesforce/tough-cookie.png?branch=master)](https://travis-ci.org/salesforce/tough-cookie) - -# Synopsis - -``` javascript -var tough = require('tough-cookie'); -var Cookie = tough.Cookie; -var cookie = Cookie.parse(header); -cookie.value = 'somethingdifferent'; -header = cookie.toString(); - -var cookiejar = new tough.CookieJar(); -cookiejar.setCookie(cookie, 'http://currentdomain.example.com/path', cb); -// ... -cookiejar.getCookies('http://example.com/otherpath',function(err,cookies) { - res.headers['cookie'] = cookies.join('; '); -}); -``` - -# Installation - -It's _so_ easy! - -`npm install tough-cookie` - -Why the name? NPM modules `cookie`, `cookies` and `cookiejar` were already taken. - -## Version Support - -Support for versions of node.js will follow that of the [request](https://www.npmjs.com/package/request) module. - -# API - -## tough - -Functions on the module you get from `require('tough-cookie')`. All can be used as pure functions and don't need to be "bound". - -**Note**: prior to 1.0.x, several of these functions took a `strict` parameter. This has since been removed from the API as it was no longer necessary. - -### `parseDate(string)` - -Parse a cookie date string into a `Date`. Parses according to RFC6265 Section 5.1.1, not `Date.parse()`. - -### `formatDate(date)` - -Format a Date into a RFC1123 string (the RFC6265-recommended format). - -### `canonicalDomain(str)` - -Transforms a domain-name into a canonical domain-name. 
The canonical domain-name is a trimmed, lowercased, stripped-of-leading-dot and optionally punycode-encoded domain-name (Section 5.1.2 of RFC6265). For the most part, this function is idempotent (can be run again on its output without ill effects). - -### `domainMatch(str,domStr[,canonicalize=true])` - -Answers "does this real domain match the domain in a cookie?". The `str` is the "current" domain-name and the `domStr` is the "cookie" domain-name. Matches according to RFC6265 Section 5.1.3, but it helps to think of it as a "suffix match". - -The `canonicalize` parameter will run the other two parameters through `canonicalDomain` or not. - -### `defaultPath(path)` - -Given a current request/response path, gives the Path apropriate for storing in a cookie. This is basically the "directory" of a "file" in the path, but is specified by Section 5.1.4 of the RFC. - -The `path` parameter MUST be _only_ the pathname part of a URI (i.e. excludes the hostname, query, fragment, etc.). This is the `.pathname` property of node's `uri.parse()` output. - -### `pathMatch(reqPath,cookiePath)` - -Answers "does the request-path path-match a given cookie-path?" as per RFC6265 Section 5.1.4. Returns a boolean. - -This is essentially a prefix-match where `cookiePath` is a prefix of `reqPath`. - -### `parse(cookieString[, options])` - -alias for `Cookie.parse(cookieString[, options])` - -### `fromJSON(string)` - -alias for `Cookie.fromJSON(string)` - -### `getPublicSuffix(hostname)` - -Returns the public suffix of this hostname. The public suffix is the shortest domain-name upon which a cookie can be set. Returns `null` if the hostname cannot have cookies set for it. - -For example: `www.example.com` and `www.subdomain.example.com` both have public suffix `example.com`. - -For further information, see http://publicsuffix.org/. This module derives its list from that site. This call is currently a wrapper around [`psl`](https://www.npmjs.com/package/psl)'s [get() method](https://www.npmjs.com/package/psl#pslgetdomain). - -### `cookieCompare(a,b)` - -For use with `.sort()`, sorts a list of cookies into the recommended order given in the RFC (Section 5.4 step 2). The sort algorithm is, in order of precedence: - -* Longest `.path` -* oldest `.creation` (which has a 1ms precision, same as `Date`) -* lowest `.creationIndex` (to get beyond the 1ms precision) - -``` javascript -var cookies = [ /* unsorted array of Cookie objects */ ]; -cookies = cookies.sort(cookieCompare); -``` - -**Note**: Since JavaScript's `Date` is limited to a 1ms precision, cookies within the same milisecond are entirely possible. This is especially true when using the `now` option to `.setCookie()`. The `.creationIndex` property is a per-process global counter, assigned during construction with `new Cookie()`. This preserves the spirit of the RFC sorting: older cookies go first. This works great for `MemoryCookieStore`, since `Set-Cookie` headers are parsed in order, but may not be so great for distributed systems. Sophisticated `Store`s may wish to set this to some other _logical clock_ such that if cookies A and B are created in the same millisecond, but cookie A is created before cookie B, then `A.creationIndex < B.creationIndex`. If you want to alter the global counter, which you probably _shouldn't_ do, it's stored in `Cookie.cookiesCreated`. - -### `permuteDomain(domain)` - -Generates a list of all possible domains that `domainMatch()` the parameter. May be handy for implementing cookie stores. 
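-
-To give a quick feel for how a few of these helpers behave, here is a small illustrative sketch (the results in the comments follow the RFC6265 rules described above, rather than output captured from a specific release):
-
-``` javascript
-var tough = require('tough-cookie');
-
-// canonicalDomain: trim, lowercase and strip a leading dot
-tough.canonicalDomain('.Example.COM');               // 'example.com'
-
-// domainMatch: "suffix match" of a request domain against a cookie domain
-tough.domainMatch('www.example.com', 'example.com'); // true
-tough.domainMatch('example.com', 'www.example.com'); // false
-
-// pathMatch: prefix match of a request path against a cookie path
-tough.pathMatch('/some/path', '/some');              // true
-
-// getPublicSuffix: shortest domain upon which a cookie can be set
-tough.getPublicSuffix('www.subdomain.example.com');  // 'example.com'
-```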
- -### `permutePath(path)` - -Generates a list of all possible paths that `pathMatch()` the parameter. May be handy for implementing cookie stores. - - -## Cookie - -Exported via `tough.Cookie`. - -### `Cookie.parse(cookieString[, options])` - -Parses a single Cookie or Set-Cookie HTTP header into a `Cookie` object. Returns `undefined` if the string can't be parsed. - -The options parameter is not required and currently has only one property: - - * _loose_ - boolean - if `true` enable parsing of key-less cookies like `=abc` and `=`, which are not RFC-compliant. - -If options is not an object, it is ignored, which means you can use `Array#map` with it. - -Here's how to process the Set-Cookie header(s) on a node HTTP/HTTPS response: - -``` javascript -if (res.headers['set-cookie'] instanceof Array) - cookies = res.headers['set-cookie'].map(Cookie.parse); -else - cookies = [Cookie.parse(res.headers['set-cookie'])]; -``` - -_Note:_ in version 2.3.3, tough-cookie limited the number of spaces before the `=` to 256 characters. This limitation has since been removed. -See [Issue 92](https://github.com/salesforce/tough-cookie/issues/92) - -### Properties - -Cookie object properties: - - * _key_ - string - the name or key of the cookie (default "") - * _value_ - string - the value of the cookie (default "") - * _expires_ - `Date` - if set, the `Expires=` attribute of the cookie (defaults to the string `"Infinity"`). See `setExpires()` - * _maxAge_ - seconds - if set, the `Max-Age=` attribute _in seconds_ of the cookie. May also be set to strings `"Infinity"` and `"-Infinity"` for non-expiry and immediate-expiry, respectively. See `setMaxAge()` - * _domain_ - string - the `Domain=` attribute of the cookie - * _path_ - string - the `Path=` of the cookie - * _secure_ - boolean - the `Secure` cookie flag - * _httpOnly_ - boolean - the `HttpOnly` cookie flag - * _extensions_ - `Array` - any unrecognized cookie attributes as strings (even if equal-signs inside) - * _creation_ - `Date` - when this cookie was constructed - * _creationIndex_ - number - set at construction, used to provide greater sort precision (please see `cookieCompare(a,b)` for a full explanation) - -After a cookie has been passed through `CookieJar.setCookie()` it will have the following additional attributes: - - * _hostOnly_ - boolean - is this a host-only cookie (i.e. no Domain field was set, but was instead implied) - * _pathIsDefault_ - boolean - if true, there was no Path field on the cookie and `defaultPath()` was used to derive one. - * _creation_ - `Date` - **modified** from construction to when the cookie was added to the jar - * _lastAccessed_ - `Date` - last time the cookie got accessed. Will affect cookie cleaning once implemented. Using `cookiejar.getCookies(...)` will update this attribute. - -### `Cookie([{properties}])` - -Receives an options object that can contain any of the above Cookie properties, uses the default for unspecified properties. - -### `.toString()` - -encode to a Set-Cookie header value. The Expires cookie field is set using `formatDate()`, but is omitted entirely if `.expires` is `Infinity`. - -### `.cookieString()` - -encode to a Cookie header value (i.e. the `.key` and `.value` properties joined with '='). - -### `.setExpires(String)` - -sets the expiry based on a date-string passed through `parseDate()`. If parseDate returns `null` (i.e. can't parse this date string), `.expires` is set to `"Infinity"` (a string) is set. - -### `.setMaxAge(number)` - -sets the maxAge in seconds. 
Coerces `-Infinity` to `"-Infinity"` and `Infinity` to `"Infinity"` so it JSON serializes correctly. - -### `.expiryTime([now=Date.now()])` - -### `.expiryDate([now=Date.now()])` - -expiryTime() Computes the absolute unix-epoch milliseconds that this cookie expires. expiryDate() works similarly, except it returns a `Date` object. Note that in both cases the `now` parameter should be milliseconds. - -Max-Age takes precedence over Expires (as per the RFC). The `.creation` attribute -- or, by default, the `now` parameter -- is used to offset the `.maxAge` attribute. - -If Expires (`.expires`) is set, that's returned. - -Otherwise, `expiryTime()` returns `Infinity` and `expiryDate()` returns a `Date` object for "Tue, 19 Jan 2038 03:14:07 GMT" (latest date that can be expressed by a 32-bit `time_t`; the common limit for most user-agents). - -### `.TTL([now=Date.now()])` - -compute the TTL relative to `now` (milliseconds). The same precedence rules as for `expiryTime`/`expiryDate` apply. - -The "number" `Infinity` is returned for cookies without an explicit expiry and `0` is returned if the cookie is expired. Otherwise a time-to-live in milliseconds is returned. - -### `.canonicalizedDomain()` - -### `.cdomain()` - -return the canonicalized `.domain` field. This is lower-cased and punycode (RFC3490) encoded if the domain has any non-ASCII characters. - -### `.toJSON()` - -For convenience in using `JSON.serialize(cookie)`. Returns a plain-old `Object` that can be JSON-serialized. - -Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are exported in ISO format (`.toISOString()`). - -**NOTE**: Custom `Cookie` properties will be discarded. In tough-cookie 1.x, since there was no `.toJSON` method explicitly defined, all enumerable properties were captured. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array. - -### `Cookie.fromJSON(strOrObj)` - -Does the reverse of `cookie.toJSON()`. If passed a string, will `JSON.parse()` that first. - -Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are parsed via `Date.parse()`, not the tough-cookie `parseDate`, since it's JavaScript/JSON-y timestamps being handled at this layer. - -Returns `null` upon JSON parsing error. - -### `.clone()` - -Does a deep clone of this cookie, exactly implemented as `Cookie.fromJSON(cookie.toJSON())`. - -### `.validate()` - -Status: *IN PROGRESS*. Works for a few things, but is by no means comprehensive. - -validates cookie attributes for semantic correctness. Useful for "lint" checking any Set-Cookie headers you generate. For now, it returns a boolean, but eventually could return a reason string -- you can future-proof with this construct: - -``` javascript -if (cookie.validate() === true) { - // it's tasty -} else { - // yuck! -} -``` - - -## CookieJar - -Exported via `tough.CookieJar`. - -### `CookieJar([store],[options])` - -Simply use `new CookieJar()`. If you'd like to use a custom store, pass that to the constructor otherwise a `MemoryCookieStore` will be created and used. - -The `options` object can be omitted and can have the following properties: - - * _rejectPublicSuffixes_ - boolean - default `true` - reject cookies with domains like "com" and "co.uk" - * _looseMode_ - boolean - default `false` - accept malformed cookies like `bar` and `=bar`, which have an implied empty name. - This is not in the standard, but is used sometimes on the web and is accepted by (most) browsers. 
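-
-As a small illustration, a jar can be created with everything left at the defaults or with these options spelled out (a minimal sketch, assuming only the constructor options above and the exported `MemoryCookieStore` described later in this document):
-
-``` javascript
-var tough = require('tough-cookie');
-
-// defaults: MemoryCookieStore, rejectPublicSuffixes: true, looseMode: false
-var jar = new tough.CookieJar();
-
-// explicit store, plus loose parsing of malformed cookies such as '=abc'
-var looseJar = new tough.CookieJar(new tough.MemoryCookieStore(),
-                                   { looseMode: true });
-```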
- -Since eventually this module would like to support database/remote/etc. CookieJars, continuation passing style is used for CookieJar methods. - -### `.setCookie(cookieOrString, currentUrl, [{options},] cb(err,cookie))` - -Attempt to set the cookie in the cookie jar. If the operation fails, an error will be given to the callback `cb`, otherwise the cookie is passed through. The cookie will have updated `.creation`, `.lastAccessed` and `.hostOnly` properties. - -The `options` object can be omitted and can have the following properties: - - * _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies. - * _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`. - * _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies - * _ignoreError_ - boolean - default `false` - silently ignore things like parse errors and invalid domains. `Store` errors aren't ignored by this option. - -As per the RFC, the `.hostOnly` property is set if there was no "Domain=" parameter in the cookie string (or `.domain` was null on the Cookie object). The `.domain` property is set to the fully-qualified hostname of `currentUrl` in this case. Matching this cookie requires an exact hostname match (not a `domainMatch` as per usual). - -### `.setCookieSync(cookieOrString, currentUrl, [{options}])` - -Synchronous version of `setCookie`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). - -### `.getCookies(currentUrl, [{options},] cb(err,cookies))` - -Retrieve the list of cookies that can be sent in a Cookie header for the current url. - -If an error is encountered, that's passed as `err` to the callback, otherwise an `Array` of `Cookie` objects is passed. The array is sorted with `cookieCompare()` unless the `{sort:false}` option is given. - -The `options` object can be omitted and can have the following properties: - - * _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies. - * _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`. - * _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies - * _expire_ - boolean - default `true` - perform expiry-time checking of cookies and asynchronously remove expired cookies from the store. Using `false` will return expired cookies and **not** remove them from the store (which is useful for replaying Set-Cookie headers, potentially). - * _allPaths_ - boolean - default `false` - if `true`, do not scope cookies by path. The default uses RFC-compliant path scoping. **Note**: may not be supported by the underlying store (the default `MemoryCookieStore` supports it). - -The `.lastAccessed` property of the returned cookies will have been updated. - -### `.getCookiesSync(currentUrl, [{options}])` - -Synchronous version of `getCookies`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). - -### `.getCookieString(...)` - -Accepts the same options as `.getCookies()` but passes a string suitable for a Cookie header rather than an array to the callback. Simply maps the `Cookie` array via `.cookieString()`. - -### `.getCookieStringSync(...)` - -Synchronous version of `getCookieString`; only works with synchronous stores (e.g. 
the default `MemoryCookieStore`). - -### `.getSetCookieStrings(...)` - -Returns an array of strings suitable for **Set-Cookie** headers. Accepts the same options as `.getCookies()`. Simply maps the cookie array via `.toString()`. - -### `.getSetCookieStringsSync(...)` - -Synchronous version of `getSetCookieStrings`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). - -### `.serialize(cb(err,serializedObject))` - -Serialize the Jar if the underlying store supports `.getAllCookies`. - -**NOTE**: Custom `Cookie` properties will be discarded. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array. - -See [Serialization Format]. - -### `.serializeSync()` - -Sync version of .serialize - -### `.toJSON()` - -Alias of .serializeSync() for the convenience of `JSON.stringify(cookiejar)`. - -### `CookieJar.deserialize(serialized, [store], cb(err,object))` - -A new Jar is created and the serialized Cookies are added to the underlying store. Each `Cookie` is added via `store.putCookie` in the order in which they appear in the serialization. - -The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created. - -As a convenience, if `serialized` is a string, it is passed through `JSON.parse` first. If that throws an error, this is passed to the callback. - -### `CookieJar.deserializeSync(serialized, [store])` - -Sync version of `.deserialize`. _Note_ that the `store` must be synchronous for this to work. - -### `CookieJar.fromJSON(string)` - -Alias of `.deserializeSync` to provide consistency with `Cookie.fromJSON()`. - -### `.clone([store,]cb(err,newJar))` - -Produces a deep clone of this jar. Modifications to the original won't affect the clone, and vice versa. - -The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created. Transferring between store types is supported so long as the source implements `.getAllCookies()` and the destination implements `.putCookie()`. - -### `.cloneSync([store])` - -Synchronous version of `.clone`, returning a new `CookieJar` instance. - -The `store` argument is optional, but must be a _synchronous_ `Store` instance if specified. If not passed, a new instance of `MemoryCookieStore` is used. - -The _source_ and _destination_ must both be synchronous `Store`s. If one or both stores are asynchronous, use `.clone` instead. Recall that `MemoryCookieStore` supports both synchronous and asynchronous API calls. - -### `.removeAllCookies(cb(err))` - -Removes all cookies from the jar. - -This is a new backwards-compatible feature of `tough-cookie` version 2.5, so not all Stores will implement it efficiently. For Stores that do not implement `removeAllCookies`, the fallback is to call `removeCookie` after `getAllCookies`. If `getAllCookies` fails or isn't implemented in the Store, that error is returned. If one or more of the `removeCookie` calls fail, only the first error is returned. - -### `.removeAllCookiesSync()` - -Sync version of `.removeAllCookies()` - -## Store - -Base class for CookieJar stores. Available as `tough.Store`. - -## Store API - -The storage model for each `CookieJar` instance can be replaced with a custom implementation. The default is `MemoryCookieStore` which can be found in the `lib/memstore.js` file. The API uses continuation-passing-style to allow for asynchronous stores. 
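-
-Before the individual methods are specified, here is a rough, hypothetical skeleton of a custom store (only two of the required methods are shown; the class name and internal index layout are invented for the example):
-
-``` javascript
-var util = require('util');
-var Store = require('tough-cookie').Store;
-
-function TinyStore() {
-  Store.call(this);
-  this.synchronous = false;      // callers must use the async CookieJar API
-  this.idx = {};                 // domain -> path -> key -> Cookie
-}
-util.inherits(TinyStore, Store);
-
-TinyStore.prototype.findCookie = function (domain, path, key, cb) {
-  var cookie = ((this.idx[domain] || {})[path] || {})[key] || null;
-  cb(null, cookie);              // pass null, not an error, when nothing is found
-};
-
-TinyStore.prototype.putCookie = function (cookie, cb) {
-  this.idx[cookie.domain] = this.idx[cookie.domain] || {};
-  this.idx[cookie.domain][cookie.path] = this.idx[cookie.domain][cookie.path] || {};
-  this.idx[cookie.domain][cookie.path][cookie.key] = cookie;
-  cb(null);
-};
-```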
-
-Stores should inherit from the base `Store` class, which is available as `require('tough-cookie').Store`.
-
-Stores are asynchronous by default, but if `store.synchronous` is set to `true`, then the `*Sync` methods of the containing `CookieJar` can be used (however, the continuation-passing-style asynchronous API is still required).
-
-All `domain` parameters will have been normalized before calling.
-
-The Cookie store must have all of the following methods.
-
-### `store.findCookie(domain, path, key, cb(err,cookie))`
-
-Retrieve a cookie with the given domain, path and key (a.k.a. name). The RFC maintains that exactly one of these cookies should exist in a store. If the store is using versioning, this means that the latest/newest such cookie should be returned.
-
-Callback takes an error and the resulting `Cookie` object. If no cookie is found then `null` MUST be passed instead (i.e. not an error).
-
-### `store.findCookies(domain, path, cb(err,cookies))`
-
-Locates cookies matching the given domain and path. This is most often called in the context of `cookiejar.getCookies()` above.
-
-If no cookies are found, the callback MUST be passed an empty array.
-
-The resulting list will be checked for applicability to the current request according to the RFC (domain-match, path-match, http-only-flag, secure-flag, expiry, etc.), so it's OK to use an optimistic search algorithm when implementing this method. However, the search algorithm used SHOULD try to find cookies that `domainMatch()` the domain and `pathMatch()` the path in order to limit the amount of checking that needs to be done.
-
-As of version 0.9.12, the `allPaths` option to `cookiejar.getCookies()` above will cause the path here to be `null`. If the path is `null`, path-matching MUST NOT be performed (i.e. domain-matching only).
-
-### `store.putCookie(cookie, cb(err))`
-
-Adds a new cookie to the store. The implementation SHOULD replace any existing cookie with the same `.domain`, `.path`, and `.key` properties -- depending on the nature of the implementation, it's possible that between the call to `fetchCookie` and `putCookie`, a duplicate `putCookie` can occur.
-
-The `cookie` object MUST NOT be modified; the caller will have already updated the `.creation` and `.lastAccessed` properties.
-
-Pass an error if the cookie cannot be stored.
-
-### `store.updateCookie(oldCookie, newCookie, cb(err))`
-
-Update an existing cookie. The implementation MUST update the `.value` for a cookie with the same `.domain`, `.path` and `.key`. The implementation SHOULD check that the old value in the store is equivalent to `oldCookie` - how the conflict is resolved is up to the store.
-
-The `.lastAccessed` property will always be different between the two objects (to the precision possible via JavaScript's clock). Both `.creation` and `.creationIndex` are guaranteed to be the same. Stores MAY ignore or defer the `.lastAccessed` change at the cost of affecting how cookies are selected for automatic deletion (e.g., least-recently-used, which is up to the store to implement).
-
-Stores may wish to optimize changing the `.value` of the cookie in the store versus storing a new cookie. If the implementation doesn't define this method, a stub that calls `putCookie(newCookie,cb)` will be added to the store object.
-
-The `newCookie` and `oldCookie` objects MUST NOT be modified.
-
-Pass an error if the newCookie cannot be stored.
-
-### `store.removeCookie(domain, path, key, cb(err))`
-
-Remove a cookie from the store (see notes on `findCookie` about the uniqueness constraint).
- -The implementation MUST NOT pass an error if the cookie doesn't exist; only pass an error due to the failure to remove an existing cookie. - -### `store.removeCookies(domain, path, cb(err))` - -Removes matching cookies from the store. The `path` parameter is optional, and if missing means all paths in a domain should be removed. - -Pass an error ONLY if removing any existing cookies failed. - -### `store.removeAllCookies(cb(err))` - -_Optional_. Removes all cookies from the store. - -Pass an error if one or more cookies can't be removed. - -**Note**: New method as of `tough-cookie` version 2.5, so not all Stores will implement this, plus some stores may choose not to implement this. - -### `store.getAllCookies(cb(err, cookies))` - -_Optional_. Produces an `Array` of all cookies during `jar.serialize()`. The items in the array can be true `Cookie` objects or generic `Object`s with the [Serialization Format] data structure. - -Cookies SHOULD be returned in creation order to preserve sorting via `compareCookies()`. For reference, `MemoryCookieStore` will sort by `.creationIndex` since it uses true `Cookie` objects internally. If you don't return the cookies in creation order, they'll still be sorted by creation time, but this only has a precision of 1ms. See `compareCookies` for more detail. - -Pass an error if retrieval fails. - -**Note**: not all Stores can implement this due to technical limitations, so it is optional. - -## MemoryCookieStore - -Inherits from `Store`. - -A just-in-memory CookieJar synchronous store implementation, used by default. Despite being a synchronous implementation, it's usable with both the synchronous and asynchronous forms of the `CookieJar` API. Supports serialization, `getAllCookies`, and `removeAllCookies`. - -## Community Cookie Stores - -These are some Store implementations authored and maintained by the community. They aren't official and we don't vouch for them but you may be interested to have a look: - -- [`db-cookie-store`](https://github.com/JSBizon/db-cookie-store): SQL including SQLite-based databases -- [`file-cookie-store`](https://github.com/JSBizon/file-cookie-store): Netscape cookie file format on disk -- [`redis-cookie-store`](https://github.com/benkroeger/redis-cookie-store): Redis -- [`tough-cookie-filestore`](https://github.com/mitsuru/tough-cookie-filestore): JSON on disk -- [`tough-cookie-web-storage-store`](https://github.com/exponentjs/tough-cookie-web-storage-store): DOM localStorage and sessionStorage - - -# Serialization Format - -**NOTE**: if you want to have custom `Cookie` properties serialized, add the property name to `Cookie.serializableProperties`. - -```js - { - // The version of tough-cookie that serialized this jar. - version: 'tough-cookie@1.x.y', - - // add the store type, to make humans happy: - storeType: 'MemoryCookieStore', - - // CookieJar configuration: - rejectPublicSuffixes: true, - // ... future items go here - - // Gets filled from jar.store.getAllCookies(): - cookies: [ - { - key: 'string', - value: 'string', - // ... - /* other Cookie.serializableProperties go here */ - } - ] - } -``` - -# Copyright and License - -BSD-3-Clause: - -```text - Copyright (c) 2015, Salesforce.com, Inc. - All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. 
Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - - 3. Neither the name of Salesforce.com nor the names of its contributors may - be used to endorse or promote products derived from this software without - specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - POSSIBILITY OF SUCH DAMAGE. -``` diff --git a/node_modules/resolve/.editorconfig b/node_modules/resolve/.editorconfig deleted file mode 100644 index d63f0bb6cdfb9..0000000000000 --- a/node_modules/resolve/.editorconfig +++ /dev/null @@ -1,37 +0,0 @@ -root = true - -[*] -indent_style = space -indent_size = 2 -end_of_line = lf -charset = utf-8 -trim_trailing_whitespace = true -insert_final_newline = true -max_line_length = 200 - -[*.js] -block_comment_start = /* -block_comment = * -block_comment_end = */ - -[*.yml] -indent_size = 1 - -[package.json] -indent_style = tab - -[lib/core.json] -indent_style = tab - -[CHANGELOG.md] -indent_style = space -indent_size = 2 - -[{*.json,Makefile}] -max_line_length = off - -[test/{dotdot,resolver,module_dir,multirepo,node_path,pathfilter,precedence}/**/*] -indent_style = off -indent_size = off -max_line_length = off -insert_final_newline = off diff --git a/node_modules/resolve/.eslintignore b/node_modules/resolve/.eslintignore deleted file mode 100644 index 3c3629e647f5d..0000000000000 --- a/node_modules/resolve/.eslintignore +++ /dev/null @@ -1 +0,0 @@ -node_modules diff --git a/node_modules/resolve/test/resolver/symlinked/_/symlink_target/.gitkeep b/node_modules/resolve/test/resolver/symlinked/_/symlink_target/.gitkeep deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/node_modules/retry/.npmignore b/node_modules/retry/.npmignore deleted file mode 100644 index 432f2855d6839..0000000000000 --- a/node_modules/retry/.npmignore +++ /dev/null @@ -1,3 +0,0 @@ -/node_modules/* -npm-debug.log -coverage diff --git a/node_modules/retry/.travis.yml b/node_modules/retry/.travis.yml deleted file mode 100644 index bcde2122b9006..0000000000000 --- a/node_modules/retry/.travis.yml +++ /dev/null @@ -1,15 +0,0 @@ -language: node_js -node_js: - - "4" -before_install: - - pip install --user codecov -after_success: - - codecov --file coverage/lcov.info --disable search -# travis encrypt [subdomain]:[api token]@[room id] -# notifications: -# email: false -# campfire: -# rooms: -# secure: xyz -# on_failure: always -# on_success: always diff --git a/node_modules/retry/README.md b/node_modules/retry/README.md deleted file mode 100644 index 16e28ec267d6d..0000000000000 --- a/node_modules/retry/README.md +++ /dev/null @@ -1,227 +0,0 @@ -<!-- badges/ --> -[![Build 
Status](https://secure.travis-ci.org/tim-kos/node-retry.png?branch=master)](http://travis-ci.org/tim-kos/node-retry "Check this project's build status on TravisCI") -[![codecov](https://codecov.io/gh/tim-kos/node-retry/branch/master/graph/badge.svg)](https://codecov.io/gh/tim-kos/node-retry) -<!-- /badges --> - -# retry - -Abstraction for exponential and custom retry strategies for failed operations. - -## Installation - - npm install retry - -## Current Status - -This module has been tested and is ready to be used. - -## Tutorial - -The example below will retry a potentially failing `dns.resolve` operation -`10` times using an exponential backoff strategy. With the default settings, this -means the last attempt is made after `17 minutes and 3 seconds`. - -``` javascript -var dns = require('dns'); -var retry = require('retry'); - -function faultTolerantResolve(address, cb) { - var operation = retry.operation(); - - operation.attempt(function(currentAttempt) { - dns.resolve(address, function(err, addresses) { - if (operation.retry(err)) { - return; - } - - cb(err ? operation.mainError() : null, addresses); - }); - }); -} - -faultTolerantResolve('nodejs.org', function(err, addresses) { - console.log(err, addresses); -}); -``` - -Of course you can also configure the factors that go into the exponential -backoff. See the API documentation below for all available settings. -currentAttempt is an int representing the number of attempts so far. - -``` javascript -var operation = retry.operation({ - retries: 5, - factor: 3, - minTimeout: 1 * 1000, - maxTimeout: 60 * 1000, - randomize: true, -}); -``` - -## API - -### retry.operation([options]) - -Creates a new `RetryOperation` object. `options` is the same as `retry.timeouts()`'s `options`, with two additions: - -* `forever`: Whether to retry forever, defaults to `false`. -* `unref`: Whether to [unref](https://nodejs.org/api/timers.html#timers_unref) the setTimeout's, defaults to `false`. -* `maxRetryTime`: The maximum time (in milliseconds) that the retried operation is allowed to run. Default is `Infinity`. - -### retry.timeouts([options]) - -Returns an array of timeouts. All time `options` and return values are in -milliseconds. If `options` is an array, a copy of that array is returned. - -`options` is a JS object that can contain any of the following keys: - -* `retries`: The maximum amount of times to retry the operation. Default is `10`. Seting this to `1` means `do it once, then retry it once`. -* `factor`: The exponential factor to use. Default is `2`. -* `minTimeout`: The number of milliseconds before starting the first retry. Default is `1000`. -* `maxTimeout`: The maximum number of milliseconds between two retries. Default is `Infinity`. -* `randomize`: Randomizes the timeouts by multiplying with a factor between `1` to `2`. Default is `false`. - -The formula used to calculate the individual timeouts is: - -``` -Math.min(random * minTimeout * Math.pow(factor, attempt), maxTimeout) -``` - -Have a look at [this article][article] for a better explanation of approach. - -If you want to tune your `factor` / `times` settings to attempt the last retry -after a certain amount of time, you can use wolfram alpha. For example in order -to tune for `10` attempts in `5 minutes`, you can use this equation: - -![screenshot](https://github.com/tim-kos/node-retry/raw/master/equation.gif) - -Explaining the various values from left to right: - -* `k = 0 ... 
9`: The `retries` value (10) -* `1000`: The `minTimeout` value in ms (1000) -* `x^k`: No need to change this, `x` will be your resulting factor -* `5 * 60 * 1000`: The desired total amount of time for retrying in ms (5 minutes) - -To make this a little easier for you, use wolfram alpha to do the calculations: - -<http://www.wolframalpha.com/input/?i=Sum%5B1000*x^k%2C+{k%2C+0%2C+9}%5D+%3D+5+*+60+*+1000> - -[article]: http://dthain.blogspot.com/2009/02/exponential-backoff-in-distributed.html - -### retry.createTimeout(attempt, opts) - -Returns a new `timeout` (integer in milliseconds) based on the given parameters. - -`attempt` is an integer representing for which retry the timeout should be calculated. If your retry operation was executed 4 times you had one attempt and 3 retries. If you then want to calculate a new timeout, you should set `attempt` to 4 (attempts are zero-indexed). - -`opts` can include `factor`, `minTimeout`, `randomize` (boolean) and `maxTimeout`. They are documented above. - -`retry.createTimeout()` is used internally by `retry.timeouts()` and is public for you to be able to create your own timeouts for reinserting an item, see [issue #13](https://github.com/tim-kos/node-retry/issues/13). - -### retry.wrap(obj, [options], [methodNames]) - -Wrap all functions of the `obj` with retry. Optionally you can pass operation options and -an array of method names which need to be wrapped. - -``` -retry.wrap(obj) - -retry.wrap(obj, ['method1', 'method2']) - -retry.wrap(obj, {retries: 3}) - -retry.wrap(obj, {retries: 3}, ['method1', 'method2']) -``` -The `options` object can take any options that the usual call to `retry.operation` can take. - -### new RetryOperation(timeouts, [options]) - -Creates a new `RetryOperation` where `timeouts` is an array where each value is -a timeout given in milliseconds. - -Available options: -* `forever`: Whether to retry forever, defaults to `false`. -* `unref`: Wether to [unref](https://nodejs.org/api/timers.html#timers_unref) the setTimeout's, defaults to `false`. - -If `forever` is true, the following changes happen: -* `RetryOperation.errors()` will only output an array of one item: the last error. -* `RetryOperation` will repeatedly use the `timeouts` array. Once all of its timeouts have been used up, it restarts with the first timeout, then uses the second and so on. - -#### retryOperation.errors() - -Returns an array of all errors that have been passed to `retryOperation.retry()` so far. The -returning array has the errors ordered chronologically based on when they were passed to -`retryOperation.retry()`, which means the first passed error is at index zero and the last is -at the last index. - -#### retryOperation.mainError() - -A reference to the error object that occured most frequently. Errors are -compared using the `error.message` property. - -If multiple error messages occured the same amount of time, the last error -object with that message is returned. - -If no errors occured so far, the value is `null`. - -#### retryOperation.attempt(fn, timeoutOps) - -Defines the function `fn` that is to be retried and executes it for the first -time right away. The `fn` function can receive an optional `currentAttempt` callback that represents the number of attempts to execute `fn` so far. - -Optionally defines `timeoutOps` which is an object having a property `timeout` in miliseconds and a property `cb` callback function. -Whenever your retry operation takes longer than `timeout` to execute, the timeout callback function `cb` is called. 
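For illustration, here is a minimal sketch (not part of the original README) that combines `attempt()` with the optional `timeoutOps` argument described above. `lookupSomething` is a hypothetical async function standing in for the operation being retried:

```js
var retry = require('retry');

function lookupWithRetry(key, callback) {
  var operation = retry.operation({ retries: 3, minTimeout: 500 });

  operation.attempt(function (currentAttempt) {
    // hypothetical async operation being retried
    lookupSomething(key, function (err, value) {
      if (operation.retry(err)) {
        return; // a retry has been scheduled, nothing more to do here
      }
      // if every attempt failed, report the most frequent error
      callback(err ? operation.mainError() : null, value);
    });
  }, {
    timeout: 5000, // per-attempt time budget in milliseconds
    cb: function () {
      console.warn('current attempt is taking longer than 5 seconds');
    }
  });
}
```

The shape of the call mirrors the `attempt(fn, timeoutOps)` signature documented above; only the helper name and the concrete option values are invented for the example.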
- - -#### retryOperation.try(fn) - -This is an alias for `retryOperation.attempt(fn)`. This is deprecated. Please use `retryOperation.attempt(fn)` instead. - -#### retryOperation.start(fn) - -This is an alias for `retryOperation.attempt(fn)`. This is deprecated. Please use `retryOperation.attempt(fn)` instead. - -#### retryOperation.retry(error) - -Returns `false` when no `error` value is given, or the maximum amount of retries -has been reached. - -Otherwise it returns `true`, and retries the operation after the timeout for -the current attempt number. - -#### retryOperation.stop() - -Allows you to stop the operation being retried. Useful for aborting the operation on a fatal error etc. - -#### retryOperation.reset() - -Resets the internal state of the operation object, so that you can call `attempt()` again as if this was a new operation object. - -#### retryOperation.attempts() - -Returns an int representing the number of attempts it took to call `fn` before it was successful. - -## License - -retry is licensed under the MIT license. - - -# Changelog - -0.10.0 Adding `stop` functionality, thanks to @maxnachlinger. - -0.9.0 Adding `unref` functionality, thanks to @satazor. - -0.8.0 Implementing retry.wrap. - -0.7.0 Some bug fixes and made retry.createTimeout() public. Fixed issues [#10](https://github.com/tim-kos/node-retry/issues/10), [#12](https://github.com/tim-kos/node-retry/issues/12), and [#13](https://github.com/tim-kos/node-retry/issues/13). - -0.6.0 Introduced optional timeOps parameter for the attempt() function which is an object having a property timeout in milliseconds and a property cb callback function. Whenever your retry operation takes longer than timeout to execute, the timeout callback function cb is called. - -0.5.0 Some minor refactoring. - -0.4.0 Changed retryOperation.try() to retryOperation.attempt(). Deprecated the aliases start() and try() for it. - -0.3.0 Added retryOperation.start() which is an alias for retryOperation.try(). - -0.2.0 Added attempts() function and parameter to retryOperation.try() representing the number of attempts it took to call fn(). diff --git a/node_modules/rimraf/CHANGELOG.md b/node_modules/rimraf/CHANGELOG.md deleted file mode 100644 index f116f1414d76d..0000000000000 --- a/node_modules/rimraf/CHANGELOG.md +++ /dev/null @@ -1,65 +0,0 @@ -# v3.0 - -- Add `--preserve-root` option to executable (default true) -- Drop support for Node.js below version 6 - -# v2.7 - -- Make `glob` an optional dependency - -# 2.6 - -- Retry on EBUSY on non-windows platforms as well -- Make `rimraf.sync` 10000% more reliable on Windows - -# 2.5 - -- Handle Windows EPERM when lstat-ing read-only dirs -- Add glob option to pass options to glob - -# 2.4 - -- Add EPERM to delay/retry loop -- Add `disableGlob` option - -# 2.3 - -- Make maxBusyTries and emfileWait configurable -- Handle weird SunOS unlink-dir issue -- Glob the CLI arg for better Windows support - -# 2.2 - -- Handle ENOENT properly on Windows -- Allow overriding fs methods -- Treat EPERM as indicative of non-empty dir -- Remove optional graceful-fs dep -- Consistently return null error instead of undefined on success -- win32: Treat ENOTEMPTY the same as EBUSY -- Add `rimraf` binary - -# 2.1 - -- Fix SunOS error code for a non-empty directory -- Try rmdir before readdir -- Treat EISDIR like EPERM -- Remove chmod -- Remove lstat polyfill, node 0.7 is not supported - -# 2.0 - -- Fix myGid call to check process.getgid -- Simplify the EBUSY backoff logic. 
-- Use fs.lstat in node >= 0.7.9 -- Remove gently option -- remove fiber implementation -- Delete files that are marked read-only - -# 1.0 - -- Allow ENOENT in sync method -- Throw when no callback is provided -- Make opts.gently an absolute path -- use 'stat' if 'lstat' is not available -- Consistent error naming, and rethrow non-ENOENT stat errors -- add fiber implementation diff --git a/node_modules/rimraf/README.md b/node_modules/rimraf/README.md deleted file mode 100644 index 423b8cf854ad3..0000000000000 --- a/node_modules/rimraf/README.md +++ /dev/null @@ -1,101 +0,0 @@ -[![Build Status](https://travis-ci.org/isaacs/rimraf.svg?branch=master)](https://travis-ci.org/isaacs/rimraf) [![Dependency Status](https://david-dm.org/isaacs/rimraf.svg)](https://david-dm.org/isaacs/rimraf) [![devDependency Status](https://david-dm.org/isaacs/rimraf/dev-status.svg)](https://david-dm.org/isaacs/rimraf#info=devDependencies) - -The [UNIX command](http://en.wikipedia.org/wiki/Rm_(Unix)) `rm -rf` for node. - -Install with `npm install rimraf`, or just drop rimraf.js somewhere. - -## API - -`rimraf(f, [opts], callback)` - -The first parameter will be interpreted as a globbing pattern for files. If you -want to disable globbing you can do so with `opts.disableGlob` (defaults to -`false`). This might be handy, for instance, if you have filenames that contain -globbing wildcard characters. - -The callback will be called with an error if there is one. Certain -errors are handled for you: - -* Windows: `EBUSY` and `ENOTEMPTY` - rimraf will back off a maximum of - `opts.maxBusyTries` times before giving up, adding 100ms of wait - between each attempt. The default `maxBusyTries` is 3. -* `ENOENT` - If the file doesn't exist, rimraf will return - successfully, since your desired outcome is already the case. -* `EMFILE` - Since `readdir` requires opening a file descriptor, it's - possible to hit `EMFILE` if too many file descriptors are in use. - In the sync case, there's nothing to be done for this. But in the - async case, rimraf will gradually back off with timeouts up to - `opts.emfileWait` ms, which defaults to 1000. - -## options - -* unlink, chmod, stat, lstat, rmdir, readdir, - unlinkSync, chmodSync, statSync, lstatSync, rmdirSync, readdirSync - - In order to use a custom file system library, you can override - specific fs functions on the options object. - - If any of these functions are present on the options object, then - the supplied function will be used instead of the default fs - method. - - Sync methods are only relevant for `rimraf.sync()`, of course. - - For example: - - ```javascript - var myCustomFS = require('some-custom-fs') - - rimraf('some-thing', myCustomFS, callback) - ``` - -* maxBusyTries - - If an `EBUSY`, `ENOTEMPTY`, or `EPERM` error code is encountered - on Windows systems, then rimraf will retry with a linear backoff - wait of 100ms longer on each try. The default maxBusyTries is 3. - - Only relevant for async usage. - -* emfileWait - - If an `EMFILE` error is encountered, then rimraf will retry - repeatedly with a linear backoff of 1ms longer on each try, until - the timeout counter hits this max. The default limit is 1000. - - If you repeatedly encounter `EMFILE` errors, then consider using - [graceful-fs](http://npm.im/graceful-fs) in your program. - - Only relevant for async usage. - -* glob - - Set to `false` to disable [glob](http://npm.im/glob) pattern - matching. - - Set to an object to pass options to the glob module. 
The default - glob options are `{ nosort: true, silent: true }`. - - Glob version 6 is used in this module. - - Relevant for both sync and async usage. - -* disableGlob - - Set to any non-falsey value to disable globbing entirely. - (Equivalent to setting `glob: false`.) - -## rimraf.sync - -It can remove stuff synchronously, too. But that's not so good. Use -the async API. It's better. - -## CLI - -If installed with `npm install rimraf -g` it can be used as a global -command `rimraf <path> [<path> ...]` which is useful for cross platform support. - -## mkdirp - -If you need to create a directory recursively, check out -[mkdirp](https://github.com/substack/node-mkdirp). diff --git a/node_modules/safe-buffer/README.md b/node_modules/safe-buffer/README.md deleted file mode 100644 index e9a81afd0406f..0000000000000 --- a/node_modules/safe-buffer/README.md +++ /dev/null @@ -1,584 +0,0 @@ -# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] - -[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg -[travis-url]: https://travis-ci.org/feross/safe-buffer -[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg -[npm-url]: https://npmjs.org/package/safe-buffer -[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg -[downloads-url]: https://npmjs.org/package/safe-buffer -[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg -[standard-url]: https://standardjs.com - -#### Safer Node.js Buffer API - -**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, -`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** - -**Uses the built-in implementation when available.** - -## install - -``` -npm install safe-buffer -``` - -## usage - -The goal of this package is to provide a safe replacement for the node.js `Buffer`. - -It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to -the top of your node.js modules: - -```js -var Buffer = require('safe-buffer').Buffer - -// Existing buffer code will continue to work without issues: - -new Buffer('hey', 'utf8') -new Buffer([1, 2, 3], 'utf8') -new Buffer(obj) -new Buffer(16) // create an uninitialized buffer (potentially unsafe) - -// But you can use these new explicit APIs to make clear what you want: - -Buffer.from('hey', 'utf8') // convert from many types to a Buffer -Buffer.alloc(16) // create a zero-filled buffer (safe) -Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) -``` - -## api - -### Class Method: Buffer.from(array) -<!-- YAML -added: v3.0.0 ---> - -* `array` {Array} - -Allocates a new `Buffer` using an `array` of octets. - -```js -const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); - // creates a new Buffer containing ASCII bytes - // ['b','u','f','f','e','r'] -``` - -A `TypeError` will be thrown if `array` is not an `Array`. - -### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) -<!-- YAML -added: v5.10.0 ---> - -* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or - a `new ArrayBuffer()` -* `byteOffset` {Number} Default: `0` -* `length` {Number} Default: `arrayBuffer.length - byteOffset` - -When passed a reference to the `.buffer` property of a `TypedArray` instance, -the newly created `Buffer` will share the same allocated memory as the -TypedArray. 
- -```js -const arr = new Uint16Array(2); -arr[0] = 5000; -arr[1] = 4000; - -const buf = Buffer.from(arr.buffer); // shares the memory with arr; - -console.log(buf); - // Prints: <Buffer 88 13 a0 0f> - -// changing the TypedArray changes the Buffer also -arr[1] = 6000; - -console.log(buf); - // Prints: <Buffer 88 13 70 17> -``` - -The optional `byteOffset` and `length` arguments specify a memory range within -the `arrayBuffer` that will be shared by the `Buffer`. - -```js -const ab = new ArrayBuffer(10); -const buf = Buffer.from(ab, 0, 2); -console.log(buf.length); - // Prints: 2 -``` - -A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. - -### Class Method: Buffer.from(buffer) -<!-- YAML -added: v3.0.0 ---> - -* `buffer` {Buffer} - -Copies the passed `buffer` data onto a new `Buffer` instance. - -```js -const buf1 = Buffer.from('buffer'); -const buf2 = Buffer.from(buf1); - -buf1[0] = 0x61; -console.log(buf1.toString()); - // 'auffer' -console.log(buf2.toString()); - // 'buffer' (copy is not changed) -``` - -A `TypeError` will be thrown if `buffer` is not a `Buffer`. - -### Class Method: Buffer.from(str[, encoding]) -<!-- YAML -added: v5.10.0 ---> - -* `str` {String} String to encode. -* `encoding` {String} Encoding to use, Default: `'utf8'` - -Creates a new `Buffer` containing the given JavaScript string `str`. If -provided, the `encoding` parameter identifies the character encoding. -If not provided, `encoding` defaults to `'utf8'`. - -```js -const buf1 = Buffer.from('this is a tést'); -console.log(buf1.toString()); - // prints: this is a tést -console.log(buf1.toString('ascii')); - // prints: this is a tC)st - -const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); -console.log(buf2.toString()); - // prints: this is a tést -``` - -A `TypeError` will be thrown if `str` is not a string. - -### Class Method: Buffer.alloc(size[, fill[, encoding]]) -<!-- YAML -added: v5.10.0 ---> - -* `size` {Number} -* `fill` {Value} Default: `undefined` -* `encoding` {String} Default: `utf8` - -Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the -`Buffer` will be *zero-filled*. - -```js -const buf = Buffer.alloc(5); -console.log(buf); - // <Buffer 00 00 00 00 00> -``` - -The `size` must be less than or equal to the value of -`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is -`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will -be created if a `size` less than or equal to 0 is specified. - -If `fill` is specified, the allocated `Buffer` will be initialized by calling -`buf.fill(fill)`. See [`buf.fill()`][] for more information. - -```js -const buf = Buffer.alloc(5, 'a'); -console.log(buf); - // <Buffer 61 61 61 61 61> -``` - -If both `fill` and `encoding` are specified, the allocated `Buffer` will be -initialized by calling `buf.fill(fill, encoding)`. For example: - -```js -const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); -console.log(buf); - // <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64> -``` - -Calling `Buffer.alloc(size)` can be significantly slower than the alternative -`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance -contents will *never contain sensitive data*. - -A `TypeError` will be thrown if `size` is not a number. - -### Class Method: Buffer.allocUnsafe(size) -<!-- YAML -added: v5.10.0 ---> - -* `size` {Number} - -Allocates a new *non-zero-filled* `Buffer` of `size` bytes. 
The `size` must -be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit -architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is -thrown. A zero-length Buffer will be created if a `size` less than or equal to -0 is specified. - -The underlying memory for `Buffer` instances created in this way is *not -initialized*. The contents of the newly created `Buffer` are unknown and -*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such -`Buffer` instances to zeroes. - -```js -const buf = Buffer.allocUnsafe(5); -console.log(buf); - // <Buffer 78 e0 82 02 01> - // (octets will be different, every time) -buf.fill(0); -console.log(buf); - // <Buffer 00 00 00 00 00> -``` - -A `TypeError` will be thrown if `size` is not a number. - -Note that the `Buffer` module pre-allocates an internal `Buffer` instance of -size `Buffer.poolSize` that is used as a pool for the fast allocation of new -`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated -`new Buffer(size)` constructor) only when `size` is less than or equal to -`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default -value of `Buffer.poolSize` is `8192` but can be modified. - -Use of this pre-allocated internal memory pool is a key difference between -calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. -Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer -pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal -Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The -difference is subtle but can be important when an application requires the -additional performance that `Buffer.allocUnsafe(size)` provides. - -### Class Method: Buffer.allocUnsafeSlow(size) -<!-- YAML -added: v5.10.0 ---> - -* `size` {Number} - -Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The -`size` must be less than or equal to the value of -`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is -`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will -be created if a `size` less than or equal to 0 is specified. - -The underlying memory for `Buffer` instances created in this way is *not -initialized*. The contents of the newly created `Buffer` are unknown and -*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such -`Buffer` instances to zeroes. - -When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, -allocations under 4KB are, by default, sliced from a single pre-allocated -`Buffer`. This allows applications to avoid the garbage collection overhead of -creating many individually allocated Buffers. This approach improves both -performance and memory usage by eliminating the need to track and cleanup as -many `Persistent` objects. - -However, in the case where a developer may need to retain a small chunk of -memory from a pool for an indeterminate amount of time, it may be appropriate -to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then -copy out the relevant bits. 
- -```js -// need to keep around a few small chunks of memory -const store = []; - -socket.on('readable', () => { - const data = socket.read(); - // allocate for retained data - const sb = Buffer.allocUnsafeSlow(10); - // copy the data into the new allocation - data.copy(sb, 0, 0, 10); - store.push(sb); -}); -``` - -Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* -a developer has observed undue memory retention in their applications. - -A `TypeError` will be thrown if `size` is not a number. - -### All the Rest - -The rest of the `Buffer` API is exactly the same as in node.js. -[See the docs](https://nodejs.org/api/buffer.html). - - -## Related links - -- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) -- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) - -## Why is `Buffer` unsafe? - -Today, the node.js `Buffer` constructor is overloaded to handle many different argument -types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), -`ArrayBuffer`, and also `Number`. - -The API is optimized for convenience: you can throw any type at it, and it will try to do -what you want. - -Because the Buffer constructor is so powerful, you often see code like this: - -```js -// Convert UTF-8 strings to hex -function toHex (str) { - return new Buffer(str).toString('hex') -} -``` - -***But what happens if `toHex` is called with a `Number` argument?*** - -### Remote Memory Disclosure - -If an attacker can make your program call the `Buffer` constructor with a `Number` -argument, then they can make it allocate uninitialized memory from the node.js process. -This could potentially disclose TLS private keys, user data, or database passwords. - -When the `Buffer` constructor is passed a `Number` argument, it returns an -**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like -this, you **MUST** overwrite the contents before returning it to the user. - -From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): - -> `new Buffer(size)` -> -> - `size` Number -> -> The underlying memory for `Buffer` instances created in this way is not initialized. -> **The contents of a newly created `Buffer` are unknown and could contain sensitive -> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. - -(Emphasis our own.) - -Whenever the programmer intended to create an uninitialized `Buffer` you often see code -like this: - -```js -var buf = new Buffer(16) - -// Immediately overwrite the uninitialized buffer with data from another buffer -for (var i = 0; i < buf.length; i++) { - buf[i] = otherBuf[i] -} -``` - - -### Would this ever be a problem in real code? - -Yes. It's surprisingly common to forget to check the type of your variables in a -dynamically-typed language like JavaScript. - -Usually the consequences of assuming the wrong type is that your program crashes with an -uncaught exception. But the failure mode for forgetting to check the type of arguments to -the `Buffer` constructor is more catastrophic. 
- -Here's an example of a vulnerable service that takes a JSON payload and converts it to -hex: - -```js -// Take a JSON payload {str: "some string"} and convert it to hex -var server = http.createServer(function (req, res) { - var data = '' - req.setEncoding('utf8') - req.on('data', function (chunk) { - data += chunk - }) - req.on('end', function () { - var body = JSON.parse(data) - res.end(new Buffer(body.str).toString('hex')) - }) -}) - -server.listen(8080) -``` - -In this example, an http client just has to send: - -```json -{ - "str": 1000 -} -``` - -and it will get back 1,000 bytes of uninitialized memory from the server. - -This is a very serious bug. It's similar in severity to the -[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process -memory by remote attackers. - - -### Which real-world packages were vulnerable? - -#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) - -[Mathias Buus](https://github.com/mafintosh) and I -([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, -[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow -anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get -them to reveal 20 bytes at a time of uninitialized memory from the node.js process. - -Here's -[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) -that fixed it. We released a new fixed version, created a -[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all -vulnerable versions on npm so users will get a warning to upgrade to a newer version. - -#### [`ws`](https://www.npmjs.com/package/ws) - -That got us wondering if there were other vulnerable packages. Sure enough, within a short -period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the -most popular WebSocket implementation in node.js. - -If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as -expected, then uninitialized server memory would be disclosed to the remote peer. - -These were the vulnerable methods: - -```js -socket.send(number) -socket.ping(number) -socket.pong(number) -``` - -Here's a vulnerable socket server with some echo functionality: - -```js -server.on('connection', function (socket) { - socket.on('message', function (message) { - message = JSON.parse(message) - if (message.type === 'echo') { - socket.send(message.data) // send back the user's message - } - }) -}) -``` - -`socket.send(number)` called on the server, will disclose server memory. - -Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue -was fixed, with a more detailed explanation. Props to -[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the -[Node Security Project disclosure](https://nodesecurity.io/advisories/67). - - -### What's the solution? - -It's important that node.js offers a fast way to get memory otherwise performance-critical -applications would needlessly get a lot slower. - -But we need a better way to *signal our intent* as programmers. **When we want -uninitialized memory, we should request it explicitly.** - -Sensitive functionality should not be packed into a developer-friendly API that loosely -accepts many different types. This type of API encourages the lazy practice of passing -variables in without checking the type very carefully. 
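To make that concrete, here is a minimal defensive sketch (not part of the original README): a hypothetical hardened variant of the earlier `toHex` helper that checks its argument type explicitly, so a `Number` can never reach the `Buffer` constructor:

```js
// Hypothetical hardened variant of the toHex example above
function toHexSafe (str) {
  if (typeof str !== 'string') {
    throw new TypeError('toHexSafe expects a string, got ' + typeof str)
  }
  // Buffer.from never interprets its argument as an allocation size
  return Buffer.from(str, 'utf8').toString('hex')
}

// toHexSafe(1000) now throws instead of leaking process memory
```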
- -#### A new API: `Buffer.allocUnsafe(number)` - -The functionality of creating buffers with uninitialized memory should be part of another -API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that -frequently gets user input of all sorts of different types passed into it. - -```js -var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! - -// Immediately overwrite the uninitialized buffer with data from another buffer -for (var i = 0; i < buf.length; i++) { - buf[i] = otherBuf[i] -} -``` - - -### How do we fix node.js core? - -We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as -`semver-major`) which defends against one case: - -```js -var str = 16 -new Buffer(str, 'utf8') -``` - -In this situation, it's implied that the programmer intended the first argument to be a -string, since they passed an encoding as a second argument. Today, node.js will allocate -uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not -what the programmer intended. - -But this is only a partial solution, since if the programmer does `new Buffer(variable)` -(without an `encoding` parameter) there's no way to know what they intended. If `variable` -is sometimes a number, then uninitialized memory will sometimes be returned. - -### What's the real long-term fix? - -We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when -we need uninitialized memory. But that would break 1000s of packages. - -~~We believe the best solution is to:~~ - -~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ - -~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ - -#### Update - -We now support adding three new APIs: - -- `Buffer.from(value)` - convert from any type to a buffer -- `Buffer.alloc(size)` - create a zero-filled buffer -- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size - -This solves the core problem that affected `ws` and `bittorrent-dht` which is -`Buffer(variable)` getting tricked into taking a number argument. - -This way, existing code continues working and the impact on the npm ecosystem will be -minimal. Over time, npm maintainers can migrate performance-critical code to use -`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. - - -### Conclusion - -We think there's a serious design issue with the `Buffer` API as it exists today. It -promotes insecure software by putting high-risk functionality into a convenient API -with friendly "developer ergonomics". - -This wasn't merely a theoretical exercise because we found the issue in some of the -most popular npm packages. - -Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of -`buffer`. - -```js -var Buffer = require('safe-buffer').Buffer -``` - -Eventually, we hope that node.js core can switch to this new, safer behavior. We believe -the impact on the ecosystem would be minimal since it's not a breaking change. -Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while -older, insecure packages would magically become safe from this attack vector. 
- - -## links - -- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) -- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) -- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) - - -## credit - -The original issues in `bittorrent-dht` -([disclosure](https://nodesecurity.io/advisories/68)) and -`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by -[Mathias Buus](https://github.com/mafintosh) and -[Feross Aboukhadijeh](http://feross.org/). - -Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues -and for his work running the [Node Security Project](https://nodesecurity.io/). - -Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and -auditing the code. - - -## license - -MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/semver/CHANGELOG.md b/node_modules/semver/CHANGELOG.md deleted file mode 100644 index 220af176f748e..0000000000000 --- a/node_modules/semver/CHANGELOG.md +++ /dev/null @@ -1,111 +0,0 @@ -# changes log - -## 7.3.0 - -* Add `subset(r1, r2)` method to determine if `r1` range is entirely - contained by `r2` range. - -## 7.2.3 - -* Fix handling of `includePrelease` mode where version ranges like `1.0.0 - - 2.0.0` would include `3.0.0-pre` and not `1.0.0-pre`. - -## 7.2.2 - -* Fix bug where `2.0.0-pre` would be included in `^1.0.0` if - `includePrerelease` was set to true. - -## 7.2.0 - -* Add `simplifyRange` method to attempt to generate a more human-readable - range expression that is equivalent to a supplied range, for a given set - of versions. - -## 7.1.2 - -* Remove fancy lazy-loading logic, as it was causing problems for webpack - users. - -## 7.1.0 - -* Add `require('semver/preload')` to load the entire module without using - lazy getter methods. - -## 7.0.0 - -* Refactor module into separate files for better tree-shaking -* Drop support for very old node versions, use const/let, `=>` functions, - and classes. - -## 6.3.0 - -* Expose the token enum on the exports - -## 6.2.0 - -* Coerce numbers to strings when passed to semver.coerce() -* Add `rtl` option to coerce from right to left - -## 6.1.3 - -* Handle X-ranges properly in includePrerelease mode - -## 6.1.2 - -* Do not throw when testing invalid version strings - -## 6.1.1 - -* Add options support for semver.coerce() -* Handle undefined version passed to Range.test - -## 6.1.0 - -* Add semver.compareBuild function -* Support `*` in semver.intersects - -## 6.0 - -* Fix `intersects` logic. - - This is technically a bug fix, but since it is also a change to behavior - that may require users updating their code, it is marked as a major - version increment. - -## 5.7 - -* Add `minVersion` method - -## 5.6 - -* Move boolean `loose` param to an options object, with - backwards-compatibility protection. -* Add ability to opt out of special prerelease version handling with - the `includePrerelease` option flag. 
- -## 5.5 - -* Add version coercion capabilities - -## 5.4 - -* Add intersection checking - -## 5.3 - -* Add `minSatisfying` method - -## 5.2 - -* Add `prerelease(v)` that returns prerelease components - -## 5.1 - -* Add Backus-Naur for ranges -* Remove excessively cute inspection methods - -## 5.0 - -* Remove AMD/Browserified build artifacts -* Fix ltr and gtr when using the `*` range -* Fix for range `*` with a prerelease identifier diff --git a/node_modules/semver/README.md b/node_modules/semver/README.md deleted file mode 100644 index 9bef045af21ac..0000000000000 --- a/node_modules/semver/README.md +++ /dev/null @@ -1,566 +0,0 @@ -semver(1) -- The semantic versioner for npm -=========================================== - -## Install - -```bash -npm install semver -```` - -## Usage - -As a node module: - -```js -const semver = require('semver') - -semver.valid('1.2.3') // '1.2.3' -semver.valid('a.b.c') // null -semver.clean(' =v1.2.3 ') // '1.2.3' -semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true -semver.gt('1.2.3', '9.8.7') // false -semver.lt('1.2.3', '9.8.7') // true -semver.minVersion('>=1.0.0') // '1.0.0' -semver.valid(semver.coerce('v2')) // '2.0.0' -semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7' -``` - -You can also just load the module for the function that you care about, if -you'd like to minimize your footprint. - -```js -// load the whole API at once in a single object -const semver = require('semver') - -// or just load the bits you need -// all of them listed here, just pick and choose what you want - -// classes -const SemVer = require('semver/classes/semver') -const Comparator = require('semver/classes/comparator') -const Range = require('semver/classes/range') - -// functions for working with versions -const semverParse = require('semver/functions/parse') -const semverValid = require('semver/functions/valid') -const semverClean = require('semver/functions/clean') -const semverInc = require('semver/functions/inc') -const semverDiff = require('semver/functions/diff') -const semverMajor = require('semver/functions/major') -const semverMinor = require('semver/functions/minor') -const semverPatch = require('semver/functions/patch') -const semverPrerelease = require('semver/functions/prerelease') -const semverCompare = require('semver/functions/compare') -const semverRcompare = require('semver/functions/rcompare') -const semverCompareLoose = require('semver/functions/compare-loose') -const semverCompareBuild = require('semver/functions/compare-build') -const semverSort = require('semver/functions/sort') -const semverRsort = require('semver/functions/rsort') - -// low-level comparators between versions -const semverGt = require('semver/functions/gt') -const semverLt = require('semver/functions/lt') -const semverEq = require('semver/functions/eq') -const semverNeq = require('semver/functions/neq') -const semverGte = require('semver/functions/gte') -const semverLte = require('semver/functions/lte') -const semverCmp = require('semver/functions/cmp') -const semverCoerce = require('semver/functions/coerce') - -// working with ranges -const semverSatisfies = require('semver/functions/satisfies') -const semverMaxSatisfying = require('semver/ranges/max-satisfying') -const semverMinSatisfying = require('semver/ranges/min-satisfying') -const semverToComparators = require('semver/ranges/to-comparators') -const semverMinVersion = require('semver/ranges/min-version') -const semverValidRange = require('semver/ranges/valid') -const semverOutside = 
require('semver/ranges/outside') -const semverGtr = require('semver/ranges/gtr') -const semverLtr = require('semver/ranges/ltr') -const semverIntersects = require('semver/ranges/intersects') -const simplifyRange = require('semver/ranges/simplify') -const rangeSubset = require('semver/ranges/subset') -``` - -As a command-line utility: - -``` -$ semver -h - -A JavaScript implementation of the https://semver.org/ specification -Copyright Isaac Z. Schlueter - -Usage: semver [options] <version> [<version> [...]] -Prints valid versions sorted by SemVer precedence - -Options: --r --range <range> - Print versions that match the specified range. - --i --increment [<level>] - Increment a version by the specified level. Level can - be one of: major, minor, patch, premajor, preminor, - prepatch, or prerelease. Default level is 'patch'. - Only one version may be specified. - ---preid <identifier> - Identifier to be used to prefix premajor, preminor, - prepatch or prerelease version increments. - --l --loose - Interpret versions and ranges loosely - --p --include-prerelease - Always include prerelease versions in range matching - --c --coerce - Coerce a string into SemVer if possible - (does not imply --loose) - ---rtl - Coerce version strings right to left - ---ltr - Coerce version strings left to right (default) - -Program exits successfully if any valid version satisfies -all supplied ranges, and prints all satisfying versions. - -If no satisfying versions are found, then exits failure. - -Versions are printed in ascending order, so supplying -multiple versions to the utility will just sort them. -``` - -## Versions - -A "version" is described by the `v2.0.0` specification found at -<https://semver.org/>. - -A leading `"="` or `"v"` character is stripped off and ignored. - -## Ranges - -A `version range` is a set of `comparators` which specify versions -that satisfy the range. - -A `comparator` is composed of an `operator` and a `version`. The set -of primitive `operators` is: - -* `<` Less than -* `<=` Less than or equal to -* `>` Greater than -* `>=` Greater than or equal to -* `=` Equal. If no operator is specified, then equality is assumed, - so this operator is optional, but MAY be included. - -For example, the comparator `>=1.2.7` would match the versions -`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` -or `1.1.0`. - -Comparators can be joined by whitespace to form a `comparator set`, -which is satisfied by the **intersection** of all of the comparators -it includes. - -A range is composed of one or more comparator sets, joined by `||`. A -version matches a range if and only if every comparator in at least -one of the `||`-separated comparator sets is satisfied by the version. - -For example, the range `>=1.2.7 <1.3.0` would match the versions -`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, -or `1.1.0`. - -The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, -`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. - -### Prerelease Tags - -If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then -it will only be allowed to satisfy comparator sets if at least one -comparator with the same `[major, minor, patch]` tuple also has a -prerelease tag. - -For example, the range `>1.2.3-alpha.3` would be allowed to match the -version `1.2.3-alpha.7`, but it would *not* be satisfied by -`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater -than" `1.2.3-alpha.3` according to the SemVer sort rules. 
The version -range only accepts prerelease tags on the `1.2.3` version. The -version `3.4.5` *would* satisfy the range, because it does not have a -prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. - -The purpose for this behavior is twofold. First, prerelease versions -frequently are updated very quickly, and contain many breaking changes -that are (by the author's design) not yet fit for public consumption. -Therefore, by default, they are excluded from range matching -semantics. - -Second, a user who has opted into using a prerelease version has -clearly indicated the intent to use *that specific* set of -alpha/beta/rc versions. By including a prerelease tag in the range, -the user is indicating that they are aware of the risk. However, it -is still not appropriate to assume that they have opted into taking a -similar risk on the *next* set of prerelease versions. - -Note that this behavior can be suppressed (treating all prerelease -versions as if they were normal versions, for the purpose of range -matching) by setting the `includePrerelease` flag on the options -object to any -[functions](https://github.com/npm/node-semver#functions) that do -range matching. - -#### Prerelease Identifiers - -The method `.inc` takes an additional `identifier` string argument that -will append the value of the string as a prerelease identifier: - -```javascript -semver.inc('1.2.3', 'prerelease', 'beta') -// '1.2.4-beta.0' -``` - -command-line example: - -```bash -$ semver 1.2.3 -i prerelease --preid beta -1.2.4-beta.0 -``` - -Which then can be used to increment further: - -```bash -$ semver 1.2.4-beta.0 -i prerelease -1.2.4-beta.1 -``` - -### Advanced Range Syntax - -Advanced range syntax desugars to primitive comparators in -deterministic ways. - -Advanced ranges may be combined in the same way as primitive -comparators using white space or `||`. - -#### Hyphen Ranges `X.Y.Z - A.B.C` - -Specifies an inclusive set. - -* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` - -If a partial version is provided as the first version in the inclusive -range, then the missing pieces are replaced with zeroes. - -* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` - -If a partial version is provided as the second version in the -inclusive range, then all versions that start with the supplied parts -of the tuple are accepted, but nothing that would be greater than the -provided tuple parts. - -* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0-0` -* `1.2.3 - 2` := `>=1.2.3 <3.0.0-0` - -#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` - -Any of `X`, `x`, or `*` may be used to "stand in" for one of the -numeric values in the `[major, minor, patch]` tuple. - -* `*` := `>=0.0.0` (Any version satisfies) -* `1.x` := `>=1.0.0 <2.0.0-0` (Matching major version) -* `1.2.x` := `>=1.2.0 <1.3.0-0` (Matching major and minor versions) - -A partial version range is treated as an X-Range, so the special -character is in fact optional. - -* `""` (empty string) := `*` := `>=0.0.0` -* `1` := `1.x.x` := `>=1.0.0 <2.0.0-0` -* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0-0` - -#### Tilde Ranges `~1.2.3` `~1.2` `~1` - -Allows patch-level changes if a minor version is specified on the -comparator. Allows minor-level changes if not. 
- -* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0-0` -* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0-0` (Same as `1.2.x`) -* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0-0` (Same as `1.x`) -* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0-0` -* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0-0` (Same as `0.2.x`) -* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0-0` (Same as `0.x`) -* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0-0` Note that prereleases in - the `1.2.3` version will be allowed, if they are greater than or - equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but - `1.2.4-beta.2` would not, because it is a prerelease of a - different `[major, minor, patch]` tuple. - -#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` - -Allows changes that do not modify the left-most non-zero element in the -`[major, minor, patch]` tuple. In other words, this allows patch and -minor updates for versions `1.0.0` and above, patch updates for -versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. - -Many authors treat a `0.x` version as if the `x` were the major -"breaking-change" indicator. - -Caret ranges are ideal when an author may make breaking changes -between `0.2.4` and `0.3.0` releases, which is a common practice. -However, it presumes that there will *not* be breaking changes between -`0.2.4` and `0.2.5`. It allows for changes that are presumed to be -additive (but non-breaking), according to commonly observed practices. - -* `^1.2.3` := `>=1.2.3 <2.0.0-0` -* `^0.2.3` := `>=0.2.3 <0.3.0-0` -* `^0.0.3` := `>=0.0.3 <0.0.4-0` -* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0-0` Note that prereleases in - the `1.2.3` version will be allowed, if they are greater than or - equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but - `1.2.4-beta.2` would not, because it is a prerelease of a - different `[major, minor, patch]` tuple. -* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4-0` Note that prereleases in the - `0.0.3` version *only* will be allowed, if they are greater than or - equal to `beta`. So, `0.0.3-pr.2` would be allowed. - -When parsing caret ranges, a missing `patch` value desugars to the -number `0`, but will allow flexibility within that value, even if the -major and minor versions are both `0`. - -* `^1.2.x` := `>=1.2.0 <2.0.0-0` -* `^0.0.x` := `>=0.0.0 <0.1.0-0` -* `^0.0` := `>=0.0.0 <0.1.0-0` - -A missing `minor` and `patch` values will desugar to zero, but also -allow flexibility within those values, even if the major version is -zero. - -* `^1.x` := `>=1.0.0 <2.0.0-0` -* `^0.x` := `>=0.0.0 <1.0.0-0` - -### Range Grammar - -Putting all this together, here is a Backus-Naur grammar for ranges, -for the benefit of parser authors: - -```bnf -range-set ::= range ( logical-or range ) * -logical-or ::= ( ' ' ) * '||' ( ' ' ) * -range ::= hyphen | simple ( ' ' simple ) * | '' -hyphen ::= partial ' - ' partial -simple ::= primitive | partial | tilde | caret -primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial -partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? -xr ::= 'x' | 'X' | '*' | nr -nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * -tilde ::= '~' partial -caret ::= '^' partial -qualifier ::= ( '-' pre )? ( '+' build )? -pre ::= parts -build ::= parts -parts ::= part ( '.' part ) * -part ::= nr | [-0-9A-Za-z]+ -``` - -## Functions - -All methods and classes take a final `options` object argument. All -options in this object are `false` by default. The options supported -are: - -- `loose` Be more forgiving about not-quite-valid semver strings. 
- (Any resulting output will always be 100% strict compliant, of - course.) For backwards compatibility reasons, if the `options` - argument is a boolean value instead of an object, it is interpreted - to be the `loose` param. -- `includePrerelease` Set to suppress the [default - behavior](https://github.com/npm/node-semver#prerelease-tags) of - excluding prerelease tagged versions from ranges unless they are - explicitly opted into. - -Strict-mode Comparators and Ranges will be strict about the SemVer -strings that they parse. - -* `valid(v)`: Return the parsed version, or null if it's not valid. -* `inc(v, release)`: Return the version incremented by the release - type (`major`, `premajor`, `minor`, `preminor`, `patch`, - `prepatch`, or `prerelease`), or null if it's not valid - * `premajor` in one call will bump the version up to the next major - version and down to a prerelease of that major version. - `preminor`, and `prepatch` work the same way. - * If called from a non-prerelease version, the `prerelease` will work the - same as `prepatch`. It increments the patch version, then makes a - prerelease. If the input version is already a prerelease it simply - increments it. -* `prerelease(v)`: Returns an array of prerelease components, or null - if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` -* `major(v)`: Return the major version number. -* `minor(v)`: Return the minor version number. -* `patch(v)`: Return the patch version number. -* `intersects(r1, r2, loose)`: Return true if the two supplied ranges - or comparators intersect. -* `parse(v)`: Attempt to parse a string as a semantic version, returning either - a `SemVer` object or `null`. - -### Comparison - -* `gt(v1, v2)`: `v1 > v2` -* `gte(v1, v2)`: `v1 >= v2` -* `lt(v1, v2)`: `v1 < v2` -* `lte(v1, v2)`: `v1 <= v2` -* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, - even if they're not the exact same string. You already know how to - compare strings. -* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. -* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call - the corresponding function above. `"==="` and `"!=="` do simple - string comparison, but are included for completeness. Throws if an - invalid comparison string is provided. -* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if - `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. -* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions - in descending order when passed to `Array.sort()`. -* `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions - are equal. Sorts in ascending order if passed to `Array.sort()`. - `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. -* `diff(v1, v2)`: Returns difference between two versions by the release type - (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), - or null if the versions are the same. - -### Comparators - -* `intersects(comparator)`: Return true if the comparators intersect - -### Ranges - -* `validRange(range)`: Return the valid range or null if it's not valid -* `satisfies(version, range)`: Return true if the version satisfies the - range. -* `maxSatisfying(versions, range)`: Return the highest version in the list - that satisfies the range, or `null` if none of them do. -* `minSatisfying(versions, range)`: Return the lowest version in the list - that satisfies the range, or `null` if none of them do. 
-* `minVersion(range)`: Return the lowest version that can possibly match - the given range. -* `gtr(version, range)`: Return `true` if version is greater than all the - versions possible in the range. -* `ltr(version, range)`: Return `true` if version is less than all the - versions possible in the range. -* `outside(version, range, hilo)`: Return true if the version is outside - the bounds of the range in either the high or low direction. The - `hilo` argument must be either the string `'>'` or `'<'`. (This is - the function called by `gtr` and `ltr`.) -* `intersects(range)`: Return true if any of the ranges comparators intersect -* `simplifyRange(versions, range)`: Return a "simplified" range that - matches the same items in `versions` list as the range specified. Note - that it does *not* guarantee that it would match the same versions in all - cases, only for the set of versions provided. This is useful when - generating ranges by joining together multiple versions with `||` - programmatically, to provide the user with something a bit more - ergonomic. If the provided range is shorter in string-length than the - generated range, then that is returned. -* `subset(subRange, superRange)`: Return `true` if the `subRange` range is - entirely contained by the `superRange` range. - -Note that, since ranges may be non-contiguous, a version might not be -greater than a range, less than a range, *or* satisfy a range! For -example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` -until `2.0.0`, so the version `1.2.10` would not be greater than the -range (because `2.0.1` satisfies, which is higher), nor less than the -range (since `1.2.8` satisfies, which is lower), and it also does not -satisfy the range. - -If you want to know if a version satisfies or does not satisfy a -range, use the `satisfies(version, range)` function. - -### Coercion - -* `coerce(version, options)`: Coerces a string to semver if possible - -This aims to provide a very forgiving translation of a non-semver string to -semver. It looks for the first digit in a string, and consumes all -remaining characters which satisfy at least a partial semver (e.g., `1`, -`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer -versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All -surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes -`3.4.0`). Only text which lacks digits will fail coercion (`version one` -is not valid). The maximum length for any semver component considered for -coercion is 16 characters; longer components will be ignored -(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any -semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value -components are invalid (`9999999999999999.4.7.4` is likely invalid). - -If the `options.rtl` flag is set, then `coerce` will return the right-most -coercible tuple that does not share an ending index with a longer coercible -tuple. For example, `1.2.3.4` will return `2.3.4` in rtl mode, not -`4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of -any other overlapping SemVer tuple. - -### Clean - -* `clean(version)`: Clean a string to be a valid semver if possible - -This will return a cleaned and trimmed semver version. If the provided -version is not valid a null will be returned. This does not work for -ranges. - -ex. 
-* `s.clean(' = v 2.1.5foo')`: `null` -* `s.clean(' = v 2.1.5foo', { loose: true })`: `'2.1.5-foo'` -* `s.clean(' = v 2.1.5-foo')`: `null` -* `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'` -* `s.clean('=v2.1.5')`: `'2.1.5'` -* `s.clean(' =v2.1.5')`: `2.1.5` -* `s.clean(' 2.1.5 ')`: `'2.1.5'` -* `s.clean('~1.0.0')`: `null` - -## Exported Modules - -<!-- -TODO: Make sure that all of these items are documented (classes aren't, -eg), and then pull the module name into the documentation for that specific -thing. ---> - -You may pull in just the part of this semver utility that you need, if you -are sensitive to packing and tree-shaking concerns. The main -`require('semver')` export uses getter functions to lazily load the parts -of the API that are used. - -The following modules are available: - -* `require('semver')` -* `require('semver/classes')` -* `require('semver/classes/comparator')` -* `require('semver/classes/range')` -* `require('semver/classes/semver')` -* `require('semver/functions/clean')` -* `require('semver/functions/cmp')` -* `require('semver/functions/coerce')` -* `require('semver/functions/compare')` -* `require('semver/functions/compare-build')` -* `require('semver/functions/compare-loose')` -* `require('semver/functions/diff')` -* `require('semver/functions/eq')` -* `require('semver/functions/gt')` -* `require('semver/functions/gte')` -* `require('semver/functions/inc')` -* `require('semver/functions/lt')` -* `require('semver/functions/lte')` -* `require('semver/functions/major')` -* `require('semver/functions/minor')` -* `require('semver/functions/neq')` -* `require('semver/functions/parse')` -* `require('semver/functions/patch')` -* `require('semver/functions/prerelease')` -* `require('semver/functions/rcompare')` -* `require('semver/functions/rsort')` -* `require('semver/functions/satisfies')` -* `require('semver/functions/sort')` -* `require('semver/functions/valid')` -* `require('semver/ranges/gtr')` -* `require('semver/ranges/intersects')` -* `require('semver/ranges/ltr')` -* `require('semver/ranges/max-satisfying')` -* `require('semver/ranges/min-satisfying')` -* `require('semver/ranges/min-version')` -* `require('semver/ranges/outside')` -* `require('semver/ranges/to-comparators')` -* `require('semver/ranges/valid')` diff --git a/node_modules/semver/package.json b/node_modules/semver/package.json index d4043d38a1352..4e1154195a5f1 100644 --- a/node_modules/semver/package.json +++ b/node_modules/semver/package.json @@ -1,6 +1,6 @@ { "name": "semver", - "version": "7.3.4", + "version": "7.3.5", "description": "The semantic version parser used by npm.", "main": "index.js", "scripts": { diff --git a/node_modules/semver/ranges/subset.js b/node_modules/semver/ranges/subset.js index bb7d15fe2696b..532fd1364ce75 100644 --- a/node_modules/semver/ranges/subset.js +++ b/node_modules/semver/ranges/subset.js @@ -1,20 +1,28 @@ const Range = require('../classes/range.js') -const { ANY } = require('../classes/comparator.js') +const Comparator = require('../classes/comparator.js') +const { ANY } = Comparator const satisfies = require('../functions/satisfies.js') const compare = require('../functions/compare.js') // Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: -// - Every simple range `r1, r2, ...` is a subset of some `R1, R2, ...` +// - Every simple range `r1, r2, ...` is a null set, OR +// - Every simple range `r1, r2, ...` which is not a null set is a subset of +// some `R1, R2, ...` // // Simple range `c1 c2 ...` is a subset of simple range `C1 
C2 ...` iff: // - If c is only the ANY comparator // - If C is only the ANY comparator, return true -// - Else return false +// - Else if in prerelease mode, return false +// - else replace c with `[>=0.0.0]` +// - If C is only the ANY comparator +// - if in prerelease mode, return true +// - else replace C with `[>=0.0.0]` // - Let EQ be the set of = comparators in c // - If EQ is more than one, return true (null set) // - Let GT be the highest > or >= comparator in c // - Let LT be the lowest < or <= comparator in c // - If GT and LT, and GT.semver > LT.semver, return true (null set) +// - If any C is a = range, and GT or LT are set, return false // - If EQ // - If GT, and EQ does not satisfy GT, return true (null set) // - If LT, and EQ does not satisfy LT, return true (null set) @@ -23,13 +31,16 @@ const compare = require('../functions/compare.js') // - If GT // - If GT.semver is lower than any > or >= comp in C, return false // - If GT is >=, and GT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the GT.semver tuple, return false // - If LT // - If LT.semver is greater than any < or <= comp in C, return false // - If LT is <=, and LT.semver does not satisfy every C, return false -// - If any C is a = range, and GT or LT are set, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the LT.semver tuple, return false // - Else return true -const subset = (sub, dom, options) => { +const subset = (sub, dom, options = {}) => { if (sub === dom) return true @@ -58,8 +69,21 @@ const simpleSubset = (sub, dom, options) => { if (sub === dom) return true - if (sub.length === 1 && sub[0].semver === ANY) - return dom.length === 1 && dom[0].semver === ANY + if (sub.length === 1 && sub[0].semver === ANY) { + if (dom.length === 1 && dom[0].semver === ANY) + return true + else if (options.includePrerelease) + sub = [ new Comparator('>=0.0.0-0') ] + else + sub = [ new Comparator('>=0.0.0') ] + } + + if (dom.length === 1 && dom[0].semver === ANY) { + if (options.includePrerelease) + return true + else + dom = [ new Comparator('>=0.0.0') ] + } const eqSet = new Set() let gt, lt @@ -102,10 +126,32 @@ const simpleSubset = (sub, dom, options) => { let higher, lower let hasDomLT, hasDomGT + // if the subset has a prerelease, we need a comparator in the superset + // with the same tuple and a prerelease, or it's not a subset + let needDomLTPre = lt && + !options.includePrerelease && + lt.semver.prerelease.length ? lt.semver : false + let needDomGTPre = gt && + !options.includePrerelease && + gt.semver.prerelease.length ? 
gt.semver : false + // exception: <1.2.3-0 is the same as <1.2.3 + if (needDomLTPre && needDomLTPre.prerelease.length === 1 && + lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { + needDomLTPre = false + } + for (const c of dom) { hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' if (gt) { + if (needDomGTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomGTPre.major && + c.semver.minor === needDomGTPre.minor && + c.semver.patch === needDomGTPre.patch) { + needDomGTPre = false + } + } if (c.operator === '>' || c.operator === '>=') { higher = higherGT(gt, c, options) if (higher === c && higher !== gt) @@ -114,6 +160,14 @@ const simpleSubset = (sub, dom, options) => { return false } if (lt) { + if (needDomLTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomLTPre.major && + c.semver.minor === needDomLTPre.minor && + c.semver.patch === needDomLTPre.patch) { + needDomLTPre = false + } + } if (c.operator === '<' || c.operator === '<=') { lower = lowerLT(lt, c, options) if (lower === c && lower !== lt) @@ -134,6 +188,12 @@ const simpleSubset = (sub, dom, options) => { if (lt && hasDomGT && !gt && gtltComp !== 0) return false + // we needed a prerelease range in a specific tuple, but didn't get one + // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, + // because it includes prereleases in the 1.2.3 tuple + if (needDomGTPre || needDomLTPre) + return false + return true } diff --git a/node_modules/set-blocking/CHANGELOG.md b/node_modules/set-blocking/CHANGELOG.md deleted file mode 100644 index 03bf591923d78..0000000000000 --- a/node_modules/set-blocking/CHANGELOG.md +++ /dev/null @@ -1,26 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -<a name="2.0.0"></a> -# [2.0.0](https://github.com/yargs/set-blocking/compare/v1.0.0...v2.0.0) (2016-05-17) - - -### Features - -* add an isTTY check ([#3](https://github.com/yargs/set-blocking/issues/3)) ([66ce277](https://github.com/yargs/set-blocking/commit/66ce277)) - - -### BREAKING CHANGES - -* stdio/stderr will not be set to blocking if isTTY === false - - - -<a name="1.0.0"></a> -# 1.0.0 (2016-05-14) - - -### Features - -* implemented shim for stream._handle.setBlocking ([6bde0c0](https://github.com/yargs/set-blocking/commit/6bde0c0)) diff --git a/node_modules/set-blocking/README.md b/node_modules/set-blocking/README.md deleted file mode 100644 index e93b4202b59d6..0000000000000 --- a/node_modules/set-blocking/README.md +++ /dev/null @@ -1,31 +0,0 @@ -# set-blocking - -[![Build Status](https://travis-ci.org/yargs/set-blocking.svg)](https://travis-ci.org/yargs/set-blocking) -[![NPM version](https://img.shields.io/npm/v/set-blocking.svg)](https://www.npmjs.com/package/set-blocking) -[![Coverage Status](https://coveralls.io/repos/yargs/set-blocking/badge.svg?branch=)](https://coveralls.io/r/yargs/set-blocking?branch=master) -[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version) - -set blocking `stdio` and `stderr` ensuring that terminal output does not truncate. 
- -```js -const setBlocking = require('set-blocking') -setBlocking(true) -console.log(someLargeStringToOutput) -``` - -## Historical Context/Word of Warning - -This was created as a shim to address the bug discussed in [node #6456](https://github.com/nodejs/node/issues/6456). This bug crops up on -newer versions of Node.js (`0.12+`), truncating terminal output. - -You should be mindful of the side-effects caused by using `set-blocking`: - -* if your module sets blocking to `true`, it will effect other modules - consuming your library. In [yargs](https://github.com/yargs/yargs/blob/master/yargs.js#L653) we only call - `setBlocking(true)` once we already know we are about to call `process.exit(code)`. -* this patch will not apply to subprocesses spawned with `isTTY = true`, this is - the [default `spawn()` behavior](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options). - -## License - -ISC diff --git a/node_modules/signal-exit/CHANGELOG.md b/node_modules/signal-exit/CHANGELOG.md deleted file mode 100644 index ed104f41bb71b..0000000000000 --- a/node_modules/signal-exit/CHANGELOG.md +++ /dev/null @@ -1,35 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -### [3.0.3](https://github.com/tapjs/signal-exit/compare/v3.0.2...v3.0.3) (2020-03-26) - - -### Bug Fixes - -* patch `SIGHUP` to `SIGINT` when on Windows ([cfd1046](https://github.com/tapjs/signal-exit/commit/cfd1046079af4f0e44f93c69c237a09de8c23ef2)) -* **ci:** use Travis for Windows builds ([007add7](https://github.com/tapjs/signal-exit/commit/007add793d2b5ae3c382512103adbf321768a0b8)) - -<a name="3.0.1"></a> -## [3.0.1](https://github.com/tapjs/signal-exit/compare/v3.0.0...v3.0.1) (2016-09-08) - - -### Bug Fixes - -* do not listen on SIGBUS, SIGFPE, SIGSEGV and SIGILL ([#40](https://github.com/tapjs/signal-exit/issues/40)) ([5b105fb](https://github.com/tapjs/signal-exit/commit/5b105fb)) - - - -<a name="3.0.0"></a> -# [3.0.0](https://github.com/tapjs/signal-exit/compare/v2.1.2...v3.0.0) (2016-06-13) - - -### Bug Fixes - -* get our test suite running on Windows ([#23](https://github.com/tapjs/signal-exit/issues/23)) ([6f3eda8](https://github.com/tapjs/signal-exit/commit/6f3eda8)) -* hooking SIGPROF was interfering with profilers see [#21](https://github.com/tapjs/signal-exit/issues/21) ([#24](https://github.com/tapjs/signal-exit/issues/24)) ([1248a4c](https://github.com/tapjs/signal-exit/commit/1248a4c)) - - -### BREAKING CHANGES - -* signal-exit no longer wires into SIGPROF diff --git a/node_modules/signal-exit/README.md b/node_modules/signal-exit/README.md deleted file mode 100644 index 9f8eb5917dc79..0000000000000 --- a/node_modules/signal-exit/README.md +++ /dev/null @@ -1,39 +0,0 @@ -# signal-exit - -[![Build Status](https://travis-ci.org/tapjs/signal-exit.png)](https://travis-ci.org/tapjs/signal-exit) -[![Coverage](https://coveralls.io/repos/tapjs/signal-exit/badge.svg?branch=master)](https://coveralls.io/r/tapjs/signal-exit?branch=master) -[![NPM version](https://img.shields.io/npm/v/signal-exit.svg)](https://www.npmjs.com/package/signal-exit) -[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version) - -When you want to fire an event no matter how a process exits: - -* reaching the end of execution. 
-* explicitly having `process.exit(code)` called. -* having `process.kill(pid, sig)` called. -* receiving a fatal signal from outside the process - -Use `signal-exit`. - -```js -var onExit = require('signal-exit') - -onExit(function (code, signal) { - console.log('process exited!') -}) -``` - -## API - -`var remove = onExit(function (code, signal) {}, options)` - -The return value of the function is a function that will remove the -handler. - -Note that the function *only* fires for signals if the signal would -cause the proces to exit. That is, there are no other listeners, and -it is a fatal signal. - -## Options - -* `alwaysLast`: Run this handler after any other signal or exit - handlers. This causes `process.emit` to be monkeypatched. diff --git a/node_modules/smart-buffer/.prettierrc.yaml b/node_modules/smart-buffer/.prettierrc.yaml deleted file mode 100644 index 9a4f5ed754dd2..0000000000000 --- a/node_modules/smart-buffer/.prettierrc.yaml +++ /dev/null @@ -1,5 +0,0 @@ -parser: typescript -printWidth: 120 -tabWidth: 2 -singleQuote: true -trailingComma: none \ No newline at end of file diff --git a/node_modules/smart-buffer/.travis.yml b/node_modules/smart-buffer/.travis.yml deleted file mode 100644 index eec71cecaab48..0000000000000 --- a/node_modules/smart-buffer/.travis.yml +++ /dev/null @@ -1,13 +0,0 @@ -language: node_js -node_js: - - 6 - - 8 - - 10 - - 12 - - stable - -before_script: - - npm install -g typescript - - tsc -p ./ - -script: "npm run coveralls" \ No newline at end of file diff --git a/node_modules/smart-buffer/README.md b/node_modules/smart-buffer/README.md deleted file mode 100644 index 4cd328d9e0c3d..0000000000000 --- a/node_modules/smart-buffer/README.md +++ /dev/null @@ -1,632 +0,0 @@ -smart-buffer [![Build Status](https://travis-ci.org/JoshGlazebrook/smart-buffer.svg?branch=master)](https://travis-ci.org/JoshGlazebrook/smart-buffer) [![Coverage Status](https://coveralls.io/repos/github/JoshGlazebrook/smart-buffer/badge.svg?branch=master)](https://coveralls.io/github/JoshGlazebrook/smart-buffer?branch=master) -============= - -smart-buffer is a Buffer wrapper that adds automatic read & write offset tracking, string operations, data insertions, and more. - -![stats](https://nodei.co/npm/smart-buffer.png?downloads=true&downloadRank=true&stars=true "stats") - -**Key Features**: -* Proxies all of the Buffer write and read functions -* Keeps track of read and write offsets automatically -* Grows the internal Buffer as needed -* Useful string operations. (Null terminating strings) -* Allows for inserting values at specific points in the Buffer -* Built in TypeScript -* Type Definitions Provided -* Browser Support (using Webpack/Browserify) -* Full test coverage - -**Requirements**: -* Node v4.0+ is supported at this time. (Versions prior to 2.0 will work on node 0.10) - - - -## Breaking Changes in v4.0 - -* Old constructor patterns have been completely removed. It's now required to use the SmartBuffer.fromXXX() factory constructors. -* rewind(), skip(), moveTo() have been removed. (see [offsets](#offsets)) -* Internal private properties are now prefixed with underscores (_) -* **All** writeXXX() methods that are given an offset will now **overwrite data** instead of insert. (see [write vs insert](#write-vs-insert)) -* insertXXX() methods have been added for when you want to insert data at a specific offset (this replaces the old behavior of writeXXX() when an offset was provided) - - -## Looking for v3 docs? 
- -Legacy documentation for version 3 and prior can be found [here](https://github.com/JoshGlazebrook/smart-buffer/blob/master/docs/README_v3.md). - -## Installing: - -`yarn add smart-buffer` - -or - -`npm install smart-buffer` - -Note: The published NPM package includes the built javascript library. -If you cloned this repo and wish to build the library manually use: - -`npm run build` - -## Using smart-buffer - -```javascript -// Javascript -const SmartBuffer = require('smart-buffer').SmartBuffer; - -// Typescript -import { SmartBuffer, SmartBufferOptions} from 'smart-buffer'; -``` - -### Simple Example - -Building a packet that uses the following protocol specification: - -`[PacketType:2][PacketLength:2][Data:XX]` - -To build this packet using the vanilla Buffer class, you would have to count up the length of the data payload beforehand. You would also need to keep track of the current "cursor" position in your Buffer so you write everything in the right places. With smart-buffer you don't have to do either of those things. - -```javascript -function createLoginPacket(username, password, age, country) { - const packet = new SmartBuffer(); - packet.writeUInt16LE(0x0060); // Some packet type - packet.writeStringNT(username); - packet.writeStringNT(password); - packet.writeUInt8(age); - packet.writeStringNT(country); - packet.insertUInt16LE(packet.length - 2, 2); - - return packet.toBuffer(); -} -``` -With the above function, you now can do this: -```javascript -const login = createLoginPacket("Josh", "secret123", 22, "United States"); - -// <Buffer 60 00 1e 00 4a 6f 73 68 00 73 65 63 72 65 74 31 32 33 00 16 55 6e 69 74 65 64 20 53 74 61 74 65 73 00> -``` -Notice that the `[PacketLength:2]` value (1e 00) was inserted at position 2. - -Reading back the packet we created above is just as easy: -```javascript - -const reader = SmartBuffer.fromBuffer(login); - -const logininfo = { - packetType: reader.readUInt16LE(), - packetLength: reader.readUInt16LE(), - username: reader.readStringNT(), - password: reader.readStringNT(), - age: reader.readUInt8(), - country: reader.readStringNT() -}; - -/* -{ - packetType: 96, (0x0060) - packetLength: 30, - username: 'Josh', - password: 'secret123', - age: 22, - country: 'United States' -} -*/ -``` - - -## Write vs Insert -In prior versions of SmartBuffer, .writeXXX(value, offset) calls would insert data when an offset was provided. In version 4, this will now overwrite the data at the offset position. To insert data there are now corresponding .insertXXX(value, offset) methods. - -**SmartBuffer v3**: -```javascript -const buff = SmartBuffer.fromBuffer(new Buffer([1,2,3,4,5,6])); -buff.writeInt8(7, 2); -console.log(buff.toBuffer()) - -// <Buffer 01 02 07 03 04 05 06> -``` - -**SmartBuffer v4**: -```javascript -const buff = SmartBuffer.fromBuffer(new Buffer([1,2,3,4,5,6])); -buff.writeInt8(7, 2); -console.log(buff.toBuffer()); - -// <Buffer 01 02 07 04 05 06> -``` - -To insert you instead should use: -```javascript -const buff = SmartBuffer.fromBuffer(new Buffer([1,2,3,4,5,6])); -buff.insertInt8(7, 2); -console.log(buff.toBuffer()); - -// <Buffer 01 02 07 03 04 05 06> -``` - -**Note:** Insert/Writing to a position beyond the currently tracked internal Buffer will zero pad to your offset. - -## Constructing a smart-buffer - -There are a few different ways to construct a SmartBuffer instance. - -```javascript -// Creating SmartBuffer from existing Buffer -const buff = SmartBuffer.fromBuffer(buffer); // Creates instance from buffer. 
(Uses default utf8 encoding) -const buff = SmartBuffer.fromBuffer(buffer, 'ascii'); // Creates instance from buffer with ascii encoding for strings. - -// Creating SmartBuffer with specified internal Buffer size. (Note: this is not a hard cap, the internal buffer will grow as needed). -const buff = SmartBuffer.fromSize(1024); // Creates instance with internal Buffer size of 1024. -const buff = SmartBuffer.fromSize(1024, 'utf8'); // Creates instance with internal Buffer size of 1024, and utf8 encoding for strings. - -// Creating SmartBuffer with options object. This one specifies size and encoding. -const buff = SmartBuffer.fromOptions({ - size: 1024, - encoding: 'ascii' -}); - -// Creating SmartBuffer with options object. This one specified an existing Buffer. -const buff = SmartBuffer.fromOptions({ - buff: buffer -}); - -// Creating SmartBuffer from a string. -const buff = SmartBuffer.fromBuffer(Buffer.from('some string', 'utf8')); - -// Just want a regular SmartBuffer with all default options? -const buff = new SmartBuffer(); -``` - -# Api Reference: - -**Note:** SmartBuffer is fully documented with Typescript definitions as well as jsdocs so your favorite editor/IDE will have intellisense. - -**Table of Contents** - -1. [Constructing](#constructing) -2. **Numbers** - 1. [Integers](#integers) - 2. [Floating Points](#floating-point-numbers) -3. **Strings** - 1. [Strings](#strings) - 2. [Null Terminated Strings](#null-terminated-strings) -4. [Buffers](#buffers) -5. [Offsets](#offsets) -6. [Other](#other) - - -## Constructing - -### constructor() -### constructor([options]) -- ```options``` *{SmartBufferOptions}* An optional options object to construct a SmartBuffer with. - -Examples: -```javascript -const buff = new SmartBuffer(); -const buff = new SmartBuffer({ - size: 1024, - encoding: 'ascii' -}); -``` - -### Class Method: fromBuffer(buffer[, encoding]) -- ```buffer``` *{Buffer}* The Buffer instance to wrap. -- ```encoding``` *{string}* The string encoding to use. ```Default: 'utf8'``` - -Examples: -```javascript -const someBuffer = Buffer.from('some string'); -const buff = SmartBuffer.fromBuffer(someBuffer); // Defaults to utf8 -const buff = SmartBuffer.fromBuffer(someBuffer, 'ascii'); -``` - -### Class Method: fromSize(size[, encoding]) -- ```size``` *{number}* The size to initialize the internal Buffer. -- ```encoding``` *{string}* The string encoding to use. ```Default: 'utf8'``` - -Examples: -```javascript -const buff = SmartBuffer.fromSize(1024); // Defaults to utf8 -const buff = SmartBuffer.fromSize(1024, 'ascii'); -``` - -### Class Method: fromOptions(options) -- ```options``` *{SmartBufferOptions}* The Buffer instance to wrap. - -```typescript -interface SmartBufferOptions { - encoding?: BufferEncoding; // Defaults to utf8 - size?: number; // Defaults to 4096 - buff?: Buffer; -} -``` - -Examples: -```javascript -const buff = SmartBuffer.fromOptions({ - size: 1024 -}; -const buff = SmartBuffer.fromOptions({ - size: 1024, - encoding: 'utf8' -}); -const buff = SmartBuffer.fromOptions({ - encoding: 'utf8' -}); - -const someBuff = Buffer.from('some string', 'utf8'); -const buff = SmartBuffer.fromOptions({ - buffer: someBuff, - encoding: 'utf8' -}); -``` - -## Integers - -### readInt8([offset]) -- ```offset``` *{number}* Optional position to start reading data from. **Default**: ```Auto managed offset``` -- Returns *{number}* - -Read a Int8 value. 
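The entry above relies on the auto-managed read offset rather than an explicit position argument. As a minimal illustrative sketch (the byte values below are invented for the example, not taken from this changeset):

```javascript
const SmartBuffer = require('smart-buffer').SmartBuffer;

// Three signed bytes, chosen only to show the offset advancing.
const reader = SmartBuffer.fromBuffer(Buffer.from([0x01, 0xff, 0x7f]));

reader.readInt8();  // 1    (auto-managed offset advances to 1)
reader.readInt8();  // -1   (offset advances to 2)
reader.readInt8(0); // 1    (explicit offset reads position 0; the managed offset stays at 2)
reader.readInt8();  // 127  (continues from the auto-managed offset)
```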
- -### buff.readInt16BE([offset]) -### buff.readInt16LE([offset]) -### buff.readUInt16BE([offset]) -### buff.readUInt16LE([offset]) -- ```offset``` *{number}* Optional position to start reading data from. **Default**: ```Auto managed offset``` -- Returns *{number}* - -Read a 16 bit integer value. - -### buff.readInt32BE([offset]) -### buff.readInt32LE([offset]) -### buff.readUInt32BE([offset]) -### buff.readUInt32LE([offset]) -- ```offset``` *{number}* Optional position to start reading data from. **Default**: ```Auto managed offset``` -- Returns *{number}* - -Read a 32 bit integer value. - - -### buff.writeInt8(value[, offset]) -### buff.writeUInt8(value[, offset]) -- ```value``` *{number}* The value to write. -- ```offset``` *{number}* An optional offset to write this value to. **Default:** ```Auto managed offset``` -- Returns *{this}* - -Write an Int8 value. - -### buff.insertInt8(value, offset) -### buff.insertUInt8(value, offset) -- ```value``` *{number}* The value to insert. -- ```offset``` *{number}* The offset to insert this data at. -- Returns *{this}* - -Insert an Int8 value. - - -### buff.writeInt16BE(value[, offset]) -### buff.writeInt16LE(value[, offset]) -### buff.writeUInt16BE(value[, offset]) -### buff.writeUInt16LE(value[, offset]) -- ```value``` *{number}* The value to write. -- ```offset``` *{number}* An optional offset to write this value to. **Default:** ```Auto managed offset``` -- Returns *{this}* - -Write a 16 bit integer value. - -### buff.insertInt16BE(value, offset) -### buff.insertInt16LE(value, offset) -### buff.insertUInt16BE(value, offset) -### buff.insertUInt16LE(value, offset) -- ```value``` *{number}* The value to insert. -- ```offset``` *{number}* The offset to insert this data at. -- Returns *{this}* - -Insert a 16 bit integer value. - - -### buff.writeInt32BE(value[, offset]) -### buff.writeInt32LE(value[, offset]) -### buff.writeUInt32BE(value[, offset]) -### buff.writeUInt32LE(value[, offset]) -- ```value``` *{number}* The value to write. -- ```offset``` *{number}* An optional offset to write this value to. **Default:** ```Auto managed offset``` -- Returns *{this}* - -Write a 32 bit integer value. - -### buff.insertInt32BE(value, offset) -### buff.insertInt32LE(value, offset) -### buff.insertUInt32BE(value, offset) -### buff.insertUInt32LE(value, offset) -- ```value``` *{number}* The value to insert. -- ```offset``` *{number}* The offset to insert this data at. -- Returns *{this}* - -Insert a 32 bit integer value. - - -## Floating Point Numbers - -### buff.readFloatBE([offset]) -### buff.readFloatLE([offset]) -- ```offset``` *{number}* Optional position to start reading data from. **Default**: ```Auto managed offset``` -- Returns *{number}* - -Read a Float value. - -### buff.readDoubleBE([offset]) -### buff.readDoubleLE([offset]) -- ```offset``` *{number}* Optional position to start reading data from. **Default**: ```Auto managed offset``` -- Returns *{number}* - -Read a Double value. - - -### buff.writeFloatBE(value[, offset]) -### buff.writeFloatLE(value[, offset]) -- ```value``` *{number}* The value to write. -- ```offset``` *{number}* An optional offset to write this value to. **Default:** ```Auto managed offset``` -- Returns *{this}* - -Write a Float value. - -### buff.insertFloatBE(value, offset) -### buff.insertFloatLE(value, offset) -- ```value``` *{number}* The value to insert. -- ```offset``` *{number}* The offset to insert this data at. -- Returns *{this}* - -Insert a Float value. 
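As with the integer helpers, the float read, write, and insert calls above share the auto-managed offsets. A small sketch, using values that are exactly representable in single precision:

```javascript
const SmartBuffer = require('smart-buffer').SmartBuffer;

const buff = new SmartBuffer();
buff.writeFloatLE(1.5);     // appends 4 bytes at the write offset
buff.insertFloatBE(2.5, 0); // inserts 4 bytes at position 0, shifting the existing data

buff.readFloatBE(); // 2.5  (the inserted value now sits at the front)
buff.readFloatLE(); // 1.5
```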
- - -### buff.writeDoubleBE(value[, offset]) -### buff.writeDoubleLE(value[, offset]) -- ```value``` *{number}* The value to write. -- ```offset``` *{number}* An optional offset to write this value to. **Default:** ```Auto managed offset``` -- Returns *{this}* - -Write a Double value. - -### buff.insertDoubleBE(value, offset) -### buff.insertDoubleLE(value, offset) -- ```value``` *{number}* The value to insert. -- ```offset``` *{number}* The offset to insert this data at. -- Returns *{this}* - -Insert a Double value. - -## Strings - -### buff.readString() -### buff.readString(size[, encoding]) -### buff.readString(encoding) -- ```size``` *{number}* The number of bytes to read. **Default:** ```Reads to the end of the Buffer.``` -- ```encoding``` *{string}* The string encoding to use. **Default:** ```utf8```. - -Read a string value. - -Examples: -```javascript -const buff = SmartBuffer.fromBuffer(Buffer.from('hello there', 'utf8')); -buff.readString(); // 'hello there' -buff.readString(2); // 'he' -buff.readString(2, 'utf8'); // 'he' -buff.readString('utf8'); // 'hello there' -``` - -### buff.writeString(value) -### buff.writeString(value[, offset]) -### buff.writeString(value[, encoding]) -### buff.writeString(value[, offset[, encoding]]) -- ```value``` *{string}* The string value to write. -- ```offset``` *{number}* The offset to write this value to. **Default:** ```Auto managed offset``` -- ```encoding``` *{string}* An optional string encoding to use. **Default:** ```utf8``` - -Write a string value. - -Examples: -```javascript -buff.writeString('hello'); // Auto managed offset -buff.writeString('hello', 2); -buff.writeString('hello', 'utf8') // Auto managed offset -buff.writeString('hello', 2, 'utf8'); -``` - -### buff.insertString(value, offset[, encoding]) -- ```value``` *{string}* The string value to write. -- ```offset``` *{number}* The offset to write this value to. -- ```encoding``` *{string}* An optional string encoding to use. **Default:** ```utf8``` - -Insert a string value. - -Examples: -```javascript -buff.insertString('hello', 2); -buff.insertString('hello', 2, 'utf8'); -``` - -## Null Terminated Strings - -### buff.readStringNT() -### buff.readStringNT(encoding) -- ```encoding``` *{string}* The string encoding to use. **Default:** ```utf8```. - -Read a null terminated string value. (If a null is not found, it will read to the end of the Buffer). - -Examples: -```javascript -const buff = SmartBuffer.fromBuffer(Buffer.from('hello\0 there', 'utf8')); -buff.readStringNT(); // 'hello' - -// If we called this again: -buff.readStringNT(); // ' there' -``` - -### buff.writeStringNT(value) -### buff.writeStringNT(value[, offset]) -### buff.writeStringNT(value[, encoding]) -### buff.writeStringNT(value[, offset[, encoding]]) -- ```value``` *{string}* The string value to write. -- ```offset``` *{number}* The offset to write this value to. **Default:** ```Auto managed offset``` -- ```encoding``` *{string}* An optional string encoding to use. **Default:** ```utf8``` - -Write a null terminated string value. - -Examples: -```javascript -buff.writeStringNT('hello'); // Auto managed offset <Buffer 68 65 6c 6c 6f 00> -buff.writeStringNT('hello', 2); // <Buffer 00 00 68 65 6c 6c 6f 00> -buff.writeStringNT('hello', 'utf8') // Auto managed offset -buff.writeStringNT('hello', 2, 'utf8'); -``` - -### buff.insertStringNT(value, offset[, encoding]) -- ```value``` *{string}* The string value to write. -- ```offset``` *{number}* The offset to write this value to. 
-- ```encoding``` *{string}* An optional string encoding to use. **Default:** ```utf8``` - -Insert a null terminated string value. - -Examples: -```javascript -buff.insertStringNT('hello', 2); -buff.insertStringNT('hello', 2, 'utf8'); -``` - -## Buffers - -### buff.readBuffer([length]) -- ```length``` *{number}* The number of bytes to read into a Buffer. **Default:** ```Reads to the end of the Buffer``` - -Read a Buffer of a specified size. - -### buff.writeBuffer(value[, offset]) -- ```value``` *{Buffer}* The buffer value to write. -- ```offset``` *{number}* An optional offset to write the value to. **Default:** ```Auto managed offset``` - -### buff.insertBuffer(value, offset) -- ```value``` *{Buffer}* The buffer value to write. -- ```offset``` *{number}* The offset to write the value to. - - -### buff.readBufferNT() - -Read a null terminated Buffer. - -### buff.writeBufferNT(value[, offset]) -- ```value``` *{Buffer}* The buffer value to write. -- ```offset``` *{number}* An optional offset to write the value to. **Default:** ```Auto managed offset``` - -Write a null terminated Buffer. - - -### buff.insertBufferNT(value, offset) -- ```value``` *{Buffer}* The buffer value to write. -- ```offset``` *{number}* The offset to write the value to. - -Insert a null terminated Buffer. - - -## Offsets - -### buff.readOffset -### buff.readOffset(offset) -- ```offset``` *{number}* The new read offset value to set. -- Returns: ```The current read offset``` - -Gets or sets the current read offset. - -Examples: -```javascript -const currentOffset = buff.readOffset; // 5 - -buff.readOffset = 10; - -console.log(buff.readOffset) // 10 -``` - -### buff.writeOffset -### buff.writeOffset(offset) -- ```offset``` *{number}* The new write offset value to set. -- Returns: ```The current write offset``` - -Gets or sets the current write offset. - -Examples: -```javascript -const currentOffset = buff.writeOffset; // 5 - -buff.writeOffset = 10; - -console.log(buff.writeOffset) // 10 -``` - -### buff.encoding -### buff.encoding(encoding) -- ```encoding``` *{string}* The new string encoding to set. -- Returns: ```The current string encoding``` - -Gets or sets the current string encoding. - -Examples: -```javascript -const currentEncoding = buff.encoding; // 'utf8' - -buff.encoding = 'ascii'; - -console.log(buff.encoding) // 'ascii' -``` - -## Other - -### buff.clear() - -Clears and resets the SmartBuffer instance. - -### buff.remaining() -- Returns ```Remaining data left to be read``` - -Gets the number of remaining bytes to be read. - - -### buff.internalBuffer -- Returns: *{Buffer}* - -Gets the internally managed Buffer (Includes unmanaged data). - -Examples: -```javascript -const buff = SmartBuffer.fromSize(16); -buff.writeString('hello'); -console.log(buff.internalBuffer); // <Buffer 68 65 6c 6c 6f 00 00 00 00 00 00 00 00 00 00 00> -``` - -### buff.toBuffer() -- Returns: *{Buffer}* - -Gets a sliced Buffer instance of the internally managed Buffer. (Only includes managed data) - -Examples: -```javascript -const buff = SmartBuffer.fromSize(16); -buff.writeString('hello'); -console.log(buff.toBuffer()); // <Buffer 68 65 6c 6c 6f> -``` - -### buff.toString([encoding]) -- ```encoding``` *{string}* The string encoding to use when converting to a string. **Default:** ```utf8``` -- Returns *{string}* - -Gets a string representation of all data in the SmartBuffer. - -### buff.destroy() - -Destroys the SmartBuffer instance. 
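To round out the utility methods above, a short sketch of `remaining()`, `clear()`, and `toString()` used together (illustrative only):

```javascript
const SmartBuffer = require('smart-buffer').SmartBuffer;

const buff = SmartBuffer.fromBuffer(Buffer.from('hello there', 'utf8'));
buff.readString(6); // 'hello '
buff.remaining();   // 5 bytes still unread

buff.clear();       // resets offsets and tracked data
buff.remaining();   // 0
buff.writeString('reused');
buff.toString();    // 'reused'
```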
- - - -## License - -This work is licensed under the [MIT license](http://en.wikipedia.org/wiki/MIT_License). \ No newline at end of file diff --git a/node_modules/smart-buffer/docs/CHANGELOG.md b/node_modules/smart-buffer/docs/CHANGELOG.md deleted file mode 100644 index 1199a4d6d2353..0000000000000 --- a/node_modules/smart-buffer/docs/CHANGELOG.md +++ /dev/null @@ -1,70 +0,0 @@ -# Change Log -## 4.1.0 -> Released 07/24/2019 -* Adds int64 support for node v12+ -* Drops support for node v4 - -## 4.0 -> Released 10/21/2017 -* Major breaking changes arriving in v4. - -### New Features -* Ability to read data from a specific offset. ex: readInt8(5) -* Ability to write over data when an offset is given (see breaking changes) ex: writeInt8(5, 0); -* Ability to set internal read and write offsets. - - - -### Breaking Changes - -* Old constructor patterns have been completely removed. It's now required to use the SmartBuffer.fromXXX() factory constructors. Read more on the v4 docs. -* rewind(), skip(), moveTo() have been removed. -* Internal private properties are now prefixed with underscores (_). -* **All** writeXXX() methods that are given an offset will now **overwrite data** instead of insert -* insertXXX() methods have been added for when you want to insert data at a specific offset (this replaces the old behavior of writeXXX() when an offset was provided) - - -### Other Changes -* Standardizd error messaging -* Standardized offset/length bounds and sanity checking -* General overall cleanup of code. - -## 3.0.3 -> Released 02/19/2017 -* Adds missing type definitions for some internal functions. - -## 3.0.2 -> Released 02/17/2017 - -### Bug Fixes -* Fixes a bug where using readString with a length of zero resulted in reading the remaining data instead of returning an empty string. (Fixed by Seldszar) - -## 3.0.1 -> Released 02/15/2017 - -### Bug Fixes -* Fixes a bug leftover from the TypeScript refactor where .readIntXXX() resulted in .readUIntXXX() being called by mistake. - -## 3.0 -> Released 02/12/2017 - -### Bug Fixes -* readUIntXXXX() methods will now throw an exception if they attempt to read beyond the bounds of the valid buffer data available. - * **Note** This is technically a breaking change, so version is bumped to 3.x. - -## 2.0 -> Relased 01/30/2017 - -### New Features: - -* Entire package re-written in TypeScript (2.1) -* Backwards compatibility is preserved for now -* New factory methods for creating SmartBuffer instances - * SmartBuffer.fromSize() - * SmartBuffer.fromBuffer() - * SmartBuffer.fromOptions() -* New SmartBufferOptions constructor options -* Added additional tests - -### Bug Fixes: -* Fixes a bug where reading null terminated strings may result in an exception. diff --git a/node_modules/smart-buffer/docs/README_v3.md b/node_modules/smart-buffer/docs/README_v3.md deleted file mode 100644 index b7c48b8b5444e..0000000000000 --- a/node_modules/smart-buffer/docs/README_v3.md +++ /dev/null @@ -1,367 +0,0 @@ -smart-buffer [![Build Status](https://travis-ci.org/JoshGlazebrook/smart-buffer.svg?branch=master)](https://travis-ci.org/JoshGlazebrook/smart-buffer) [![Coverage Status](https://coveralls.io/repos/github/JoshGlazebrook/smart-buffer/badge.svg?branch=master)](https://coveralls.io/github/JoshGlazebrook/smart-buffer?branch=master) -============= - -smart-buffer is a light Buffer wrapper that takes away the need to keep track of what position to read and write data to and from the underlying Buffer. 
It also adds null terminating string operations and **grows** as you add more data. - -![stats](https://nodei.co/npm/smart-buffer.png?downloads=true&downloadRank=true&stars=true "stats") - -### What it's useful for: - -I created smart-buffer because I wanted to simplify the process of using Buffer for building and reading network packets to send over a socket. Rather than having to keep track of which position I need to write a UInt16 to after adding a string of variable length, I simply don't have to. - -Key Features: -* Proxies all of the Buffer write and read functions. -* Keeps track of read and write positions for you. -* Grows the internal Buffer as you add data to it. -* Useful string operations. (Null terminating strings) -* Allows for inserting values at specific points in the internal Buffer. -* Built in TypeScript -* Type Definitions Provided - -Requirements: -* Node v4.0+ is supported at this time. (Versions prior to 2.0 will work on node 0.10) - - -#### Note: -smart-buffer can be used for writing to an underlying buffer as well as reading from it. It however does not function correctly if you're mixing both read and write operations with each other. - -## Breaking Changes with 2.0 -The latest version (2.0+) is written in TypeScript, and are compiled to ES6 Javascript. This means the earliest Node.js it supports will be 4.x (in strict mode.) If you're using version 6 and above it will work without any issues. From an API standpoint, 2.0 is backwards compatible. The only difference is SmartBuffer is not exported directly as the root module. - -## Breaking Changes with 3.0 -Starting with 3.0, if any of the readIntXXXX() methods are called and the requested data is larger than the bounds of the internally managed valid buffer data, an exception will now be thrown. - -## Installing: - -`npm install smart-buffer` - -or - -`yarn add smart-buffer` - -Note: The published NPM package includes the built javascript library. -If you cloned this repo and wish to build the library manually use: - -`tsc -p ./` - -## Using smart-buffer - -### Example - -Say you were building a packet that had to conform to the following protocol: - -`[PacketType:2][PacketLength:2][Data:XX]` - -To build this packet using the vanilla Buffer class, you would have to count up the length of the data payload beforehand. You would also need to keep track of the current "cursor" position in your Buffer so you write everything in the right places. With smart-buffer you don't have to do either of those things. 
- -```javascript -// 1.x (javascript) -var SmartBuffer = require('smart-buffer'); - -// 1.x (typescript) -import SmartBuffer = require('smart-buffer'); - -// 2.x+ (javascript) -const SmartBuffer = require('smart-buffer').SmartBuffer; - -// 2.x+ (typescript) -import { SmartBuffer, SmartBufferOptions} from 'smart-buffer'; - -function createLoginPacket(username, password, age, country) { - let packet = new SmartBuffer(); - packet.writeUInt16LE(0x0060); // Login Packet Type/ID - packet.writeStringNT(username); - packet.writeStringNT(password); - packet.writeUInt8(age); - packet.writeStringNT(country); - packet.writeUInt16LE(packet.length - 2, 2); - - return packet.toBuffer(); -} -``` -With the above function, you now can do this: -```javascript -let login = createLoginPacket("Josh", "secret123", 22, "United States"); - -// <Buffer 60 00 1e 00 4a 6f 73 68 00 73 65 63 72 65 74 31 32 33 00 16 55 6e 69 74 65 64 20 53 74 61 74 65 73 00> -``` -Notice that the `[PacketLength:2]` part of the packet was inserted after we had added everything else, and as shown in the Buffer dump above, is in the correct location along with everything else. - -Reading back the packet we created above is just as easy: -```javascript - -let reader = SmartBuffer.fromBuffer(login); - -let logininfo = { - packetType: reader.readUInt16LE(), - packetLength: reader.readUInt16LE(), - username: reader.readStringNT(), - password: reader.readStringNT(), - age: reader.readUInt8(), - country: reader.readStringNT() -}; - -/* -{ - packetType: 96, (0x0060) - packetLength: 30, - username: 'Josh', - password: 'secret123', - age: 22, - country: 'United States' -}; -*/ -``` - -# Api Reference: - -### Constructing a smart-buffer - -smart-buffer has a few different ways to construct an instance. Starting with version 2.0, the following factory methods are preffered. - -```javascript -let SmartBuffer = require('smart-buffer'); - -// Creating SmartBuffer from existing Buffer -let buff = SmartBuffer.fromBuffer(buffer); // Creates instance from buffer. (Uses default utf8 encoding) -let buff = SmartBuffer.fromBuffer(buffer, 'ascii'); // Creates instance from buffer with ascii encoding for Strings. - -// Creating SmartBuffer with specified internal Buffer size. -let buff = SmartBuffer.fromSize(1024); // Creates instance with internal Buffer size of 1024. -let buff = SmartBuffer.fromSize(1024, 'utf8'); // Creates instance with intenral Buffer size of 1024, and utf8 encoding. - -// Creating SmartBuffer with options object. This one specifies size and encoding. -let buff = SmartBuffer.fromOptions({ - size: 1024, - encoding: 'ascii' -}); - -// Creating SmartBuffer with options object. This one specified an existing Buffer. -let buff = SmartBuffer.fromOptions({ - buff: buffer -}); - -// Just want a regular SmartBuffer with all default options? -let buff = new SmartBuffer(); -``` - -## Backwards Compatibility: - -All constructors used prior to 2.0 still are supported. However it's not recommended to use these. - -```javascript -let writer = new SmartBuffer(); // Defaults to utf8, 4096 length internal Buffer. -let writer = new SmartBuffer(1024); // Defaults to utf8, 1024 length internal Buffer. -let writer = new SmartBuffer('ascii'); // Sets to ascii encoding, 4096 length internal buffer. -let writer = new SmartBuffer(1024, 'ascii'); // Sets to ascii encoding, 1024 length internal buffer. -``` - -## Reading Data - -smart-buffer supports all of the common read functions you will find in the vanilla Buffer class. 
The only difference is, you do not need to specify which location to start reading from. This is possible because as you read data out of a smart-buffer, it automatically progresses an internal read offset/position to know where to pick up from on the next read. - -## Reading Numeric Values - -When reading numeric values, you simply need to call the function you want, and the data is returned. - -Supported Operations: -* readInt8 -* readInt16BE -* readInt16LE -* readInt32BE -* readInt32LE -* readBigInt64LE -* readBigInt64BE -* readUInt8 -* readUInt16BE -* readUInt16LE -* readUInt32BE -* readUInt32LE -* readBigUInt64LE -* readBigUInt64BE -* readFloatBE -* readFloatLE -* readDoubleBE -* readDoubleLE - -```javascript -let reader = new SmartBuffer(somebuffer); -let num = reader.readInt8(); -``` - -## Reading String Values - -When reading String values, you can either choose to read a null terminated string, or a string of a specified length. - -### SmartBuffer.readStringNT( [encoding] ) -> `String` **String encoding to use** - Defaults to the encoding set in the constructor. - -returns `String` - -> Note: When readStringNT is called and there is no null character found, smart-buffer will read to the end of the internal Buffer. - -### SmartBuffer.readString( [length] ) -### SmartBuffer.readString( [encoding] ) -### SmartBuffer.readString( [length], [encoding] ) -> `Number` **Length of the string to read** - -> `String` **String encoding to use** - Defaults to the encoding set in the constructor, or utf8. - -returns `String` - -> Note: When readString is called without a specified length, smart-buffer will read to the end of the internal Buffer. - - - -## Reading Buffer Values - -### SmartBuffer.readBuffer( length ) -> `Number` **Length of data to read into a Buffer** - -returns `Buffer` - -> Note: This function uses `slice` to retrieve the Buffer. - - -### SmartBuffer.readBufferNT() - -returns `Buffer` - -> Note: This reads the next sequence of bytes in the buffer until a null (0x00) value is found. (Null terminated buffer) -> Note: This function uses `slice` to retrieve the Buffer. - - -## Writing Data - -smart-buffer supports all of the common write functions you will find in the vanilla Buffer class. The only difference is, you do not need to specify which location to write to in your Buffer by default. You do however have the option of **inserting** a piece of data into your smart-buffer at a given location. - - -## Writing Numeric Values - - -For numeric values, you simply need to call the function you want, and the data is written at the end of the internal Buffer's current write position. You can specify an offset/position to **insert** the given value at, but keep in mind this does not override data at the given position. 
This feature also does not work properly when inserting a value beyond the current internal length of the smart-buffer (length being the .length property of the smart-buffer instance you're writing to) - -Supported Operations: -* writeInt8 -* writeInt16BE -* writeInt16LE -* writeInt32BE -* writeInt32LE -* writeBigInt64BE -* writeBigInt64LE -* writeUInt8 -* writeUInt16BE -* writeUInt16LE -* writeUInt32BE -* writeUInt32LE -* writeBigUInt64BE -* writeBigUInt64LE -* writeFloatBE -* writeFloatLE -* writeDoubleBE -* writeDoubleLE - -The following signature is the same for all the above functions: - -### SmartBuffer.writeInt8( value, [offset] ) -> `Number` **A valid Int8 number** - -> `Number` **The position to insert this value at** - -returns this - -> Note: All write operations return `this` to allow for chaining. - -## Writing String Values - -When reading String values, you can either choose to write a null terminated string, or a non null terminated string. - -### SmartBuffer.writeStringNT( value, [offset], [encoding] ) -### SmartBuffer.writeStringNT( value, [offset] ) -### SmartBuffer.writeStringNT( value, [encoding] ) -> `String` **String value to write** - -> `Number` **The position to insert this String at** - -> `String` **The String encoding to use.** - Defaults to the encoding set in the constructor, or utf8. - -returns this - -### SmartBuffer.writeString( value, [offset], [encoding] ) -### SmartBuffer.writeString( value, [offset] ) -### SmartBuffer.writeString( value, [encoding] ) -> `String` **String value to write** - -> `Number` **The position to insert this String at** - -> `String` **The String encoding to use** - Defaults to the encoding set in the constructor, or utf8. - -returns this - - -## Writing Buffer Values - -### SmartBuffer.writeBuffer( value, [offset] ) -> `Buffer` **Buffer value to write** - -> `Number` **The position to insert this Buffer's content at** - -returns this - -### SmartBuffer.writeBufferNT( value, [offset] ) -> `Buffer` **Buffer value to write** - -> `Number` **The position to insert this Buffer's content at** - -returns this - - -## Utility Functions - -### SmartBuffer.clear() -Resets the SmartBuffer to its default state where it can be reused for reading or writing. - -### SmartBuffer.remaining() - -returns `Number` The amount of data left to read based on the current read Position. - -### SmartBuffer.skip( value ) -> `Number` **The amount of bytes to skip ahead** - -Skips the read position ahead by the given value. - -returns this - -### SmartBuffer.rewind( value ) -> `Number` **The amount of bytes to reward backwards** - -Rewinds the read position backwards by the given value. - -returns this - -### SmartBuffer.moveTo( position ) -> `Number` **The point to skip the read position to** - -Moves the read position to the given point. -returns this - -### SmartBuffer.toBuffer() - -returns `Buffer` A Buffer containing the contents of the internal Buffer. - -> Note: This uses the slice function. - -### SmartBuffer.toString( [encoding] ) -> `String` **The String encoding to use** - Defaults to the encoding set in the constructor, or utf8. - -returns `String` The internal Buffer in String representation. - -## Properties - -### SmartBuffer.length - -returns `Number` **The length of the data that is being tracked in the internal Buffer** - Does NOT return the absolute length of the internal Buffer being written to. - -## License - -This work is licensed under the [MIT license](http://en.wikipedia.org/wiki/MIT_License). 
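The v3 docs above describe `skip()`, `rewind()`, and `moveTo()` without a usage example (and these cursor helpers were removed in v4). A minimal sketch, assuming the v3.x API:

```javascript
// smart-buffer v3.x only: skip/rewind/moveTo no longer exist in v4.
const SmartBuffer = require('smart-buffer').SmartBuffer;

const reader = SmartBuffer.fromBuffer(Buffer.from('abcdef', 'utf8'));
reader.readString(2); // 'ab'  (read position is now 2)
reader.skip(2);       //       (read position is now 4)
reader.readString();  // 'ef'  (reads to the end)
reader.rewind(6);     //       (back to position 0)
reader.moveTo(3);     //       (jump straight to position 3)
reader.readString();  // 'def'
```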
\ No newline at end of file diff --git a/node_modules/socks-proxy-agent/README.md b/node_modules/socks-proxy-agent/README.md deleted file mode 100644 index 4df184ffaac8a..0000000000000 --- a/node_modules/socks-proxy-agent/README.md +++ /dev/null @@ -1,152 +0,0 @@ -socks-proxy-agent -================ -### A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS -[![Build Status](https://github.com/TooTallNate/node-socks-proxy-agent/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-socks-proxy-agent/actions?workflow=Node+CI) - -This module provides an `http.Agent` implementation that connects to a -specified SOCKS proxy server, and can be used with the built-in `http` -and `https` modules. - -It can also be used in conjunction with the `ws` module to establish a WebSocket -connection over a SOCKS proxy. See the "Examples" section below. - -Installation ------------- - -Install with `npm`: - -``` bash -$ npm install socks-proxy-agent -``` - - -Examples --------- - -#### TypeScript example - -```ts -import https from 'https'; -import { SocksProxyAgent } from 'socks-proxy-agent'; - -const info = { - host: 'br41.nordvpn.com', - userId: 'your-name@gmail.com', - password: 'abcdef12345124' -}; -const agent = new SocksProxyAgent(info); - -https.get('https://jsonip.org', { agent }, (res) => { - console.log(res.headers); - res.pipe(process.stdout); -}); -``` - -#### `http` module example - -```js -var url = require('url'); -var http = require('http'); -var SocksProxyAgent = require('socks-proxy-agent'); - -// SOCKS proxy to connect to -var proxy = process.env.socks_proxy || 'socks://127.0.0.1:1080'; -console.log('using proxy server %j', proxy); - -// HTTP endpoint for the proxy to connect to -var endpoint = process.argv[2] || 'http://nodejs.org/api/'; -console.log('attempting to GET %j', endpoint); -var opts = url.parse(endpoint); - -// create an instance of the `SocksProxyAgent` class with the proxy server information -var agent = new SocksProxyAgent(proxy); -opts.agent = agent; - -http.get(opts, function (res) { - console.log('"response" event!', res.headers); - res.pipe(process.stdout); -}); -``` - -#### `https` module example - -```js -var url = require('url'); -var https = require('https'); -var SocksProxyAgent = require('socks-proxy-agent'); - -// SOCKS proxy to connect to -var proxy = process.env.socks_proxy || 'socks://127.0.0.1:1080'; -console.log('using proxy server %j', proxy); - -// HTTP endpoint for the proxy to connect to -var endpoint = process.argv[2] || 'https://encrypted.google.com/'; -console.log('attempting to GET %j', endpoint); -var opts = url.parse(endpoint); - -// create an instance of the `SocksProxyAgent` class with the proxy server information -var agent = new SocksProxyAgent(proxy); -opts.agent = agent; - -https.get(opts, function (res) { - console.log('"response" event!', res.headers); - res.pipe(process.stdout); -}); -``` - -#### `ws` WebSocket connection example - -``` js -var WebSocket = require('ws'); -var SocksProxyAgent = require('socks-proxy-agent'); - -// SOCKS proxy to connect to -var proxy = process.env.socks_proxy || 'socks://127.0.0.1:1080'; -console.log('using proxy server %j', proxy); - -// WebSocket endpoint for the proxy to connect to -var endpoint = process.argv[2] || 'ws://echo.websocket.org'; -console.log('attempting to connect to WebSocket %j', endpoint); - -// create an instance of the `SocksProxyAgent` class with the proxy server information -var agent = new SocksProxyAgent(proxy); - -// initiate the WebSocket connection -var 
socket = new WebSocket(endpoint, { agent: agent }); - -socket.on('open', function () { - console.log('"open" event!'); - socket.send('hello world'); -}); - -socket.on('message', function (data, flags) { - console.log('"message" event! %j %j', data, flags); - socket.close(); -}); -``` - -License -------- - -(The MIT License) - -Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/socks/.prettierrc.yaml b/node_modules/socks/.prettierrc.yaml deleted file mode 100644 index d7b7335049b99..0000000000000 --- a/node_modules/socks/.prettierrc.yaml +++ /dev/null @@ -1,7 +0,0 @@ -parser: typescript -printWidth: 80 -tabWidth: 2 -singleQuote: true -trailingComma: all -arrowParens: always -bracketSpacing: false \ No newline at end of file diff --git a/node_modules/socks/.travis.yml b/node_modules/socks/.travis.yml deleted file mode 100644 index 2c6099bcdc2be..0000000000000 --- a/node_modules/socks/.travis.yml +++ /dev/null @@ -1,11 +0,0 @@ -language: node_js -node_js: - - 10 - - 12 - - 14 - - stable - -before_install: - - npm add -g typescript prettier tslint coveralls - -script: "npm run build && npm run coveralls" \ No newline at end of file diff --git a/node_modules/socks/README.md b/node_modules/socks/README.md deleted file mode 100644 index f7fba45748711..0000000000000 --- a/node_modules/socks/README.md +++ /dev/null @@ -1,670 +0,0 @@ -# socks [![Build Status](https://travis-ci.org/JoshGlazebrook/socks.svg?branch=master)](https://travis-ci.org/JoshGlazebrook/socks) [![Coverage Status](https://coveralls.io/repos/github/JoshGlazebrook/socks/badge.svg?branch=master)](https://coveralls.io/github/JoshGlazebrook/socks?branch=v2) - -Fully featured SOCKS proxy client supporting SOCKSv4, SOCKSv4a, and SOCKSv5. Includes Bind and Associate functionality. - -### Features - -* Supports SOCKS v4, v4a, and v5 protocols. -* Supports the CONNECT, BIND, and ASSOCIATE commands. -* Supports callbacks, promises, and events for proxy connection creation async flow control. -* Supports proxy chaining (CONNECT only). -* Supports user/pass authentication. -* Built in UDP frame creation & parse functions. -* Created with TypeScript, type definitions are provided. - -### Requirements - -* Node.js v10.0+ (Please use [v1](https://github.com/JoshGlazebrook/socks/tree/82d83923ad960693d8b774cafe17443ded7ed584) for older versions of Node.js) - -### Looking for v1? 
-* Docs for v1 are available [here](https://github.com/JoshGlazebrook/socks/tree/82d83923ad960693d8b774cafe17443ded7ed584) - -## Installation - -`yarn add socks` - -or - -`npm install --save socks` - -## Usage - -```typescript -// TypeScript -import { SocksClient, SocksClientOptions, SocksClientChainOptions } from 'socks'; - -// ES6 JavaScript -import { SocksClient } from 'socks'; - -// Legacy JavaScript -const SocksClient = require('socks').SocksClient; -``` - -## Quick Start Example - -Connect to github.com (192.30.253.113) on port 80, using a SOCKS proxy. - -```javascript -const options = { - proxy: { - host: '159.203.75.200', // ipv4 or ipv6 or hostname - port: 1080, - type: 5 // Proxy version (4 or 5) - }, - - command: 'connect', // SOCKS command (createConnection factory function only supports the connect command) - - destination: { - host: '192.30.253.113', // github.com (hostname lookups are supported with SOCKS v4a and 5) - port: 80 - } -}; - -// Async/Await -try { - const info = await SocksClient.createConnection(options); - - console.log(info.socket); - // <Socket ...> (this is a raw net.Socket that is established to the destination host through the given proxy server) -} catch (err) { - // Handle errors -} - -// Promises -SocksClient.createConnection(options) -.then(info => { - console.log(info.socket); - // <Socket ...> (this is a raw net.Socket that is established to the destination host through the given proxy server) -}) -.catch(err => { - // Handle errors -}); - -// Callbacks -SocksClient.createConnection(options, (err, info) => { - if (!err) { - console.log(info.socket); - // <Socket ...> (this is a raw net.Socket that is established to the destination host through the given proxy server) - } else { - // Handle errors - } -}); -``` - -## Chaining Proxies - -**Note:** Chaining is only supported when using the SOCKS connect command, and chaining can only be done through the special factory chaining function. - -This example makes a proxy chain through two SOCKS proxies to ip-api.com. Once the connection to the destination is established it sends an HTTP request to get a JSON response that returns ip info for the requesting ip. - -```javascript -const options = { - destination: { - host: 'ip-api.com', // host names are supported with SOCKS v4a and SOCKS v5. - port: 80 - }, - command: 'connect', // Only the connect command is supported when chaining proxies. - proxies: [ // The chain order is the order in the proxies array, meaning the last proxy will establish a connection to the destination. - { - host: '159.203.75.235', // ipv4, ipv6, or hostname - port: 1081, - type: 5 - }, - { - host: '104.131.124.203', // ipv4, ipv6, or hostname - port: 1081, - type: 5 - } - ] -} - -// Async/Await -try { - const info = await SocksClient.createConnectionChain(options); - - console.log(info.socket); - // <Socket ...> (this is a raw net.Socket that is established to the destination host through the given proxy servers) - - console.log(info.socket.remoteAddress) // The remote address of the returned socket is the first proxy in the chain. - // 159.203.75.235 - - info.socket.write('GET /json HTTP/1.1\nHost: ip-api.com\n\n'); - info.socket.on('data', (data) => { - console.log(data.toString()); // ip-api.com sees that the last proxy in the chain (104.131.124.203) is connected to it. 
- /* - HTTP/1.1 200 OK - Access-Control-Allow-Origin: * - Content-Type: application/json; charset=utf-8 - Date: Sun, 24 Dec 2017 03:47:51 GMT - Content-Length: 300 - - { - "as":"AS14061 Digital Ocean, Inc.", - "city":"Clifton", - "country":"United States", - "countryCode":"US", - "isp":"Digital Ocean", - "lat":40.8326, - "lon":-74.1307, - "org":"Digital Ocean", - "query":"104.131.124.203", - "region":"NJ", - "regionName":"New Jersey", - "status":"success", - "timezone":"America/New_York", - "zip":"07014" - } - */ - }); -} catch (err) { - // Handle errors -} - -// Promises -SocksClient.createConnectionChain(options) -.then(info => { - console.log(info.socket); - // <Socket ...> (this is a raw net.Socket that is established to the destination host through the given proxy server) - - console.log(info.socket.remoteAddress) // The remote address of the returned socket is the first proxy in the chain. - // 159.203.75.235 - - info.socket.write('GET /json HTTP/1.1\nHost: ip-api.com\n\n'); - info.socket.on('data', (data) => { - console.log(data.toString()); // ip-api.com sees that the last proxy in the chain (104.131.124.203) is connected to it. - /* - HTTP/1.1 200 OK - Access-Control-Allow-Origin: * - Content-Type: application/json; charset=utf-8 - Date: Sun, 24 Dec 2017 03:47:51 GMT - Content-Length: 300 - - { - "as":"AS14061 Digital Ocean, Inc.", - "city":"Clifton", - "country":"United States", - "countryCode":"US", - "isp":"Digital Ocean", - "lat":40.8326, - "lon":-74.1307, - "org":"Digital Ocean", - "query":"104.131.124.203", - "region":"NJ", - "regionName":"New Jersey", - "status":"success", - "timezone":"America/New_York", - "zip":"07014" - } - */ - }); -}) -.catch(err => { - // Handle errors -}); - -// Callbacks -SocksClient.createConnectionChain(options, (err, info) => { - if (!err) { - console.log(info.socket); - // <Socket ...> (this is a raw net.Socket that is established to the destination host through the given proxy server) - - console.log(info.socket.remoteAddress) // The remote address of the returned socket is the first proxy in the chain. - // 159.203.75.235 - - info.socket.write('GET /json HTTP/1.1\nHost: ip-api.com\n\n'); - info.socket.on('data', (data) => { - console.log(data.toString()); // ip-api.com sees that the last proxy in the chain (104.131.124.203) is connected to it. - /* - HTTP/1.1 200 OK - Access-Control-Allow-Origin: * - Content-Type: application/json; charset=utf-8 - Date: Sun, 24 Dec 2017 03:47:51 GMT - Content-Length: 300 - - { - "as":"AS14061 Digital Ocean, Inc.", - "city":"Clifton", - "country":"United States", - "countryCode":"US", - "isp":"Digital Ocean", - "lat":40.8326, - "lon":-74.1307, - "org":"Digital Ocean", - "query":"104.131.124.203", - "region":"NJ", - "regionName":"New Jersey", - "status":"success", - "timezone":"America/New_York", - "zip":"07014" - } - */ - }); - } else { - // Handle errors - } -}); -``` - -## Bind Example (TCP Relay) - -When the bind command is sent to a SOCKS v4/v5 proxy server, the proxy server starts listening on a new TCP port and the proxy relays then remote host information back to the client. When another remote client connects to the proxy server on this port the SOCKS proxy sends a notification that an incoming connection has been accepted to the initial client and a full duplex stream is now established to the initial client and the client that connected to that special port. 
- -```javascript -const options = { - proxy: { - host: '159.203.75.235', // ipv4, ipv6, or hostname - port: 1081, - type: 5 - }, - - command: 'bind', - - // When using BIND, the destination should be the remote client that is expected to connect to the SOCKS proxy. Using 0.0.0.0 makes the Proxy accept any incoming connection on that port. - destination: { - host: '0.0.0.0', - port: 0 - } -}; - -// Creates a new SocksClient instance. -const client = new SocksClient(options); - -// When the SOCKS proxy has bound a new port and started listening, this event is fired. -client.on('bound', info => { - console.log(info.remoteHost); - /* - { - host: "159.203.75.235", - port: 57362 - } - */ -}); - -// When a client connects to the newly bound port on the SOCKS proxy, this event is fired. -client.on('established', info => { - // info.remoteHost is the remote address of the client that connected to the SOCKS proxy. - console.log(info.remoteHost); - /* - host: 67.171.34.23, - port: 49823 - */ - - console.log(info.socket); - // <Socket ...> (This is a raw net.Socket that is a connection between the initial client and the remote client that connected to the proxy) - - // Handle received data... - info.socket.on('data', data => { - console.log('recv', data); - }); -}); - -// An error occurred trying to establish this SOCKS connection. -client.on('error', err => { - console.error(err); -}); - -// Start connection to proxy -client.connect(); -``` - -## Associate Example (UDP Relay) - -When the associate command is sent to a SOCKS v5 proxy server, it sets up a UDP relay that allows the client to send UDP packets to a remote host through the proxy server, and also receive UDP packet responses back through the proxy server. - -```javascript -const dgram = require('dgram'); - -const options = { - proxy: { - host: '159.203.75.235', // ipv4, ipv6, or hostname - port: 1081, - type: 5 - }, - - command: 'associate', - - // When using associate, the destination should be the remote client that is expected to send UDP packets to the proxy server to be forwarded. This should be your local ip, or optionally the wildcard address (0.0.0.0) UDP Client <-> Proxy <-> UDP Client - destination: { - host: '0.0.0.0', - port: 0 - } -}; - -// Create a local UDP socket for sending packets to the proxy. -const udpSocket = dgram.createSocket('udp4'); -udpSocket.bind(); - -// Listen for incoming UDP packets from the proxy server. -udpSocket.on('message', (message, rinfo) => { - console.log(SocksClient.parseUDPFrame(message)); - /* - { frameNumber: 0, - remoteHost: { host: '165.227.108.231', port: 4444 }, // The remote host that replied with a UDP packet - data: <Buffer 74 65 73 74 0a> // The data - } - */ -}); - -let client = new SocksClient(options); - -// When the UDP relay is established, this event is fired and includes the UDP relay port to send data to on the proxy server. -client.on('established', info => { - console.log(info.remoteHost); - /* - { - host: '159.203.75.235', - port: 44711 - } - */ - - // Send 'hello' to 165.227.108.231:4444 - const packet = SocksClient.createUDPFrame({ - remoteHost: { host: '165.227.108.231', port: 4444 }, - data: Buffer.from('hello') - }); - udpSocket.send(packet, info.remoteHost.port, info.remoteHost.host); -}); - -// Start connection -client.connect(); -``` - -**Note:** The associate TCP connection to the proxy must remain open for the UDP relay to work. - -## Additional Examples - -[Documentation](docs/index.md) - - -## Migrating from v1 - -Looking for a guide to migrate from v1?
Look [here](docs/migratingFromV1.md) - -## Api Reference: - -**Note:** socks includes full TypeScript definitions. These can even be used without using TypeScript as most IDEs (such as VS Code) will use these type definition files for auto completion intellisense even in JavaScript files. - -* Class: SocksClient - * [new SocksClient(options[, callback])](#new-socksclientoptions) - * [Class Method: SocksClient.createConnection(options[, callback])](#class-method-socksclientcreateconnectionoptions-callback) - * [Class Method: SocksClient.createConnectionChain(options[, callback])](#class-method-socksclientcreateconnectionchainoptions-callback) - * [Class Method: SocksClient.createUDPFrame(options)](#class-method-socksclientcreateudpframedetails) - * [Class Method: SocksClient.parseUDPFrame(data)](#class-method-socksclientparseudpframedata) - * [Event: 'error'](#event-error) - * [Event: 'bound'](#event-bound) - * [Event: 'established'](#event-established) - * [client.connect()](#clientconnect) - * [client.socksClientOptions](#clientconnect) - -### SocksClient - -SocksClient establishes SOCKS proxy connections to remote destination hosts. These proxy connections are fully transparent to the server and once established act as full duplex streams. SOCKS v4, v4a, and v5 are supported, as well as the connect, bind, and associate commands. - -SocksClient supports creating connections using callbacks, promises, and async/await flow control using two static factory functions createConnection and createConnectionChain. It also internally extends EventEmitter which results in allowing event handling based async flow control. - -**SOCKS Compatibility Table** - -| Socks Version | TCP | UDP | IPv4 | IPv6 | Hostname | -| --- | :---: | :---: | :---: | :---: | :---: | -| SOCKS v4 | ✅ | ❌ | ✅ | ❌ | ❌ | -| SOCKS v4a | ✅ | ❌ | ✅ | ❌ | ✅ | -| SOCKS v5 | ✅ | ✅ | ✅ | ✅ | ✅ | - -### new SocksClient(options) - -* ```options``` {SocksClientOptions} - An object describing the SOCKS proxy to use, the command to send and establish, and the destination host to connect to. - -### SocksClientOptions - -```typescript -{ - proxy: { - host: '159.203.75.200', // ipv4, ipv6, or hostname - port: 1080, - type: 5 // Proxy version (4 or 5). For v4a, just use 4. - - // Optional fields - userId: 'some username', // Used for SOCKS4 userId auth, and SOCKS5 user/pass auth in conjunction with password. - password: 'some password' // Used in conjunction with userId for user/pass auth for SOCKS5 proxies. - }, - - command: 'connect', // connect, bind, associate - - destination: { - host: '192.30.253.113', // ipv4, ipv6, hostname. Hostnames work with v4a and v5. - port: 80 - }, - - // Optional fields - timeout: 30000, // How long to wait to establish a proxy connection. (defaults to 30 seconds) - - set_tcp_nodelay: true // If true, will turn on the underlying sockets TCP_NODELAY option. -} -``` - -### Class Method: SocksClient.createConnection(options[, callback]) -* ```options``` { SocksClientOptions } - An object describing the SOCKS proxy to use, the command to send and establish, and the destination host to connect to. -* ```callback``` { Function } - Optional callback function that is called when the proxy connection is established, or an error occurs. -* ```returns``` { Promise } - A Promise is returned that is resolved when the proxy connection is established, or rejected when an error occurs. - -Creates a new proxy connection through the given proxy to the given destination host. 
This factory function supports callbacks and promises for async flow control. - -**Note:** If a callback function is provided, the promise will always resolve regardless of an error occurring. Please be sure to exclusively use either promises or callbacks when using this factory function. - -```typescript -const options = { - proxy: { - host: '159.203.75.200', // ipv4, ipv6, or hostname - port: 1080, - type: 5 // Proxy version (4 or 5) - }, - - command: 'connect', // connect, bind, associate - - destination: { - host: '192.30.253.113', // ipv4, ipv6, or hostname - port: 80 - } -} - -// Await/Async (uses a Promise) -try { - const info = await SocksClient.createConnection(options); - console.log(info); - /* - { - socket: <Socket ...>, // Raw net.Socket - } - */ - // <Socket ...> (this is a raw net.Socket that is established to the destination host through the given proxy server) - -} catch (err) { - // Handle error... -} - -// Promise -SocksClient.createConnection(options) -.then(info => { - console.log(info); - /* - { - socket: <Socket ...>, // Raw net.Socket - } - */ -}) -.catch(err => { - // Handle error... -}); - -// Callback -SocksClient.createConnection(options, (err, info) => { - if (!err) { - console.log(info); - /* - { - socket: <Socket ...>, // Raw net.Socket - } - */ - } else { - // Handle error... - } -}); -``` - -### Class Method: SocksClient.createConnectionChain(options[, callback]) -* ```options``` { SocksClientChainOptions } - An object describing a list of SOCKS proxies to use, the command to send and establish, and the destination host to connect to. -* ```callback``` { Function } - Optional callback function that is called when the proxy connection chain is established, or an error occurs. -* ```returns``` { Promise } - A Promise is returned that is resolved when the proxy connection chain is established, or rejected when an error occurs. - -Creates a new proxy connection chain through a list of at least two SOCKS proxies to the given destination host. This factory method supports callbacks and promises for async flow control. - -**Note:** If a callback function is provided, the promise will always resolve regardless of an error occurring. Please be sure to exclusively use either promises or callbacks when using this factory function. - -**Note:** At least two proxies must be provided for the chain to be established. - -```typescript -const options = { - proxies: [ // The chain order is the order in the proxies array, meaning the last proxy will establish a connection to the destination. - { - host: '159.203.75.235', // ipv4, ipv6, or hostname - port: 1081, - type: 5 - }, - { - host: '104.131.124.203', // ipv4, ipv6, or hostname - port: 1081, - type: 5 - } - ], - - command: 'connect', // Only connect is supported in chaining mode. - - destination: { - host: '192.30.253.113', // ipv4, ipv6, hostname - port: 80 - } -} -``` - -### Class Method: SocksClient.createUDPFrame(details) -* ```details``` { SocksUDPFrameDetails } - An object containing the remote host, frame number, and frame data to use when creating a SOCKS UDP frame packet. -* ```returns``` { Buffer } - A Buffer containing all of the UDP frame data. - -Creates a SOCKS UDP frame relay packet that is sent and received via a SOCKS proxy when using the associate command for UDP packet forwarding.
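For quick reference, a minimal sketch of calling this method (the host, port, and payload are illustrative; the argument shape mirrors the SocksUDPFrameDetails structure shown next):

```javascript
// Build a SOCKS UDP frame addressed to 1.2.3.4:1234 carrying the bytes 'hello'.
const frame = SocksClient.createUDPFrame({
  remoteHost: { host: '1.2.3.4', port: 1234 },
  data: Buffer.from('hello')
});

// `frame` is a Buffer that can be sent to the UDP relay port reported by the
// 'established' event of an associate-mode SocksClient (see the Associate example above).
```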
- -**SocksUDPFrameDetails** - -```typescript -{ - frameNumber: 0, // The frame number (used for breaking up larger packets) - - remoteHost: { // The remote host to have the proxy send data to, or the remote host that send this data. - host: '1.2.3.4', - port: 1234 - }, - - data: <Buffer 01 02 03 04...> // A Buffer instance of data to include in the packet (actual data sent to the remote host) -} -interface SocksUDPFrameDetails { - // The frame number of the packet. - frameNumber?: number; - - // The remote host. - remoteHost: SocksRemoteHost; - - // The packet data. - data: Buffer; -} -``` - -### Class Method: SocksClient.parseUDPFrame(data) -* ```data``` { Buffer } - A Buffer instance containing SOCKS UDP frame data to parse. -* ```returns``` { SocksUDPFrameDetails } - An object containing the remote host, frame number, and frame data of the SOCKS UDP frame. - -```typescript -const frame = SocksClient.parseUDPFrame(data); -console.log(frame); -/* -{ - frameNumber: 0, - remoteHost: { - host: '1.2.3.4', - port: 1234 - }, - data: <Buffer 01 02 03 04 ...> -} -*/ -``` - -Parses a Buffer instance and returns the parsed SocksUDPFrameDetails object. - -## Event: 'error' -* ```err``` { SocksClientError } - An Error object containing an error message and the original SocksClientOptions. - -This event is emitted if an error occurs when trying to establish the proxy connection. - -## Event: 'bound' -* ```info``` { SocksClientBoundEvent } An object containing a Socket and SocksRemoteHost info. - -This event is emitted when using the BIND command on a remote SOCKS proxy server. This event indicates the proxy server is now listening for incoming connections on a specified port. - -**SocksClientBoundEvent** -```typescript -{ - socket: net.Socket, // The underlying raw Socket - remoteHost: { - host: '1.2.3.4', // The remote host that is listening (usually the proxy itself) - port: 4444 // The remote port the proxy is listening on for incoming connections (when using BIND). - } -} -``` - -## Event: 'established' -* ```info``` { SocksClientEstablishedEvent } An object containing a Socket and SocksRemoteHost info. - -This event is emitted when the following conditions are met: -1. When using the CONNECT command, and a proxy connection has been established to the remote host. -2. When using the BIND command, and an incoming connection has been accepted by the proxy and a TCP relay has been established. -3. When using the ASSOCIATE command, and a UDP relay has been established. - -When using BIND, 'bound' is first emitted to indicate the SOCKS server is waiting for an incoming connection, and provides the remote port the SOCKS server is listening on. - -When using ASSOCIATE, 'established' is emitted with the remote UDP port the SOCKS server is accepting UDP frame packets on. - -**SocksClientEstablishedEvent** -```typescript -{ - socket: net.Socket, // The underlying raw Socket - remoteHost: { - host: '1.2.3.4', // The remote host that is listening (usually the proxy itself) - port: 52738 // The remote port the proxy is listening on for incoming connections (when using BIND). - } -} -``` - -## client.connect() - -Starts connecting to the remote SOCKS proxy server to establish a proxy connection to the destination host. - -## client.socksClientOptions -* ```returns``` { SocksClientOptions } The options that were passed to the SocksClient. - -Gets the options that were passed to the SocksClient when it was created. - - -**SocksClientError** -```typescript -{ // Subclassed from Error. 
- message: 'An error has occurred', - options: { - // SocksClientOptions - } -} -``` - -# Further Reading: - -Please read the SOCKS 5 specifications for more information on how to use BIND and Associate. -http://www.ietf.org/rfc/rfc1928.txt - -# License - -This work is licensed under the [MIT license](http://en.wikipedia.org/wiki/MIT_License). diff --git a/node_modules/socks/build/client/socksclient.js b/node_modules/socks/build/client/socksclient.js index 14ac671497354..8bda6f7e06c5d 100644 --- a/node_modules/socks/build/client/socksclient.js +++ b/node_modules/socks/build/client/socksclient.js @@ -45,7 +45,7 @@ class SocksClient extends events_1.EventEmitter { catch (err) { if (typeof callback === 'function') { callback(err); - return resolve(); // Resolves pending promise (prevents memory leaks). + return resolve(err); // Resolves pending promise (prevents memory leaks). } else { return reject(err); @@ -57,7 +57,7 @@ class SocksClient extends events_1.EventEmitter { client.removeAllListeners(); if (typeof callback === 'function') { callback(null, info); - resolve(); // Resolves pending promise (prevents memory leaks). + resolve(info); // Resolves pending promise (prevents memory leaks). } else { resolve(info); @@ -68,7 +68,7 @@ class SocksClient extends events_1.EventEmitter { client.removeAllListeners(); if (typeof callback === 'function') { callback(err); - resolve(); // Resolves pending promise (prevents memory leaks). + resolve(err); // Resolves pending promise (prevents memory leaks). } else { reject(err); @@ -94,7 +94,7 @@ class SocksClient extends events_1.EventEmitter { catch (err) { if (typeof callback === 'function') { callback(err); - return resolve(); // Resolves pending promise (prevents memory leaks). + return resolve(err); // Resolves pending promise (prevents memory leaks). } else { return reject(err); @@ -121,6 +121,7 @@ class SocksClient extends events_1.EventEmitter { command: 'connect', proxy: nextProxy, destination: nextDestination, + // Initial connection ignores this as sock is undefined. Subsequent connections re-use the first proxy socket to form a chain. }); // If sock is undefined, assign it here. if (!sock) { @@ -129,7 +130,7 @@ class SocksClient extends events_1.EventEmitter { } if (typeof callback === 'function') { callback(null, { socket: sock }); - resolve(); // Resolves pending promise (prevents memory leaks). + resolve({ socket: sock }); // Resolves pending promise (prevents memory leaks). } else { resolve({ socket: sock }); @@ -138,7 +139,7 @@ class SocksClient extends events_1.EventEmitter { catch (err) { if (typeof callback === 'function') { callback(err); - resolve(); // Resolves pending promise (prevents memory leaks). + resolve(err); // Resolves pending promise (prevents memory leaks). } else { reject(err); @@ -472,17 +473,22 @@ class SocksClient extends events_1.EventEmitter { */ sendSocks5InitialHandshake() { const buff = new smart_buffer_1.SmartBuffer(); - buff.writeUInt8(0x05); + // By default we always support no auth. + const supportedAuthMethods = [constants_1.Socks5Auth.NoAuth]; // We should only tell the proxy we support user/pass auth if auth info is actually provided. // Note: As of Tor v0.3.5.7+, if user/pass auth is an option from the client, by default it will always take priority. 
if (this.options.proxy.userId || this.options.proxy.password) { - buff.writeUInt8(2); - buff.writeUInt8(constants_1.Socks5Auth.NoAuth); - buff.writeUInt8(constants_1.Socks5Auth.UserPass); + supportedAuthMethods.push(constants_1.Socks5Auth.UserPass); } - else { - buff.writeUInt8(1); - buff.writeUInt8(constants_1.Socks5Auth.NoAuth); + // Custom auth method? + if (this.options.proxy.custom_auth_method !== undefined) { + supportedAuthMethods.push(this.options.proxy.custom_auth_method); + } + // Build handshake packet + buff.writeUInt8(0x05); + buff.writeUInt8(supportedAuthMethods.length); + for (const authMethod of supportedAuthMethods) { + buff.writeUInt8(authMethod); } this.nextRequiredPacketBufferSize = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5InitialHandshakeResponse; @@ -498,17 +504,24 @@ class SocksClient extends events_1.EventEmitter { if (data[0] !== 0x05) { this.closeSocket(constants_1.ERRORS.InvalidSocks5IntiailHandshakeSocksVersion); } - else if (data[1] === 0xff) { + else if (data[1] === constants_1.SOCKS5_NO_ACCEPTABLE_AUTH) { this.closeSocket(constants_1.ERRORS.InvalidSocks5InitialHandshakeNoAcceptedAuthType); } else { // If selected Socks v5 auth method is no auth, send final handshake request. if (data[1] === constants_1.Socks5Auth.NoAuth) { + this.socks5ChosenAuthType = constants_1.Socks5Auth.NoAuth; this.sendSocks5CommandRequest(); // If selected Socks v5 auth method is user/password, send auth handshake. } else if (data[1] === constants_1.Socks5Auth.UserPass) { + this.socks5ChosenAuthType = constants_1.Socks5Auth.UserPass; this.sendSocks5UserPassAuthentication(); + // If selected Socks v5 auth method is the custom_auth_method, send custom handshake. + } + else if (data[1] === this.options.proxy.custom_auth_method) { + this.socks5ChosenAuthType = this.options.proxy.custom_auth_method; + this.sendSocks5CustomAuthentication(); } else { this.closeSocket(constants_1.ERRORS.InvalidSocks5InitialHandshakeUnknownAuthType); @@ -534,19 +547,52 @@ class SocksClient extends events_1.EventEmitter { this.socket.write(buff.toBuffer()); this.setState(constants_1.SocksClientState.SentAuthentication); } + sendSocks5CustomAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + this.nextRequiredPacketBufferSize = this.options.proxy.custom_auth_response_size; + this.socket.write(yield this.options.proxy.custom_auth_request_handler()); + this.setState(constants_1.SocksClientState.SentAuthentication); + }); + } + handleSocks5CustomAuthHandshakeResponse(data) { + return __awaiter(this, void 0, void 0, function* () { + return yield this.options.proxy.custom_auth_response_handler(data); + }); + } + handleSocks5AuthenticationNoAuthHandshakeResponse(data) { + return __awaiter(this, void 0, void 0, function* () { + return data[1] === 0x00; + }); + } + handleSocks5AuthenticationUserPassHandshakeResponse(data) { + return __awaiter(this, void 0, void 0, function* () { + return data[1] === 0x00; + }); + } /** * Handles Socks v5 auth handshake response. 
* @param data */ handleInitialSocks5AuthenticationHandshakeResponse() { - this.setState(constants_1.SocksClientState.ReceivedAuthenticationResponse); - const data = this.receiveBuffer.get(2); - if (data[1] !== 0x00) { - this.closeSocket(constants_1.ERRORS.Socks5AuthenticationFailed); - } - else { - this.sendSocks5CommandRequest(); - } + return __awaiter(this, void 0, void 0, function* () { + this.setState(constants_1.SocksClientState.ReceivedAuthenticationResponse); + let authResult = false; + if (this.socks5ChosenAuthType === constants_1.Socks5Auth.NoAuth) { + authResult = yield this.handleSocks5AuthenticationNoAuthHandshakeResponse(this.receiveBuffer.get(2)); + } + else if (this.socks5ChosenAuthType === constants_1.Socks5Auth.UserPass) { + authResult = yield this.handleSocks5AuthenticationUserPassHandshakeResponse(this.receiveBuffer.get(2)); + } + else if (this.socks5ChosenAuthType === this.options.proxy.custom_auth_method) { + authResult = yield this.handleSocks5CustomAuthHandshakeResponse(this.receiveBuffer.get(this.options.proxy.custom_auth_response_size)); + } + if (!authResult) { + this.closeSocket(constants_1.ERRORS.Socks5AuthenticationFailed); + } + else { + this.sendSocks5CommandRequest(); + } + }); } /** * Sends Socks v5 final handshake request. @@ -644,7 +690,7 @@ class SocksClient extends events_1.EventEmitter { if (constants_1.SocksCommand[this.options.command] === constants_1.SocksCommand.connect) { this.setState(constants_1.SocksClientState.Established); this.removeInternalSocketHandlers(); - this.emit('established', { socket: this.socket }); + this.emit('established', { remoteHost, socket: this.socket }); } else if (constants_1.SocksCommand[this.options.command] === constants_1.SocksCommand.bind) { /* If using BIND, the Socks client is now in BoundWaitingForConnection state. 
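The hunks above add optional SOCKS v5 custom authentication hooks (`custom_auth_method`, `custom_auth_request_handler`, `custom_auth_response_size`, `custom_auth_response_handler`) alongside the existing no-auth and user/pass flows. A rough sketch of how a consumer might supply them, assuming only the option names visible in the diff (the method number, request bytes, and success check below are illustrative):

```javascript
const { SocksClient } = require('socks');

const options = {
  proxy: {
    host: '159.203.75.235',   // illustrative proxy address
    port: 1081,
    type: 5,
    // Advertise an extra auth method (0x80 is an arbitrary value from the private-use range).
    custom_auth_method: 0x80,
    // Called if the proxy selects the custom method; returns the auth request bytes to send.
    custom_auth_request_handler: async () => Buffer.from([0x01, 0x02, 0x03]),
    // Number of bytes to wait for before handing the proxy's reply to the response handler.
    custom_auth_response_size: 2,
    // Return true to continue with the command request, false to fail the connection.
    custom_auth_response_handler: async (data) => data[1] === 0x00
  },
  command: 'connect',
  destination: { host: 'example.com', port: 80 }
};

SocksClient.createConnection(options)
  .then(info => info.socket.end())
  .catch(err => console.error(err));
```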
diff --git a/node_modules/socks/build/client/socksclient.js.map b/node_modules/socks/build/client/socksclient.js.map index 2145dacf5f00e..25843ac0ef21f 100644 --- a/node_modules/socks/build/client/socksclient.js.map +++ b/node_modules/socks/build/client/socksclient.js.map @@ -1 +1 @@ -{"version":3,"file":"socksclient.js","sourceRoot":"","sources":["../../src/client/socksclient.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,mCAAoC;AACpC,2BAA2B;AAC3B,yBAAyB;AACzB,+CAAyC;AACzC,mDAiB6B;AAC7B,+CAG2B;AAC3B,2DAAsD;AACtD,yCAA8D;AAw3B5D,iGAx3BM,uBAAgB,OAw3BN;AA91BlB,MAAM,WAAY,SAAQ,qBAAY;IAepC,YAAY,OAA2B;QACrC,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,OAAO,qBACP,OAAO,CACX,CAAC;QAEF,8BAA8B;QAC9B,oCAA0B,CAAC,OAAO,CAAC,CAAC;QAEpC,gBAAgB;QAChB,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,OAAO,CAAC,CAAC;IAC1C,CAAC;IAED;;;;;;;OAOG;IACH,MAAM,CAAC,gBAAgB,CACrB,OAA2B,EAC3B,QAAmB;QAEnB,OAAO,IAAI,OAAO,CAA8B,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAClE,8BAA8B;YAC9B,IAAI;gBACF,oCAA0B,CAAC,OAAO,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC;aAClD;YAAC,OAAO,GAAG,EAAE;gBACZ,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,GAAG,CAAC,CAAC;oBACd,OAAO,OAAO,EAAE,CAAC,CAAC,oDAAoD;iBACvE;qBAAM;oBACL,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC;iBACpB;aACF;YAED,MAAM,MAAM,GAAG,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;YACxC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC;YACxC,MAAM,CAAC,IAAI,CAAC,aAAa,EAAE,CAAC,IAAiC,EAAE,EAAE;gBAC/D,MAAM,CAAC,kBAAkB,EAAE,CAAC;gBAC5B,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;oBACrB,OAAO,EAAE,CAAC,CAAC,oDAAoD;iBAChE;qBAAM;oBACL,OAAO,CAAC,IAAI,CAAC,CAAC;iBACf;YACH,CAAC,CAAC,CAAC;YAEH,kDAAkD;YAClD,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,GAAU,EAAE,EAAE;gBAClC,MAAM,CAAC,kBAAkB,EAAE,CAAC;gBAC5B,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,GAAG,CAAC,CAAC;oBACd,OAAO,EAAE,CAAC,CAAC,oDAAoD;iBAChE;qBAAM;oBACL,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;YACH,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;;;;;OAQG;IACH,MAAM,CAAC,qBAAqB,CAC1B,OAAgC,EAChC,QAAmB;QAEnB,OAAO,IAAI,OAAO,CAA8B,CAAO,OAAO,EAAE,MAAM,EAAE,EAAE;YACxE,mCAAmC;YACnC,IAAI;gBACF,yCAA+B,CAAC,OAAO,CAAC,CAAC;aAC1C;YAAC,OAAO,GAAG,EAAE;gBACZ,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,GAAG,CAAC,CAAC;oBACd,OAAO,OAAO,EAAE,CAAC,CAAC,oDAAoD;iBACvE;qBAAM;oBACL,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC;iBACpB;aACF;YAED,IAAI,IAAgB,CAAC;YAErB,kBAAkB;YAClB,IAAI,OAAO,CAAC,cAAc,EAAE;gBAC1B,mBAAY,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAC/B;YAED,IAAI;gBACF,kDAAkD;gBAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;oBAC/C,MAAM,SAAS,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;oBAErC,0HAA0H;oBAC1H,MAAM,eAAe,GACnB,CAAC,KAAK,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC;wBAC9B,CAAC,CAAC,OAAO,CAAC,WAAW;wBACrB,CAAC,CAAC;4BACE,IAAI,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS;4BACtC,IAAI,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI;yBAClC,CAAC;oBAER,4CAA4C;oBAC5C,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,gBAAgB,CAAC;wBAChD,OAAO,EAAE,SAAS;wBAClB,KAAK,EAAE,SAAS;wBAChB,WAAW,EAAE,eAAe;qBAE7B,CAAC,CAAC;oBAEH,wCAAwC;oBACxC,IAAI,CAAC,IAAI,EAAE;wBACT,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC;qBACtB;iBACF;gBAED,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,IAAI,EAAE,EAAC,MAAM,EAAE,IAAI,EAAC,CAAC,CAAC;oBAC/B,OAAO,EAAE,CAAC,CAAC,oDAAoD;iBAChE;qBAAM;oBACL,OAAO,CAAC,EAAC,MAAM,EAAE,IAAI,EAAC,CAAC,CAAC;iBACzB;aACF;YAAC,OAAO,GAAG,EAAE;gBACZ,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,GAAG,CAAC,CAAC;oBACd,OAAO,EAAE,CAAC,CAAC,oDAAoD;iBAChE;qBAAM;oBACL,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;aACF;QACH,CAAC,CAAA,CAAC,CAAC;IACL,CAAC;IAED;;;OAGG;IACH,MAAM,CAAC,cAAc,CAAC,OAA6B;QACjD,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAC/B,IAAI,CAAC,aAAa,CAAC,CAAC,CA
AC,CAAC;QACtB,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,WAAW,IAAI,CAAC,CAAC,CAAC;QAE1C,qBAAqB;QACrB,IAAI,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;YACvC,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,IAAI,CAAC,CAAC;YACrC,IAAI,CAAC,aAAa,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC;SACxD;aAAM,IAAI,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;YAC9C,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,IAAI,CAAC,CAAC;YACrC,IAAI,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC;SACxD;aAAM;YACL,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,QAAQ,CAAC,CAAC;YACzC,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC;YAC5D,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;SAC3C;QAED,OAAO;QACP,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAE5C,OAAO;QACP,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAE/B,OAAO,IAAI,CAAC,QAAQ,EAAE,CAAC;IACzB,CAAC;IAED;;;OAGG;IACH,MAAM,CAAC,aAAa,CAAC,IAAY;QAC/B,MAAM,IAAI,GAAG,0BAAW,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAC1C,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;QAEpB,MAAM,WAAW,GAAG,IAAI,CAAC,SAAS,EAAE,CAAC;QACrC,MAAM,QAAQ,GAAmB,IAAI,CAAC,SAAS,EAAE,CAAC;QAClD,IAAI,UAAU,CAAC;QAEf,IAAI,QAAQ,KAAK,0BAAc,CAAC,IAAI,EAAE;YACpC,UAAU,GAAG,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,CAAC;SAC/C;aAAM,IAAI,QAAQ,KAAK,0BAAc,CAAC,IAAI,EAAE;YAC3C,UAAU,GAAG,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,CAAC;SAC/C;aAAM;YACL,UAAU,GAAG,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC;SAChD;QAED,MAAM,UAAU,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC;QAEvC,OAAO;YACL,WAAW;YACX,UAAU,EAAE;gBACV,IAAI,EAAE,UAAU;gBAChB,IAAI,EAAE,UAAU;aACjB;YACD,IAAI,EAAE,IAAI,CAAC,UAAU,EAAE;SACxB,CAAC;IACJ,CAAC;IAED;;OAEG;IACK,QAAQ,CAAC,QAA0B;QACzC,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,KAAK,EAAE;YACzC,IAAI,CAAC,KAAK,GAAG,QAAQ,CAAC;SACvB;IACH,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,cAAuB;QACpC,IAAI,CAAC,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,CAAC;QACzE,IAAI,CAAC,OAAO,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,cAAc,EAAE,CAAC;QAC3C,IAAI,CAAC,OAAO,GAAG,CAAC,GAAU,EAAE,EAAE,CAAC,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC;QACxD,IAAI,CAAC,SAAS,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,gBAAgB,EAAE,CAAC;QAE/C,+CAA+C;QAC/C,MAAM,KAAK,GAAG,UAAU,CACtB,GAAG,EAAE,CAAC,IAAI,CAAC,oBAAoB,EAAE,EACjC,IAAI,CAAC,OAAO,CAAC,OAAO,IAAI,2BAAe,CACxC,CAAC;QAEF,8EAA8E;QAC9E,IAAI,KAAK,CAAC,KAAK,IAAI,OAAO,KAAK,CAAC,KAAK,KAAK,UAAU,EAAE;YACpD,KAAK,CAAC,KAAK,EAAE,CAAC;SACf;QAED,yGAAyG;QACzG,IAAI,cAAc,EAAE;YAClB,IAAI,CAAC,MAAM,GAAG,cAAc,CAAC;SAC9B;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,IAAI,GAAG,CAAC,MAAM,EAAE,CAAC;SAChC;QAED,gCAAgC;QAChC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QACxC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QACxC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;QAC5C,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;QAE5C,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,UAAU,CAAC,CAAC;QAC3C,IAAI,CAAC,aAAa,GAAG,IAAI,6BAAa,EAAE,CAAC;QAEzC,IAAI,cAAc,EAAE;YAClB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;SAC7B;aAAM;YACJ,IAAI,CAAC,MAAqB,CAAC,OAAO,CAAC,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;YAE7D,IACE,IAAI,CAAC,OAAO,CAAC,eAAe,KAAK,SAAS;gBAC1C,IAAI,CAAC,OAAO,CAAC,eAAe,KAAK,IAAI,EACrC;gBACC,IAAI,CAAC,MAAqB,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC;aACxE;SACF;QAED,6FAA6F;QAC7F,IAAI,CAAC,mBAAmB,CAAC,aAAa,EAAE,CAAC,IAAI,EAAE,EAAE;YAC/C,YAAY,CAAC,GAAG,EAAE;gBAChB,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;oBACjC,MAAM,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC;oBAErE,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;iBACtC;gBACD,IAAI,CAA
C,MAAM,CAAC,MAAM,EAAE,CAAC;YACvB,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED,+EAA+E;IACvE,gBAAgB;QACtB,uCACK,IAAI,CAAC,OAAO,CAAC,cAAc,KAC9B,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,SAAS,EAC7D,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,IAC7B;IACJ,CAAC;IAED;;;OAGG;IACK,oBAAoB;QAC1B,IACE,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,WAAW;YAC3C,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,yBAAyB,EACzD;YACA,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,uBAAuB,CAAC,CAAC;SAClD;IACH,CAAC;IAED;;OAEG;IACK,gBAAgB;QACtB,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,SAAS,CAAC,CAAC;QAE1C,0BAA0B;QAC1B,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,EAAE;YACjC,IAAI,CAAC,0BAA0B,EAAE,CAAC;SACnC;aAAM;YACL,IAAI,CAAC,0BAA0B,EAAE,CAAC;SACnC;QAED,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,oBAAoB,CAAC,CAAC;IACvD,CAAC;IAED;;;OAGG;IACK,qBAAqB,CAAC,IAAY;QACxC;;;UAGE;QACF,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAEhC,6BAA6B;QAC7B,IAAI,CAAC,WAAW,EAAE,CAAC;IACrB,CAAC;IAED;;OAEG;IACK,WAAW;QACjB,mFAAmF;QACnF,OACE,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,WAAW;YAC3C,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,KAAK;YACrC,IAAI,CAAC,aAAa,CAAC,MAAM,IAAI,IAAI,CAAC,4BAA4B,EAC9D;YACA,gDAAgD;YAChD,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,oBAAoB,EAAE;gBACxD,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,EAAE;oBACjC,4CAA4C;oBAC5C,IAAI,CAAC,kCAAkC,EAAE,CAAC;iBAC3C;qBAAM;oBACL,wDAAwD;oBACxD,IAAI,CAAC,oCAAoC,EAAE,CAAC;iBAC7C;gBACD,wDAAwD;aACzD;iBAAM,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,kBAAkB,EAAE;gBAC7D,IAAI,CAAC,kDAAkD,EAAE,CAAC;gBAC1D,6DAA6D;aAC9D;iBAAM,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,kBAAkB,EAAE;gBAC7D,IAAI,CAAC,kCAAkC,EAAE,CAAC;gBAC1C,mEAAmE;aACpE;iBAAM,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,yBAAyB,EAAE;gBACpE,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,EAAE;oBACjC,IAAI,CAAC,sCAAsC,EAAE,CAAC;iBAC/C;qBAAM;oBACL,IAAI,CAAC,sCAAsC,EAAE,CAAC;iBAC/C;aACF;iBAAM;gBACL,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,aAAa,CAAC,CAAC;gBACvC,MAAM;aACP;SACF;IACH,CAAC;IAED;;;OAGG;IACK,cAAc;QACpB,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,YAAY,CAAC,CAAC;IACxC,CAAC;IAED;;;OAGG;IACK,cAAc,CAAC,GAAU;QAC/B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IAChC,CAAC;IAED;;OAEG;IACK,4BAA4B;QAClC,6FAA6F;QAC7F,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;QACpB,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;QACxD,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAClD,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAClD,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;IACxD,CAAC;IAED;;;OAGG;IACK,WAAW,CAAC,GAAW;QAC7B,2FAA2F;QAC3F,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,KAAK,EAAE;YACzC,+BAA+B;YAC/B,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,KAAK,CAAC,CAAC;YAEtC,iBAAiB;YACjB,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;YAEtB,4BAA4B;YAC5B,IAAI,CAAC,4BAA4B,EAAE,CAAC;YAEpC,sBAAsB;YACtB,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,uBAAgB,CAAC,GAAG,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC;SAC7D;IACH,CAAC;IAED;;OAEG;IACK,0BAA0B;QAChC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,IAAI,EAAE,CAAC;QAE/C,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtB,IAAI,CAAC,UAAU,CAAC,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;QACpD,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAElD,iBAAiB;QACjB,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE;YAC7C,IAAI,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC;YAC7D,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC;YAC3B,sBAAsB;SACvB;aAAM;YACL,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YACtB,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YACtB,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YACtB,IAAI,CAAC,UA
AU,CAAC,IAAI,CAAC,CAAC;YACtB,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC;YAC3B,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;SACnD;QAED,IAAI,CAAC,4BAA4B;YAC/B,uCAA2B,CAAC,cAAc,CAAC;QAC7C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;IACrC,CAAC;IAED;;;OAGG;IACK,kCAAkC;QACxC,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,0BAAc,CAAC,OAAO,EAAE;YACtC,IAAI,CAAC,WAAW,CACd,GAAG,kBAAM,CAAC,6BAA6B,OACrC,0BAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CACxB,GAAG,CACJ,CAAC;SACH;aAAM;YACL,gBAAgB;YAChB,IAAI,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,wBAAY,CAAC,IAAI,EAAE;gBAC5D,MAAM,IAAI,GAAG,0BAAW,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;gBAC1C,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;gBAEpB,MAAM,UAAU,GAAoB;oBAClC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;oBACzB,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC;iBACvC,CAAC;gBAEF,yCAAyC;gBACzC,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;oBACjC,UAAU,CAAC,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,SAAS,CAAC;iBAChD;gBACD,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,yBAAyB,CAAC,CAAC;gBAC1D,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,EAAC,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;gBAEtD,mBAAmB;aACpB;iBAAM;gBACL,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;gBAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;gBACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;aACjD;SACF;IACH,CAAC;IAED;;;OAGG;IACK,sCAAsC;QAC5C,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,0BAAc,CAAC,OAAO,EAAE;YACtC,IAAI,CAAC,WAAW,CACd,GAAG,kBAAM,CAAC,0CAA0C,OAClD,0BAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CACxB,GAAG,CACJ,CAAC;SACH;aAAM;YACL,MAAM,IAAI,GAAG,0BAAW,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YAC1C,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;YAEpB,MAAM,UAAU,GAAoB;gBAClC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;gBACzB,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC;aACvC,CAAC;YAEF,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;YAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;YACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAC,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;SAC7D;IACH,CAAC;IAED;;OAEG;IACK,0BAA0B;QAChC,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAEtB,6FAA6F;QAC7F,sHAAsH;QACtH,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE;YAC5D,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACnB,IAAI,CAAC,UAAU,CAAC,sBAAU,CAAC,MAAM,CAAC,CAAC;YACnC,IAAI,CAAC,UAAU,CAAC,sBAAU,CAAC,QAAQ,CAAC,CAAC;SACtC;aAAM;YACL,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACnB,IAAI,CAAC,UAAU,CAAC,sBAAU,CAAC,MAAM,CAAC,CAAC;SACpC;QAED,IAAI,CAAC,4BAA4B;YAC/B,uCAA2B,CAAC,8BAA8B,CAAC;QAC7D,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;QACnC,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,oBAAoB,CAAC,CAAC;IACvD,CAAC;IAED;;;OAGG;IACK,oCAAoC;QAC1C,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,EAAE;YACpB,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,yCAAyC,CAAC,CAAC;SACpE;aAAM,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,EAAE;YAC3B,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,+CAA+C,CAAC,CAAC;SAC1E;aAAM;YACL,6EAA6E;YAC7E,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,sBAAU,CAAC,MAAM,EAAE;gBACjC,IAAI,CAAC,wBAAwB,EAAE,CAAC;gBAChC,0EAA0E;aAC3E;iBAAM,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,sBAAU,CAAC,QAAQ,EAAE;gBAC1C,IAAI,CAAC,gCAAgC,EAAE,CAAC;aACzC;iBAAM;gBACL,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,4CAA4C,CAAC,CAAC;aACvE;SACF;IACH,CAAC;IAED;;;;OAIG;IACK,gCAAgC;QACtC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,IAAI,EAAE,CAAC;QAC/C,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,IAAI,EAAE,CAAC;QAEnD,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtB,IAAI,CAA
C,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC;QAC3C,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QACzB,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC;QAC7C,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;QAE3B,IAAI,CAAC,4BAA4B;YAC/B,uCAA2B,CAAC,oCAAoC,CAAC;QACnE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;QACnC,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,kBAAkB,CAAC,CAAC;IACrD,CAAC;IAED;;;OAGG;IACK,kDAAkD;QACxD,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,8BAA8B,CAAC,CAAC;QAE/D,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,EAAE;YACpB,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,0BAA0B,CAAC,CAAC;SACrD;aAAM;YACL,IAAI,CAAC,wBAAwB,EAAE,CAAC;SACjC;IACH,CAAC;IAED;;OAEG;IACK,wBAAwB;QAC9B,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAE/B,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtB,IAAI,CAAC,UAAU,CAAC,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;QACpD,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAEtB,sBAAsB;QACtB,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE;YAC7C,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,IAAI,CAAC,CAAC;YACrC,IAAI,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC;SAC9D;aAAM,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE;YACpD,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,IAAI,CAAC,CAAC;YACrC,IAAI,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC;SAC9D;aAAM;YACL,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,QAAQ,CAAC,CAAC;YACzC,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YACtD,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;SACjD;QACD,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAElD,IAAI,CAAC,4BAA4B;YAC/B,uCAA2B,CAAC,oBAAoB,CAAC;QACnD,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;QACnC,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,kBAAkB,CAAC,CAAC;IACrD,CAAC;IAED;;;OAGG;IACK,kCAAkC;QACxC,+EAA+E;QAC/E,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAE1C,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,0BAAc,CAAC,OAAO,EAAE;YAC9D,IAAI,CAAC,WAAW,CACd,GAAG,kBAAM,CAAC,mCAAmC,MAC3C,0BAAc,CAAC,MAAM,CAAC,CAAC,CAAC,CAC1B,EAAE,CACH,CAAC;SACH;aAAM;YACL,oBAAoB;YACpB,MAAM,WAAW,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;YAE9B,IAAI,UAA2B,CAAC;YAChC,IAAI,IAAiB,CAAC;YAEtB,OAAO;YACP,IAAI,WAAW,KAAK,0BAAc,CAAC,IAAI,EAAE;gBACvC,8BAA8B;gBAC9B,MAAM,UAAU,GAAG,uCAA2B,CAAC,kBAAkB,CAAC;gBAClE,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC;oBACtC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;gBAEF,4DAA4D;gBAC5D,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;oBACjC,UAAU,CAAC,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,SAAS,CAAC;iBAChD;gBAED,WAAW;aACZ;iBAAM,IAAI,WAAW,KAAK,0BAAc,CAAC,QAAQ,EAAE;gBAClD,MAAM,UAAU,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;gBAC7B,MAAM,UAAU,GAAG,uCAA2B,CAAC,sBAAsB,CACnE,UAAU,CACX,CAAC,CAAC,qCAAqC;gBAExC,8BAA8B;gBAC9B,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC;oBACjC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;gBACF,OAAO;aACR;iBAAM,IAAI,WAAW,KAAK,0BAAc,CAAC,IAAI,EAAE;gBAC9C,8BAA8B;gBAC9B,MAAM,UAAU,GAAG,uCAA2B,CAAC,kBAAkB,CAAC;gBAClE,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;
oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;oBACtC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;aACH;YAED,6BAA6B;YAC7B,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,qBAAqB,CAAC,CAAC;YAEtD,gEAAgE;YAChE,IAAI,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,wBAAY,CAAC,OAAO,EAAE;gBAC/D,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;gBAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;gBACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;aACjD;iBAAM,IAAI,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,wBAAY,CAAC,IAAI,EAAE;gBACnE;mHACmG;gBACnG,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,yBAAyB,CAAC,CAAC;gBAC1D,IAAI,CAAC,4BAA4B;oBAC/B,uCAA2B,CAAC,oBAAoB,CAAC;gBACnD,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,EAAC,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;gBACtD;;;kBAGE;aACH;iBAAM,IACL,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,wBAAY,CAAC,SAAS,EAC7D;gBACA,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;gBAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;gBACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;oBACvB,UAAU;oBACV,MAAM,EAAE,IAAI,CAAC,MAAM;iBACpB,CAAC,CAAC;aACJ;SACF;IACH,CAAC;IAED;;OAEG;IACK,sCAAsC;QAC5C,+EAA+E;QAC/E,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAE1C,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,0BAAc,CAAC,OAAO,EAAE;YAC9D,IAAI,CAAC,WAAW,CACd,GAAG,kBAAM,CAAC,0CAA0C,MAClD,0BAAc,CAAC,MAAM,CAAC,CAAC,CAAC,CAC1B,EAAE,CACH,CAAC;SACH;aAAM;YACL,oBAAoB;YACpB,MAAM,WAAW,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;YAE9B,IAAI,UAA2B,CAAC;YAChC,IAAI,IAAiB,CAAC;YAEtB,OAAO;YACP,IAAI,WAAW,KAAK,0BAAc,CAAC,IAAI,EAAE;gBACvC,8BAA8B;gBAC9B,MAAM,UAAU,GAAG,uCAA2B,CAAC,kBAAkB,CAAC;gBAClE,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC;oBACtC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;gBAEF,4DAA4D;gBAC5D,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;oBACjC,UAAU,CAAC,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,SAAS,CAAC;iBAChD;gBAED,WAAW;aACZ;iBAAM,IAAI,WAAW,KAAK,0BAAc,CAAC,QAAQ,EAAE;gBAClD,MAAM,UAAU,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;gBAC7B,MAAM,UAAU,GAAG,uCAA2B,CAAC,sBAAsB,CACnE,UAAU,CACX,CAAC,CAAC,8BAA8B;gBAEjC,8BAA8B;gBAC9B,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC;oBACjC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;gBACF,OAAO;aACR;iBAAM,IAAI,WAAW,KAAK,0BAAc,CAAC,IAAI,EAAE;gBAC9C,8BAA8B;gBAC9B,MAAM,UAAU,GAAG,uCAA2B,CAAC,kBAAkB,CAAC;gBAClE,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;oBACtC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;aACH;YAED,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;YAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;YACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAC,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;SAC7D;IACH,CAAC;IAED,IAAI,kBAAkB;QACpB,yBACK,IAAI,CAAC,OAAO,EACf;IACJ,CAAC;CACF;AAGC,kCAAW"} \ No newline at end of file 
+{"version":3,"file":"socksclient.js","sourceRoot":"","sources":["../../src/client/socksclient.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,mCAAoC;AACpC,2BAA2B;AAC3B,yBAAyB;AACzB,+CAAyC;AACzC,mDAkB6B;AAC7B,+CAG2B;AAC3B,2DAAsD;AACtD,yCAA8D;AA86B5D,iGA96BM,uBAAgB,OA86BN;AAp5BlB,MAAM,WAAY,SAAQ,qBAAY;IAgBpC,YAAY,OAA2B;QACrC,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,OAAO,qBACP,OAAO,CACX,CAAC;QAEF,8BAA8B;QAC9B,oCAA0B,CAAC,OAAO,CAAC,CAAC;QAEpC,gBAAgB;QAChB,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,OAAO,CAAC,CAAC;IAC1C,CAAC;IAED;;;;;;;OAOG;IACH,MAAM,CAAC,gBAAgB,CACrB,OAA2B,EAC3B,QAAmB;QAEnB,OAAO,IAAI,OAAO,CAA8B,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAClE,8BAA8B;YAC9B,IAAI;gBACF,oCAA0B,CAAC,OAAO,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC;aAClD;YAAC,OAAO,GAAG,EAAE;gBACZ,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,GAAG,CAAC,CAAC;oBACd,OAAO,OAAO,CAAC,GAAU,CAAC,CAAC,CAAC,oDAAoD;iBACjF;qBAAM;oBACL,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC;iBACpB;aACF;YAED,MAAM,MAAM,GAAG,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;YACxC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC;YACxC,MAAM,CAAC,IAAI,CAAC,aAAa,EAAE,CAAC,IAAiC,EAAE,EAAE;gBAC/D,MAAM,CAAC,kBAAkB,EAAE,CAAC;gBAC5B,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;oBACrB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,oDAAoD;iBACpE;qBAAM;oBACL,OAAO,CAAC,IAAI,CAAC,CAAC;iBACf;YACH,CAAC,CAAC,CAAC;YAEH,kDAAkD;YAClD,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,GAAU,EAAE,EAAE;gBAClC,MAAM,CAAC,kBAAkB,EAAE,CAAC;gBAC5B,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,GAAG,CAAC,CAAC;oBACd,OAAO,CAAC,GAAU,CAAC,CAAC,CAAC,oDAAoD;iBAC1E;qBAAM;oBACL,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;YACH,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;;;;;OAQG;IACH,MAAM,CAAC,qBAAqB,CAC1B,OAAgC,EAChC,QAAmB;QAEnB,OAAO,IAAI,OAAO,CAA8B,CAAO,OAAO,EAAE,MAAM,EAAE,EAAE;YACxE,mCAAmC;YACnC,IAAI;gBACF,yCAA+B,CAAC,OAAO,CAAC,CAAC;aAC1C;YAAC,OAAO,GAAG,EAAE;gBACZ,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,GAAG,CAAC,CAAC;oBACd,OAAO,OAAO,CAAC,GAAU,CAAC,CAAC,CAAC,oDAAoD;iBACjF;qBAAM;oBACL,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC;iBACpB;aACF;YAED,IAAI,IAAgB,CAAC;YAErB,kBAAkB;YAClB,IAAI,OAAO,CAAC,cAAc,EAAE;gBAC1B,mBAAY,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAC/B;YAED,IAAI;gBACF,kDAAkD;gBAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;oBAC/C,MAAM,SAAS,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;oBAErC,0HAA0H;oBAC1H,MAAM,eAAe,GACnB,CAAC,KAAK,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC;wBAC9B,CAAC,CAAC,OAAO,CAAC,WAAW;wBACrB,CAAC,CAAC;4BACE,IAAI,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS;4BACtC,IAAI,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI;yBAClC,CAAC;oBAER,4CAA4C;oBAC5C,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,gBAAgB,CAAC;wBAChD,OAAO,EAAE,SAAS;wBAClB,KAAK,EAAE,SAAS;wBAChB,WAAW,EAAE,eAAe;wBAC5B,8HAA8H;qBAC/H,CAAC,CAAC;oBAEH,wCAAwC;oBACxC,IAAI,CAAC,IAAI,EAAE;wBACT,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC;qBACtB;iBACF;gBAED,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,IAAI,EAAE,EAAC,MAAM,EAAE,IAAI,EAAC,CAAC,CAAC;oBAC/B,OAAO,CAAC,EAAC,MAAM,EAAE,IAAI,EAAC,CAAC,CAAC,CAAC,oDAAoD;iBAC9E;qBAAM;oBACL,OAAO,CAAC,EAAC,MAAM,EAAE,IAAI,EAAC,CAAC,CAAC;iBACzB;aACF;YAAC,OAAO,GAAG,EAAE;gBACZ,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,GAAG,CAAC,CAAC;oBACd,OAAO,CAAC,GAAU,CAAC,CAAC,CAAC,oDAAoD;iBAC1E;qBAAM;oBACL,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;aACF;QACH,CAAC,CAAA,CAAC,CAAC;IACL,CAAC;IAED;;;OAGG;IACH,MAAM,CAAC,cAAc,CAAC,OAA6B;QACjD,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAC/B,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;QACtB,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,WAAW,IAAI,CAAC,CAAC,CAAC;QAE1C,qBAAqB;QACrB,IAAI,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;YACvC,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,IAAI,
CAAC,CAAC;YACrC,IAAI,CAAC,aAAa,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC;SACxD;aAAM,IAAI,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;YAC9C,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,IAAI,CAAC,CAAC;YACrC,IAAI,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC;SACxD;aAAM;YACL,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,QAAQ,CAAC,CAAC;YACzC,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC;YAC5D,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;SAC3C;QAED,OAAO;QACP,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAE5C,OAAO;QACP,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAE/B,OAAO,IAAI,CAAC,QAAQ,EAAE,CAAC;IACzB,CAAC;IAED;;;OAGG;IACH,MAAM,CAAC,aAAa,CAAC,IAAY;QAC/B,MAAM,IAAI,GAAG,0BAAW,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAC1C,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;QAEpB,MAAM,WAAW,GAAG,IAAI,CAAC,SAAS,EAAE,CAAC;QACrC,MAAM,QAAQ,GAAmB,IAAI,CAAC,SAAS,EAAE,CAAC;QAClD,IAAI,UAAU,CAAC;QAEf,IAAI,QAAQ,KAAK,0BAAc,CAAC,IAAI,EAAE;YACpC,UAAU,GAAG,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,CAAC;SAC/C;aAAM,IAAI,QAAQ,KAAK,0BAAc,CAAC,IAAI,EAAE;YAC3C,UAAU,GAAG,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,CAAC;SAC/C;aAAM;YACL,UAAU,GAAG,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC;SAChD;QAED,MAAM,UAAU,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC;QAEvC,OAAO;YACL,WAAW;YACX,UAAU,EAAE;gBACV,IAAI,EAAE,UAAU;gBAChB,IAAI,EAAE,UAAU;aACjB;YACD,IAAI,EAAE,IAAI,CAAC,UAAU,EAAE;SACxB,CAAC;IACJ,CAAC;IAED;;OAEG;IACK,QAAQ,CAAC,QAA0B;QACzC,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,KAAK,EAAE;YACzC,IAAI,CAAC,KAAK,GAAG,QAAQ,CAAC;SACvB;IACH,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,cAAuB;QACpC,IAAI,CAAC,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,CAAC;QACzE,IAAI,CAAC,OAAO,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,cAAc,EAAE,CAAC;QAC3C,IAAI,CAAC,OAAO,GAAG,CAAC,GAAU,EAAE,EAAE,CAAC,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC;QACxD,IAAI,CAAC,SAAS,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,gBAAgB,EAAE,CAAC;QAE/C,+CAA+C;QAC/C,MAAM,KAAK,GAAG,UAAU,CACtB,GAAG,EAAE,CAAC,IAAI,CAAC,oBAAoB,EAAE,EACjC,IAAI,CAAC,OAAO,CAAC,OAAO,IAAI,2BAAe,CACxC,CAAC;QAEF,8EAA8E;QAC9E,IAAI,KAAK,CAAC,KAAK,IAAI,OAAO,KAAK,CAAC,KAAK,KAAK,UAAU,EAAE;YACpD,KAAK,CAAC,KAAK,EAAE,CAAC;SACf;QAED,yGAAyG;QACzG,IAAI,cAAc,EAAE;YAClB,IAAI,CAAC,MAAM,GAAG,cAAc,CAAC;SAC9B;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,IAAI,GAAG,CAAC,MAAM,EAAE,CAAC;SAChC;QAED,gCAAgC;QAChC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QACxC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QACxC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;QAC5C,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;QAE5C,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,UAAU,CAAC,CAAC;QAC3C,IAAI,CAAC,aAAa,GAAG,IAAI,6BAAa,EAAE,CAAC;QAEzC,IAAI,cAAc,EAAE;YAClB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;SAC7B;aAAM;YACJ,IAAI,CAAC,MAAqB,CAAC,OAAO,CAAC,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;YAE7D,IACE,IAAI,CAAC,OAAO,CAAC,eAAe,KAAK,SAAS;gBAC1C,IAAI,CAAC,OAAO,CAAC,eAAe,KAAK,IAAI,EACrC;gBACC,IAAI,CAAC,MAAqB,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC;aACxE;SACF;QAED,6FAA6F;QAC7F,IAAI,CAAC,mBAAmB,CAAC,aAAa,EAAE,CAAC,IAAI,EAAE,EAAE;YAC/C,YAAY,CAAC,GAAG,EAAE;gBAChB,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;oBACjC,MAAM,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC;oBAErE,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;iBACtC;gBACD,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;YACvB,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED,+EAA+E;IACvE,gBAAgB;QACtB,uCACK,IAAI,CAAC,OAAO,CAAC,cAAc,KAC9B,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI
,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,SAAS,EAC7D,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,IAC7B;IACJ,CAAC;IAED;;;OAGG;IACK,oBAAoB;QAC1B,IACE,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,WAAW;YAC3C,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,yBAAyB,EACzD;YACA,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,uBAAuB,CAAC,CAAC;SAClD;IACH,CAAC;IAED;;OAEG;IACK,gBAAgB;QACtB,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,SAAS,CAAC,CAAC;QAE1C,0BAA0B;QAC1B,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,EAAE;YACjC,IAAI,CAAC,0BAA0B,EAAE,CAAC;SACnC;aAAM;YACL,IAAI,CAAC,0BAA0B,EAAE,CAAC;SACnC;QAED,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,oBAAoB,CAAC,CAAC;IACvD,CAAC;IAED;;;OAGG;IACK,qBAAqB,CAAC,IAAY;QACxC;;;UAGE;QACF,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAEhC,6BAA6B;QAC7B,IAAI,CAAC,WAAW,EAAE,CAAC;IACrB,CAAC;IAED;;OAEG;IACK,WAAW;QACjB,mFAAmF;QACnF,OACE,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,WAAW;YAC3C,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,KAAK;YACrC,IAAI,CAAC,aAAa,CAAC,MAAM,IAAI,IAAI,CAAC,4BAA4B,EAC9D;YACA,gDAAgD;YAChD,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,oBAAoB,EAAE;gBACxD,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,EAAE;oBACjC,4CAA4C;oBAC5C,IAAI,CAAC,kCAAkC,EAAE,CAAC;iBAC3C;qBAAM;oBACL,wDAAwD;oBACxD,IAAI,CAAC,oCAAoC,EAAE,CAAC;iBAC7C;gBACD,wDAAwD;aACzD;iBAAM,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,kBAAkB,EAAE;gBAC7D,IAAI,CAAC,kDAAkD,EAAE,CAAC;gBAC1D,6DAA6D;aAC9D;iBAAM,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,kBAAkB,EAAE;gBAC7D,IAAI,CAAC,kCAAkC,EAAE,CAAC;gBAC1C,mEAAmE;aACpE;iBAAM,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,yBAAyB,EAAE;gBACpE,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,EAAE;oBACjC,IAAI,CAAC,sCAAsC,EAAE,CAAC;iBAC/C;qBAAM;oBACL,IAAI,CAAC,sCAAsC,EAAE,CAAC;iBAC/C;aACF;iBAAM;gBACL,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,aAAa,CAAC,CAAC;gBACvC,MAAM;aACP;SACF;IACH,CAAC;IAED;;;OAGG;IACK,cAAc;QACpB,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,YAAY,CAAC,CAAC;IACxC,CAAC;IAED;;;OAGG;IACK,cAAc,CAAC,GAAU;QAC/B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IAChC,CAAC;IAED;;OAEG;IACK,4BAA4B;QAClC,6FAA6F;QAC7F,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;QACpB,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;QACxD,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAClD,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAClD,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;IACxD,CAAC;IAED;;;OAGG;IACK,WAAW,CAAC,GAAW;QAC7B,2FAA2F;QAC3F,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,KAAK,EAAE;YACzC,+BAA+B;YAC/B,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,KAAK,CAAC,CAAC;YAEtC,iBAAiB;YACjB,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;YAEtB,4BAA4B;YAC5B,IAAI,CAAC,4BAA4B,EAAE,CAAC;YAEpC,sBAAsB;YACtB,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,uBAAgB,CAAC,GAAG,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC;SAC7D;IACH,CAAC;IAED;;OAEG;IACK,0BAA0B;QAChC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,IAAI,EAAE,CAAC;QAE/C,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtB,IAAI,CAAC,UAAU,CAAC,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;QACpD,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAElD,iBAAiB;QACjB,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE;YAC7C,IAAI,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC;YAC7D,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC;YAC3B,sBAAsB;SACvB;aAAM;YACL,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YACtB,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YACtB,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YACtB,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YACtB,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC;YAC3B,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;SACnD;QAED,IAAI,CAAC,4BAA4B;YAC/B,uCAA2B,CAAC,cAAc,CAAC
;QAC7C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;IACrC,CAAC;IAED;;;OAGG;IACK,kCAAkC;QACxC,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,0BAAc,CAAC,OAAO,EAAE;YACtC,IAAI,CAAC,WAAW,CACd,GAAG,kBAAM,CAAC,6BAA6B,OACrC,0BAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CACxB,GAAG,CACJ,CAAC;SACH;aAAM;YACL,gBAAgB;YAChB,IAAI,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,wBAAY,CAAC,IAAI,EAAE;gBAC5D,MAAM,IAAI,GAAG,0BAAW,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;gBAC1C,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;gBAEpB,MAAM,UAAU,GAAoB;oBAClC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;oBACzB,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC;iBACvC,CAAC;gBAEF,yCAAyC;gBACzC,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;oBACjC,UAAU,CAAC,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,SAAS,CAAC;iBAChD;gBACD,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,yBAAyB,CAAC,CAAC;gBAC1D,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,EAAC,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;gBAEtD,mBAAmB;aACpB;iBAAM;gBACL,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;gBAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;gBACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;aACjD;SACF;IACH,CAAC;IAED;;;OAGG;IACK,sCAAsC;QAC5C,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,0BAAc,CAAC,OAAO,EAAE;YACtC,IAAI,CAAC,WAAW,CACd,GAAG,kBAAM,CAAC,0CAA0C,OAClD,0BAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CACxB,GAAG,CACJ,CAAC;SACH;aAAM;YACL,MAAM,IAAI,GAAG,0BAAW,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YAC1C,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;YAEpB,MAAM,UAAU,GAAoB;gBAClC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;gBACzB,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC;aACvC,CAAC;YAEF,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;YAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;YACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAC,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;SAC7D;IACH,CAAC;IAED;;OAEG;IACK,0BAA0B;QAChC,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAE/B,wCAAwC;QACxC,MAAM,oBAAoB,GAAG,CAAC,sBAAU,CAAC,MAAM,CAAC,CAAC;QAEjD,6FAA6F;QAC7F,sHAAsH;QACtH,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE;YAC5D,oBAAoB,CAAC,IAAI,CAAC,sBAAU,CAAC,QAAQ,CAAC,CAAC;SAChD;QAED,sBAAsB;QACtB,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,kBAAkB,KAAK,SAAS,EAAE;YACvD,oBAAoB,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;SAClE;QAED,yBAAyB;QACzB,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtB,IAAI,CAAC,UAAU,CAAC,oBAAoB,CAAC,MAAM,CAAC,CAAC;QAC7C,KAAK,MAAM,UAAU,IAAI,oBAAoB,EAAE;YAC7C,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;SAC7B;QAED,IAAI,CAAC,4BAA4B;YAC/B,uCAA2B,CAAC,8BAA8B,CAAC;QAC7D,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;QACnC,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,oBAAoB,CAAC,CAAC;IACvD,CAAC;IAED;;;OAGG;IACK,oCAAoC;QAC1C,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,EAAE;YACpB,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,yCAAyC,CAAC,CAAC;SACpE;aAAM,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,qCAAyB,EAAE;YAChD,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,+CAA+C,CAAC,CAAC;SAC1E;aAAM;YACL,6EAA6E;YAC7E,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,sBAAU,CAAC,MAAM,EAAE;gBACjC,IAAI,CAAC,oBAAoB,GAAG,sBAAU,CAAC,MAAM,CAAC;gBAC9C,IAAI,CAAC,wBAAwB,EAAE,CAAC;gBAChC,0EAA0E;aAC3E;iBAAM,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,sBAAU,CAAC,QAAQ,EAAE;gBAC1C,IAAI,CAAC,oBAAoB,GAAG,sBAAU,CAAC,QAAQ,CAAC;gBAChD,IAAI,CAAC,gCAAgC,EAAE,CAAC;gBACxC,qFAAqF;aACtF;iBAAM,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,kBAAkB,EAAE;gBAC5D,IAAI,CAAC,oBAAoB,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,kBAAkB,CAAC;gBAClE,IAAI,CAAC,8BAA8B,EAAE,CAAC;aACvC;iBAAM;gBACL,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,4CAA4C,C
AAC,CAAC;aACvE;SACF;IACH,CAAC;IAED;;;;OAIG;IACK,gCAAgC;QACtC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,IAAI,EAAE,CAAC;QAC/C,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,IAAI,EAAE,CAAC;QAEnD,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtB,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC;QAC3C,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QACzB,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC;QAC7C,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;QAE3B,IAAI,CAAC,4BAA4B;YAC/B,uCAA2B,CAAC,oCAAoC,CAAC;QACnE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;QACnC,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,kBAAkB,CAAC,CAAC;IACrD,CAAC;IAEa,8BAA8B;;YAC1C,IAAI,CAAC,4BAA4B,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,yBAAyB,CAAC;YACjF,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,2BAA2B,EAAE,CAAC,CAAC;YAC1E,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,kBAAkB,CAAC,CAAC;QACrD,CAAC;KAAA;IAEa,uCAAuC,CAAC,IAAY;;YAChE,OAAO,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,4BAA4B,CAAC,IAAI,CAAC,CAAC;QACrE,CAAC;KAAA;IAEa,iDAAiD,CAC7D,IAAY;;YAEZ,OAAO,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,CAAC;QAC1B,CAAC;KAAA;IAEa,mDAAmD,CAC/D,IAAY;;YAEZ,OAAO,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,CAAC;QAC1B,CAAC;KAAA;IAED;;;OAGG;IACW,kDAAkD;;YAC9D,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,8BAA8B,CAAC,CAAC;YAE/D,IAAI,UAAU,GAAY,KAAK,CAAC;YAEhC,IAAI,IAAI,CAAC,oBAAoB,KAAK,sBAAU,CAAC,MAAM,EAAE;gBACnD,UAAU,GAAG,MAAM,IAAI,CAAC,iDAAiD,CACvE,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAC1B,CAAC;aACH;iBAAM,IAAI,IAAI,CAAC,oBAAoB,KAAK,sBAAU,CAAC,QAAQ,EAAE;gBAC5D,UAAU,GAAG,MAAM,IAAI,CAAC,mDAAmD,CACzE,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAC1B,CAAC;aACH;iBAAM,IACL,IAAI,CAAC,oBAAoB,KAAK,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,kBAAkB,EACnE;gBACA,UAAU,GAAG,MAAM,IAAI,CAAC,uCAAuC,CAC7D,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,yBAAyB,CAAC,CACrE,CAAC;aACH;YAED,IAAI,CAAC,UAAU,EAAE;gBACf,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,0BAA0B,CAAC,CAAC;aACrD;iBAAM;gBACL,IAAI,CAAC,wBAAwB,EAAE,CAAC;aACjC;QACH,CAAC;KAAA;IAED;;OAEG;IACK,wBAAwB;QAC9B,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAE/B,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtB,IAAI,CAAC,UAAU,CAAC,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;QACpD,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAEtB,sBAAsB;QACtB,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE;YAC7C,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,IAAI,CAAC,CAAC;YACrC,IAAI,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC;SAC9D;aAAM,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE;YACpD,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,IAAI,CAAC,CAAC;YACrC,IAAI,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC;SAC9D;aAAM;YACL,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,QAAQ,CAAC,CAAC;YACzC,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YACtD,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;SACjD;QACD,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAElD,IAAI,CAAC,4BAA4B;YAC/B,uCAA2B,CAAC,oBAAoB,CAAC;QACnD,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;QACnC,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,kBAAkB,CAAC,CAAC;IACrD,CAAC;IAED;;;OAGG;IACK,kCAAkC;QACxC,+EAA+E;QAC/E,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAE1C,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,0BAAc,CAAC,OAAO,EAAE;YAC9D,IAAI,CAAC,WAAW,CACd,GAAG,kBAAM,CAAC,mCAAmC,MAC3C,0BAAc,CAAC,MAAM,CAAC,CAAC,CAAC,CAC1B,EAAE,CACH,CAAC;SACH;aAAM;YACL,oBAAoB;YACpB,MAAM,WAAW,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;YAE9B,IAAI,UAA2B,CAAC;YAChC,IAAI,IAAiB
,CAAC;YAEtB,OAAO;YACP,IAAI,WAAW,KAAK,0BAAc,CAAC,IAAI,EAAE;gBACvC,8BAA8B;gBAC9B,MAAM,UAAU,GAAG,uCAA2B,CAAC,kBAAkB,CAAC;gBAClE,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC;oBACtC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;gBAEF,4DAA4D;gBAC5D,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;oBACjC,UAAU,CAAC,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,SAAS,CAAC;iBAChD;gBAED,WAAW;aACZ;iBAAM,IAAI,WAAW,KAAK,0BAAc,CAAC,QAAQ,EAAE;gBAClD,MAAM,UAAU,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;gBAC7B,MAAM,UAAU,GAAG,uCAA2B,CAAC,sBAAsB,CACnE,UAAU,CACX,CAAC,CAAC,qCAAqC;gBAExC,8BAA8B;gBAC9B,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC;oBACjC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;gBACF,OAAO;aACR;iBAAM,IAAI,WAAW,KAAK,0BAAc,CAAC,IAAI,EAAE;gBAC9C,8BAA8B;gBAC9B,MAAM,UAAU,GAAG,uCAA2B,CAAC,kBAAkB,CAAC;gBAClE,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;oBACtC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;aACH;YAED,6BAA6B;YAC7B,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,qBAAqB,CAAC,CAAC;YAEtD,gEAAgE;YAChE,IAAI,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,wBAAY,CAAC,OAAO,EAAE;gBAC/D,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;gBAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;gBACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAC,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;aAC7D;iBAAM,IAAI,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,wBAAY,CAAC,IAAI,EAAE;gBACnE;mHACmG;gBACnG,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,yBAAyB,CAAC,CAAC;gBAC1D,IAAI,CAAC,4BAA4B;oBAC/B,uCAA2B,CAAC,oBAAoB,CAAC;gBACnD,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,EAAC,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;gBACtD;;;kBAGE;aACH;iBAAM,IACL,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,wBAAY,CAAC,SAAS,EAC7D;gBACA,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;gBAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;gBACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;oBACvB,UAAU;oBACV,MAAM,EAAE,IAAI,CAAC,MAAM;iBACpB,CAAC,CAAC;aACJ;SACF;IACH,CAAC;IAED;;OAEG;IACK,sCAAsC;QAC5C,+EAA+E;QAC/E,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAE1C,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,0BAAc,CAAC,OAAO,EAAE;YAC9D,IAAI,CAAC,WAAW,CACd,GAAG,kBAAM,CAAC,0CAA0C,MAClD,0BAAc,CAAC,MAAM,CAAC,CAAC,CAAC,CAC1B,EAAE,CACH,CAAC;SACH;aAAM;YACL,oBAAoB;YACpB,MAAM,WAAW,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;YAE9B,IAAI,UAA2B,CAAC;YAChC,IAAI,IAAiB,CAAC;YAEtB,OAAO;YACP,IAAI,WAAW,KAAK,0BAAc,CAAC,IAAI,EAAE;gBACvC,8BAA8B;gBAC9B,MAAM,UAAU,GAAG,uCAA2B,CAAC,kBAAkB,CAAC;gBAClE,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC;oBACtC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;gBAEF,4DAA4D;gBAC5D,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;oBACjC,UAAU,CAAC,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,SAAS,CAAC;iBAChD;gBAED
,WAAW;aACZ;iBAAM,IAAI,WAAW,KAAK,0BAAc,CAAC,QAAQ,EAAE;gBAClD,MAAM,UAAU,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;gBAC7B,MAAM,UAAU,GAAG,uCAA2B,CAAC,sBAAsB,CACnE,UAAU,CACX,CAAC,CAAC,8BAA8B;gBAEjC,8BAA8B;gBAC9B,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC;oBACjC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;gBACF,OAAO;aACR;iBAAM,IAAI,WAAW,KAAK,0BAAc,CAAC,IAAI,EAAE;gBAC9C,8BAA8B;gBAC9B,MAAM,UAAU,GAAG,uCAA2B,CAAC,kBAAkB,CAAC;gBAClE,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;oBACtC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;aACH;YAED,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;YAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;YACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAC,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;SAC7D;IACH,CAAC;IAED,IAAI,kBAAkB;QACpB,yBACK,IAAI,CAAC,OAAO,EACf;IACJ,CAAC;CACF;AAGC,kCAAW"} \ No newline at end of file diff --git a/node_modules/socks/build/common/constants.js b/node_modules/socks/build/common/constants.js index 8f8f5436ca142..3c9ff90ac9feb 100644 --- a/node_modules/socks/build/common/constants.js +++ b/node_modules/socks/build/common/constants.js @@ -1,6 +1,6 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.SOCKS_INCOMING_PACKET_SIZES = exports.SocksClientState = exports.Socks5Response = exports.Socks5HostType = exports.Socks5Auth = exports.Socks4Response = exports.SocksCommand = exports.ERRORS = exports.DEFAULT_TIMEOUT = void 0; +exports.SOCKS5_NO_ACCEPTABLE_AUTH = exports.SOCKS5_CUSTOM_AUTH_END = exports.SOCKS5_CUSTOM_AUTH_START = exports.SOCKS_INCOMING_PACKET_SIZES = exports.SocksClientState = exports.Socks5Response = exports.Socks5HostType = exports.Socks5Auth = exports.Socks4Response = exports.SocksCommand = exports.ERRORS = exports.DEFAULT_TIMEOUT = void 0; const DEFAULT_TIMEOUT = 30000; exports.DEFAULT_TIMEOUT = DEFAULT_TIMEOUT; // prettier-ignore @@ -13,6 +13,8 @@ const ERRORS = { InvalidSocksClientOptionsProxy: 'Invalid SOCKS proxy details were provided.', InvalidSocksClientOptionsTimeout: 'An invalid timeout value was provided. 
Please enter a value above 0 (in ms).', InvalidSocksClientOptionsProxiesLength: 'At least two socks proxies must be provided for chaining.', + InvalidSocksClientOptionsCustomAuthRange: 'Custom auth must be a value between 0x80 and 0xFE.', + InvalidSocksClientOptionsCustomAuthOptions: 'When a custom_auth_method is provided, custom_auth_request_handler, custom_auth_response_size, and custom_auth_response_handler must also be provided and valid.', NegotiationError: 'Negotiation error', SocketClosed: 'Socket closed', ProxyConnectionTimedOut: 'Proxy connection timed out', @@ -41,7 +43,7 @@ const SOCKS_INCOMING_PACKET_SIZES = { Socks5ResponseIPv6: 22, Socks5ResponseHostname: (hostNameLength) => hostNameLength + 7, // Command response + incoming connection (bind) - Socks4Response: 8, + Socks4Response: 8, // 2 header + 2 port + 4 ip }; exports.SOCKS_INCOMING_PACKET_SIZES = SOCKS_INCOMING_PACKET_SIZES; var SocksCommand; @@ -66,6 +68,12 @@ var Socks5Auth; Socks5Auth[Socks5Auth["UserPass"] = 2] = "UserPass"; })(Socks5Auth || (Socks5Auth = {})); exports.Socks5Auth = Socks5Auth; +const SOCKS5_CUSTOM_AUTH_START = 0x80; +exports.SOCKS5_CUSTOM_AUTH_START = SOCKS5_CUSTOM_AUTH_START; +const SOCKS5_CUSTOM_AUTH_END = 0xfe; +exports.SOCKS5_CUSTOM_AUTH_END = SOCKS5_CUSTOM_AUTH_END; +const SOCKS5_NO_ACCEPTABLE_AUTH = 0xff; +exports.SOCKS5_NO_ACCEPTABLE_AUTH = SOCKS5_NO_ACCEPTABLE_AUTH; var Socks5Response; (function (Socks5Response) { Socks5Response[Socks5Response["Granted"] = 0] = "Granted"; diff --git a/node_modules/socks/build/common/constants.js.map b/node_modules/socks/build/common/constants.js.map index 70d31db294c55..c1e070dea4ac3 100644 --- a/node_modules/socks/build/common/constants.js.map +++ b/node_modules/socks/build/common/constants.js.map @@ -1 +1 @@ -{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/common/constants.ts"],"names":[],"mappings":";;;AAIA,MAAM,eAAe,GAAG,KAAK,CAAC;AA6L5B,0CAAe;AAzLjB,kBAAkB;AAClB,MAAM,MAAM,GAAG;IACb,mBAAmB,EAAE,wFAAwF;IAC7G,+BAA+B,EAAE,oGAAoG;IACrI,wBAAwB,EAAE,8FAA8F;IACxH,oCAAoC,EAAE,2CAA2C;IACjF,uCAAuC,EAAE,uFAAuF;IAChI,8BAA8B,EAAE,4CAA4C;IAC5E,gCAAgC,EAAE,8EAA8E;IAChH,sCAAsC,EAAE,2DAA2D;IACnG,gBAAgB,EAAE,mBAAmB;IACrC,YAAY,EAAE,eAAe;IAC7B,uBAAuB,EAAE,4BAA4B;IACrD,aAAa,EAAE,qDAAqD;IACpE,8BAA8B,EAAE,4CAA4C;IAC5E,6BAA6B,EAAE,kCAAkC;IACjE,uCAAuC,EAAE,6CAA6C;IACtF,0CAA0C,EAAE,iDAAiD;IAC7F,qCAAqC,EAAE,oDAAoD;IAC3F,yCAAyC,EAAE,mEAAmE;IAC9G,+CAA+C,EAAE,6EAA6E;IAC9H,4CAA4C,EAAE,yEAAyE;IACvH,0BAA0B,EAAE,8BAA8B;IAC1D,2BAA2B,EAAE,kDAAkD;IAC/E,mCAAmC,EAAE,kCAAkC;IACvE,uCAAuC,EAAE,sDAAsD;IAC/F,0CAA0C,EAAE,iDAAiD;CAC9F,CAAC;AA+JA,wBAAM;AA7JR,MAAM,2BAA2B,GAAG;IAClC,8BAA8B,EAAE,CAAC;IACjC,oCAAoC,EAAE,CAAC;IACvC,gDAAgD;IAChD,oBAAoB,EAAE,CAAC;IACvB,kBAAkB,EAAE,EAAE;IACtB,kBAAkB,EAAE,EAAE;IACtB,sBAAsB,EAAE,CAAC,cAAsB,EAAE,EAAE,CAAC,cAAc,GAAG,CAAC;IACtE,gDAAgD;IAChD,cAAc,EAAE,CAAC;CAClB,CAAC;AAmKA,kEAA2B;AA/J7B,IAAK,YAIJ;AAJD,WAAK,YAAY;IACf,qDAAc,CAAA;IACd,+CAAW,CAAA;IACX,yDAAgB,CAAA;AAClB,CAAC,EAJI,YAAY,KAAZ,YAAY,QAIhB;AA6IC,oCAAY;AA3Id,IAAK,cAKJ;AALD,WAAK,cAAc;IACjB,0DAAc,CAAA;IACd,wDAAa,CAAA;IACb,4DAAe,CAAA;IACf,sEAAoB,CAAA;AACtB,CAAC,EALI,cAAc,KAAd,cAAc,QAKlB;AAuIC,wCAAc;AArIhB,IAAK,UAIJ;AAJD,WAAK,UAAU;IACb,+CAAa,CAAA;IACb,+CAAa,CAAA;IACb,mDAAe,CAAA;AACjB,CAAC,EAJI,UAAU,KAAV,UAAU,QAId;AAkIC,gCAAU;AAhIZ,IAAK,cAUJ;AAVD,WAAK,cAAc;IACjB,yDAAc,CAAA;IACd,yDAAc,CAAA;IACd,+DAAiB,CAAA;IACjB,+EAAyB,CAAA;IACzB,yEAAsB,CAAA;IACtB,6EAAwB,CAAA;IACxB,+DAAiB,CAAA;IACjB,iFAA0B,CAAA;IAC1B,iFAA0B,CAAA;AAC5B,CAAC,EAVI,cAAc,KAAd,cAAc,QAUlB;AAwHC,wCAAc;AAtHhB,IAAK,cAIJ;AAJD,WAAK,cAAc;I
ACjB,mDAAW,CAAA;IACX,2DAAe,CAAA;IACf,mDAAW,CAAA;AACb,CAAC,EAJI,cAAc,KAAd,cAAc,QAIlB;AAiHC,wCAAc;AA/GhB,IAAK,gBAcJ;AAdD,WAAK,gBAAgB;IACnB,6DAAW,CAAA;IACX,mEAAc,CAAA;IACd,iEAAa,CAAA;IACb,uFAAwB,CAAA;IACxB,+GAAoC,CAAA;IACpC,mFAAsB,CAAA;IACtB,2GAAkC,CAAA;IAClC,mFAAsB,CAAA;IACtB,yFAAyB,CAAA;IACzB,iGAA6B,CAAA;IAC7B,sEAAgB,CAAA;IAChB,wEAAiB,CAAA;IACjB,0DAAU,CAAA;AACZ,CAAC,EAdI,gBAAgB,KAAhB,gBAAgB,QAcpB;AAmGC,4CAAgB"} \ No newline at end of file +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/common/constants.ts"],"names":[],"mappings":";;;AAIA,MAAM,eAAe,GAAG,KAAK,CAAC;AA4M5B,0CAAe;AAxMjB,kBAAkB;AAClB,MAAM,MAAM,GAAG;IACb,mBAAmB,EAAE,wFAAwF;IAC7G,+BAA+B,EAAE,oGAAoG;IACrI,wBAAwB,EAAE,8FAA8F;IACxH,oCAAoC,EAAE,2CAA2C;IACjF,uCAAuC,EAAE,uFAAuF;IAChI,8BAA8B,EAAE,4CAA4C;IAC5E,gCAAgC,EAAE,8EAA8E;IAChH,sCAAsC,EAAE,2DAA2D;IACnG,wCAAwC,EAAE,oDAAoD;IAC9F,0CAA0C,EAAE,kKAAkK;IAC9M,gBAAgB,EAAE,mBAAmB;IACrC,YAAY,EAAE,eAAe;IAC7B,uBAAuB,EAAE,4BAA4B;IACrD,aAAa,EAAE,qDAAqD;IACpE,8BAA8B,EAAE,4CAA4C;IAC5E,6BAA6B,EAAE,kCAAkC;IACjE,uCAAuC,EAAE,6CAA6C;IACtF,0CAA0C,EAAE,iDAAiD;IAC7F,qCAAqC,EAAE,oDAAoD;IAC3F,yCAAyC,EAAE,mEAAmE;IAC9G,+CAA+C,EAAE,6EAA6E;IAC9H,4CAA4C,EAAE,yEAAyE;IACvH,0BAA0B,EAAE,8BAA8B;IAC1D,2BAA2B,EAAE,kDAAkD;IAC/E,mCAAmC,EAAE,kCAAkC;IACvE,uCAAuC,EAAE,sDAAsD;IAC/F,0CAA0C,EAAE,iDAAiD;CAC9F,CAAC;AA4KA,wBAAM;AA1KR,MAAM,2BAA2B,GAAG;IAClC,8BAA8B,EAAE,CAAC;IACjC,oCAAoC,EAAE,CAAC;IACvC,gDAAgD;IAChD,oBAAoB,EAAE,CAAC;IACvB,kBAAkB,EAAE,EAAE;IACtB,kBAAkB,EAAE,EAAE;IACtB,sBAAsB,EAAE,CAAC,cAAsB,EAAE,EAAE,CAAC,cAAc,GAAG,CAAC;IACtE,gDAAgD;IAChD,cAAc,EAAE,CAAC,EAAE,2BAA2B;CAC/C,CAAC;AAgLA,kEAA2B;AA5K7B,IAAK,YAIJ;AAJD,WAAK,YAAY;IACf,qDAAc,CAAA;IACd,+CAAW,CAAA;IACX,yDAAgB,CAAA;AAClB,CAAC,EAJI,YAAY,KAAZ,YAAY,QAIhB;AA0JC,oCAAY;AAxJd,IAAK,cAKJ;AALD,WAAK,cAAc;IACjB,0DAAc,CAAA;IACd,wDAAa,CAAA;IACb,4DAAe,CAAA;IACf,sEAAoB,CAAA;AACtB,CAAC,EALI,cAAc,KAAd,cAAc,QAKlB;AAoJC,wCAAc;AAlJhB,IAAK,UAIJ;AAJD,WAAK,UAAU;IACb,+CAAa,CAAA;IACb,+CAAa,CAAA;IACb,mDAAe,CAAA;AACjB,CAAC,EAJI,UAAU,KAAV,UAAU,QAId;AA+IC,gCAAU;AA7IZ,MAAM,wBAAwB,GAAG,IAAI,CAAC;AA0JpC,4DAAwB;AAzJ1B,MAAM,sBAAsB,GAAG,IAAI,CAAC;AA0JlC,wDAAsB;AAxJxB,MAAM,yBAAyB,GAAG,IAAI,CAAC;AAyJrC,8DAAyB;AAvJ3B,IAAK,cAUJ;AAVD,WAAK,cAAc;IACjB,yDAAc,CAAA;IACd,yDAAc,CAAA;IACd,+DAAiB,CAAA;IACjB,+EAAyB,CAAA;IACzB,yEAAsB,CAAA;IACtB,6EAAwB,CAAA;IACxB,+DAAiB,CAAA;IACjB,iFAA0B,CAAA;IAC1B,iFAA0B,CAAA;AAC5B,CAAC,EAVI,cAAc,KAAd,cAAc,QAUlB;AAgIC,wCAAc;AA9HhB,IAAK,cAIJ;AAJD,WAAK,cAAc;IACjB,mDAAW,CAAA;IACX,2DAAe,CAAA;IACf,mDAAW,CAAA;AACb,CAAC,EAJI,cAAc,KAAd,cAAc,QAIlB;AAyHC,wCAAc;AAvHhB,IAAK,gBAcJ;AAdD,WAAK,gBAAgB;IACnB,6DAAW,CAAA;IACX,mEAAc,CAAA;IACd,iEAAa,CAAA;IACb,uFAAwB,CAAA;IACxB,+GAAoC,CAAA;IACpC,mFAAsB,CAAA;IACtB,2GAAkC,CAAA;IAClC,mFAAsB,CAAA;IACtB,yFAAyB,CAAA;IACzB,iGAA6B,CAAA;IAC7B,sEAAgB,CAAA;IAChB,wEAAiB,CAAA;IACjB,0DAAU,CAAA;AACZ,CAAC,EAdI,gBAAgB,KAAhB,gBAAgB,QAcpB;AA2GC,4CAAgB"} \ No newline at end of file diff --git a/node_modules/socks/build/common/helpers.js b/node_modules/socks/build/common/helpers.js index 5bf4cc47744c5..f84db8f6729d6 100644 --- a/node_modules/socks/build/common/helpers.js +++ b/node_modules/socks/build/common/helpers.js @@ -26,6 +26,8 @@ function validateSocksClientOptions(options, acceptedCommands = ['connect', 'bin if (!isValidSocksProxy(options.proxy)) { throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxy, options); } + // Validate custom auth (if set) + validateCustomProxyAuth(options.proxy, options); // Check timeout if (options.timeout && !isValidTimeoutValue(options.timeout)) { throw new 
util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsTimeout, options); @@ -61,6 +63,8 @@ function validateSocksClientChainOptions(options) { if (!isValidSocksProxy(proxy)) { throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxy, options); } + // Validate custom auth (if set) + validateCustomProxyAuth(proxy, options); }); // Check timeout if (options.timeout && !isValidTimeoutValue(options.timeout)) { @@ -68,6 +72,29 @@ function validateSocksClientChainOptions(options) { } } exports.validateSocksClientChainOptions = validateSocksClientChainOptions; +function validateCustomProxyAuth(proxy, options) { + if (proxy.custom_auth_method !== undefined) { + // Invalid auth method range + if (proxy.custom_auth_method < constants_1.SOCKS5_CUSTOM_AUTH_START || + proxy.custom_auth_method > constants_1.SOCKS5_CUSTOM_AUTH_END) { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthRange, options); + } + // Missing custom_auth_request_handler + if (proxy.custom_auth_request_handler === undefined || + typeof proxy.custom_auth_request_handler !== 'function') { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options); + } + // Missing custom_auth_response_size + if (proxy.custom_auth_response_size === undefined) { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options); + } + // Missing/invalid custom_auth_response_handler + if (proxy.custom_auth_response_handler === undefined || + typeof proxy.custom_auth_response_handler !== 'function') { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options); + } + } +} /** * Validates a SocksRemoteHost * @param remoteHost { SocksRemoteHost } diff --git a/node_modules/socks/build/common/helpers.js.map b/node_modules/socks/build/common/helpers.js.map index 3313a3c0323af..dae124861aa90 100644 --- a/node_modules/socks/build/common/helpers.js.map +++ b/node_modules/socks/build/common/helpers.js.map @@ -1 +1 @@ 
-{"version":3,"file":"helpers.js","sourceRoot":"","sources":["../../src/common/helpers.ts"],"names":[],"mappings":";;;AAKA,iCAAwC;AACxC,2CAA6D;AAC7D,iCAAiC;AAEjC;;;;GAIG;AACH,SAAS,0BAA0B,CACjC,OAA2B,EAC3B,gBAAgB,GAAG,CAAC,SAAS,EAAE,MAAM,EAAE,WAAW,CAAC;IAEnD,8BAA8B;IAC9B,IAAI,CAAC,wBAAY,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAClC,MAAM,IAAI,uBAAgB,CAAC,kBAAM,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;KACjE;IAED,6CAA6C;IAC7C,IAAI,gBAAgB,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE;QACpD,MAAM,IAAI,uBAAgB,CAAC,kBAAM,CAAC,+BAA+B,EAAE,OAAO,CAAC,CAAC;KAC7E;IAED,oBAAoB;IACpB,IAAI,CAAC,sBAAsB,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QAChD,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,oCAAoC,EAC3C,OAAO,CACR,CAAC;KACH;IAED,2BAA2B;IAC3B,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QACrC,MAAM,IAAI,uBAAgB,CAAC,kBAAM,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;KAC5E;IAED,gBAAgB;IAChB,IAAI,OAAO,CAAC,OAAO,IAAI,CAAC,mBAAmB,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAC5D,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,gCAAgC,EACvC,OAAO,CACR,CAAC;KACH;IAED,sCAAsC;IACtC,IACE,OAAO,CAAC,eAAe;QACvB,CAAC,CAAC,OAAO,CAAC,eAAe,YAAY,MAAM,CAAC,MAAM,CAAC,EACnD;QACA,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,uCAAuC,EAC9C,OAAO,CACR,CAAC;KACH;AACH,CAAC;AA0FO,gEAA0B;AAxFlC;;;GAGG;AACH,SAAS,+BAA+B,CAAC,OAAgC;IACvE,2CAA2C;IAC3C,IAAI,OAAO,CAAC,OAAO,KAAK,SAAS,EAAE;QACjC,MAAM,IAAI,uBAAgB,CAAC,kBAAM,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;KACtE;IAED,oBAAoB;IACpB,IAAI,CAAC,sBAAsB,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QAChD,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,oCAAoC,EAC3C,OAAO,CACR,CAAC;KACH;IAED,4BAA4B;IAC5B,IACE,CAAC,CACC,OAAO,CAAC,OAAO;QACf,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;QAC9B,OAAO,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC,CAC5B,EACD;QACA,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,sCAAsC,EAC7C,OAAO,CACR,CAAC;KACH;IAED,mBAAmB;IACnB,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,KAAiB,EAAE,EAAE;QAC5C,IAAI,CAAC,iBAAiB,CAAC,KAAK,CAAC,EAAE;YAC7B,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,8BAA8B,EACrC,OAAO,CACR,CAAC;SACH;IACH,CAAC,CAAC,CAAC;IAEH,gBAAgB;IAChB,IAAI,OAAO,CAAC,OAAO,IAAI,CAAC,mBAAmB,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAC5D,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,gCAAgC,EACvC,OAAO,CACR,CAAC;KACH;AACH,CAAC;AAuCmC,0EAA+B;AArCnE;;;GAGG;AACH,SAAS,sBAAsB,CAAC,UAA2B;IACzD,OAAO,CACL,UAAU;QACV,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ;QACnC,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ;QACnC,UAAU,CAAC,IAAI,IAAI,CAAC;QACpB,UAAU,CAAC,IAAI,IAAI,KAAK,CACzB,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,SAAS,iBAAiB,CAAC,KAAiB;IAC1C,OAAO,CACL,KAAK;QACL,CAAC,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,IAAI,OAAO,KAAK,CAAC,SAAS,KAAK,QAAQ,CAAC;QACvE,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ;QAC9B,KAAK,CAAC,IAAI,IAAI,CAAC;QACf,KAAK,CAAC,IAAI,IAAI,KAAK;QACnB,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,IAAI,KAAK,CAAC,IAAI,KAAK,CAAC,CAAC,CACvC,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,SAAS,mBAAmB,CAAC,KAAa;IACxC,OAAO,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,GAAG,CAAC,CAAC;AAChD,CAAC"} \ No newline at end of file 
+{"version":3,"file":"helpers.js","sourceRoot":"","sources":["../../src/common/helpers.ts"],"names":[],"mappings":";;;AAKA,iCAAwC;AACxC,2CAMqB;AACrB,iCAAiC;AAEjC;;;;GAIG;AACH,SAAS,0BAA0B,CACjC,OAA2B,EAC3B,gBAAgB,GAAG,CAAC,SAAS,EAAE,MAAM,EAAE,WAAW,CAAC;IAEnD,8BAA8B;IAC9B,IAAI,CAAC,wBAAY,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAClC,MAAM,IAAI,uBAAgB,CAAC,kBAAM,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;KACjE;IAED,6CAA6C;IAC7C,IAAI,gBAAgB,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE;QACpD,MAAM,IAAI,uBAAgB,CAAC,kBAAM,CAAC,+BAA+B,EAAE,OAAO,CAAC,CAAC;KAC7E;IAED,oBAAoB;IACpB,IAAI,CAAC,sBAAsB,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QAChD,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,oCAAoC,EAC3C,OAAO,CACR,CAAC;KACH;IAED,2BAA2B;IAC3B,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QACrC,MAAM,IAAI,uBAAgB,CAAC,kBAAM,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;KAC5E;IAED,gCAAgC;IAChC,uBAAuB,CAAC,OAAO,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;IAEhD,gBAAgB;IAChB,IAAI,OAAO,CAAC,OAAO,IAAI,CAAC,mBAAmB,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAC5D,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,gCAAgC,EACvC,OAAO,CACR,CAAC;KACH;IAED,sCAAsC;IACtC,IACE,OAAO,CAAC,eAAe;QACvB,CAAC,CAAC,OAAO,CAAC,eAAe,YAAY,MAAM,CAAC,MAAM,CAAC,EACnD;QACA,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,uCAAuC,EAC9C,OAAO,CACR,CAAC;KACH;AACH,CAAC;AA6IO,gEAA0B;AA3IlC;;;GAGG;AACH,SAAS,+BAA+B,CAAC,OAAgC;IACvE,2CAA2C;IAC3C,IAAI,OAAO,CAAC,OAAO,KAAK,SAAS,EAAE;QACjC,MAAM,IAAI,uBAAgB,CAAC,kBAAM,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;KACtE;IAED,oBAAoB;IACpB,IAAI,CAAC,sBAAsB,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QAChD,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,oCAAoC,EAC3C,OAAO,CACR,CAAC;KACH;IAED,4BAA4B;IAC5B,IACE,CAAC,CACC,OAAO,CAAC,OAAO;QACf,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;QAC9B,OAAO,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC,CAC5B,EACD;QACA,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,sCAAsC,EAC7C,OAAO,CACR,CAAC;KACH;IAED,mBAAmB;IACnB,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,KAAiB,EAAE,EAAE;QAC5C,IAAI,CAAC,iBAAiB,CAAC,KAAK,CAAC,EAAE;YAC7B,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,8BAA8B,EACrC,OAAO,CACR,CAAC;SACH;QAED,gCAAgC;QAChC,uBAAuB,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;IAC1C,CAAC,CAAC,CAAC;IAEH,gBAAgB;IAChB,IAAI,OAAO,CAAC,OAAO,IAAI,CAAC,mBAAmB,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAC5D,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,gCAAgC,EACvC,OAAO,CACR,CAAC;KACH;AACH,CAAC;AAuFmC,0EAA+B;AArFnE,SAAS,uBAAuB,CAC9B,KAAiB,EACjB,OAAqD;IAErD,IAAI,KAAK,CAAC,kBAAkB,KAAK,SAAS,EAAE;QAC1C,4BAA4B;QAC5B,IACE,KAAK,CAAC,kBAAkB,GAAG,oCAAwB;YACnD,KAAK,CAAC,kBAAkB,GAAG,kCAAsB,EACjD;YACA,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,wCAAwC,EAC/C,OAAO,CACR,CAAC;SACH;QAED,sCAAsC;QACtC,IACE,KAAK,CAAC,2BAA2B,KAAK,SAAS;YAC/C,OAAO,KAAK,CAAC,2BAA2B,KAAK,UAAU,EACvD;YACA,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,0CAA0C,EACjD,OAAO,CACR,CAAC;SACH;QAED,oCAAoC;QACpC,IAAI,KAAK,CAAC,yBAAyB,KAAK,SAAS,EAAE;YACjD,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,0CAA0C,EACjD,OAAO,CACR,CAAC;SACH;QAED,+CAA+C;QAC/C,IACE,KAAK,CAAC,4BAA4B,KAAK,SAAS;YAChD,OAAO,KAAK,CAAC,4BAA4B,KAAK,UAAU,EACxD;YACA,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,0CAA0C,EACjD,OAAO,CACR,CAAC;SACH;KACF;AACH,CAAC;AAED;;;GAGG;AACH,SAAS,sBAAsB,CAAC,UAA2B;IACzD,OAAO,CACL,UAAU;QACV,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ;QACnC,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ;QACnC,UAAU,CAAC,IAAI,IAAI,CAAC;QACpB,UAAU,CAAC,IAAI,IAAI,KAAK,CACzB,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,SAAS,iBAAiB,CAAC,KAAiB;IAC1C,OAAO,CACL,KAAK;QACL,CAAC,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,IAAI,OAAO,KAAK,CAAC,SAAS,KAAK,QAAQ,CAAC;QACvE,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ;QAC9B,KAAK,CAAC,IAAI,IAAI,CAAC;QACf,KAAK,CAAC,IAAI,IAAI,KAAK;QACnB,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,IAAI,KAAK,CAAC,IAAI,KAAK,CAAC,CAAC,CACvC,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,SAAS,mBAAmB,CAAC,KAAa;IACxC,OAAO,OAAO,KAAK,KAAK,QAAQ,IAAI,KA
AK,GAAG,CAAC,CAAC;AAChD,CAAC"} \ No newline at end of file diff --git a/node_modules/socks/package.json b/node_modules/socks/package.json index 8900ebbbb581b..c518b1ef7cf06 100644 --- a/node_modules/socks/package.json +++ b/node_modules/socks/package.json @@ -1,7 +1,7 @@ { "name": "socks", "private": false, - "version": "2.5.1", + "version": "2.6.1", "description": "Fully featured SOCKS proxy client supporting SOCKSv4, SOCKSv4a, and SOCKSv5. Includes Bind and Associate functionality.", "main": "build/index.js", "typings": "typings/index.d.ts", @@ -34,17 +34,17 @@ "readmeFilename": "README.md", "devDependencies": { "@types/ip": "1.1.0", - "@types/mocha": "^8.0.3", - "@types/node": "^14.14.3", + "@types/mocha": "^8.2.2", + "@types/node": "^14.14.41", "coveralls": "3.1.0", - "mocha": "^8.2.0", + "mocha": "^8.3.2", "nyc": "15.1.0", - "prettier": "^2.1.2", + "prettier": "^2.2.1", "socks5-server": "^0.1.1", - "ts-node": "^9.0.0", + "ts-node": "^9.1.1", "tslint": "^6.1.3", "tslint-config-airbnb": "^5.11.2", - "typescript": "^4.0.3" + "typescript": "^4.2.4" }, "dependencies": { "ip": "^1.1.5", @@ -57,7 +57,7 @@ "coverage": "NODE_ENV=test nyc npm test", "coveralls": "NODE_ENV=test nyc npm test && nyc report --reporter=text-lcov | coveralls", "lint": "tslint --project tsconfig.json 'src/**/*.ts'", - "build": "rm -rf build typings && tslint --project tsconfig.json && prettier --write ./src/**/*.ts --config .prettierrc.yaml && tsc -p ." + "build": "rm -rf build typings && prettier --write ./src/**/*.ts --config .prettierrc.yaml && tsc -p ." }, "nyc": { "extension": [ diff --git a/node_modules/socks/typings/client/socksclient.d.ts b/node_modules/socks/typings/client/socksclient.d.ts index ff762a8a663f3..d8ce1b965f0e0 100644 --- a/node_modules/socks/typings/client/socksclient.d.ts +++ b/node_modules/socks/typings/client/socksclient.d.ts @@ -22,6 +22,7 @@ declare class SocksClient extends EventEmitter implements SocksClient { private state; private receiveBuffer; private nextRequiredPacketBufferSize; + private socks5ChosenAuthType; private onDataReceived; private onClose; private onError; @@ -132,6 +133,10 @@ declare class SocksClient extends EventEmitter implements SocksClient { * Note: No auth and user/pass are currently supported. */ private sendSocks5UserPassAuthentication; + private sendSocks5CustomAuthentication; + private handleSocks5CustomAuthHandshakeResponse; + private handleSocks5AuthenticationNoAuthHandshakeResponse; + private handleSocks5AuthenticationUserPassHandshakeResponse; /** * Handles Socks v5 auth handshake response. 
* @param data diff --git a/node_modules/socks/typings/common/constants.d.ts b/node_modules/socks/typings/common/constants.d.ts index b801c1e0607e9..664795cb180fb 100644 --- a/node_modules/socks/typings/common/constants.d.ts +++ b/node_modules/socks/typings/common/constants.d.ts @@ -13,6 +13,8 @@ declare const ERRORS: { InvalidSocksClientOptionsProxy: string; InvalidSocksClientOptionsTimeout: string; InvalidSocksClientOptionsProxiesLength: string; + InvalidSocksClientOptionsCustomAuthRange: string; + InvalidSocksClientOptionsCustomAuthOptions: string; NegotiationError: string; SocketClosed: string; ProxyConnectionTimedOut: string; @@ -57,6 +59,9 @@ declare enum Socks5Auth { GSSApi = 1, UserPass = 2 } +declare const SOCKS5_CUSTOM_AUTH_START = 128; +declare const SOCKS5_CUSTOM_AUTH_END = 254; +declare const SOCKS5_NO_ACCEPTABLE_AUTH = 255; declare enum Socks5Response { Granted = 0, Failure = 1, @@ -98,6 +103,10 @@ declare type SocksProxy = RequireOnlyOne<{ type: SocksProxyType; userId?: string; password?: string; + custom_auth_method?: number; + custom_auth_request_handler?: () => Promise<Buffer>; + custom_auth_response_size?: number; + custom_auth_response_handler?: (data: Buffer) => Promise<boolean>; }, 'host' | 'ipaddress'>; /** * Represents a remote host @@ -138,4 +147,4 @@ interface SocksUDPFrameDetails { remoteHost: SocksRemoteHost; data: Buffer; } -export { DEFAULT_TIMEOUT, ERRORS, SocksProxyType, SocksCommand, Socks4Response, Socks5Auth, Socks5HostType, Socks5Response, SocksClientState, SocksProxy, SocksRemoteHost, SocksCommandOption, SocksClientOptions, SocksClientChainOptions, SocksClientEstablishedEvent, SocksClientBoundEvent, SocksUDPFrameDetails, SOCKS_INCOMING_PACKET_SIZES, }; +export { DEFAULT_TIMEOUT, ERRORS, SocksProxyType, SocksCommand, Socks4Response, Socks5Auth, Socks5HostType, Socks5Response, SocksClientState, SocksProxy, SocksRemoteHost, SocksCommandOption, SocksClientOptions, SocksClientChainOptions, SocksClientEstablishedEvent, SocksClientBoundEvent, SocksUDPFrameDetails, SOCKS_INCOMING_PACKET_SIZES, SOCKS5_CUSTOM_AUTH_START, SOCKS5_CUSTOM_AUTH_END, SOCKS5_NO_ACCEPTABLE_AUTH, }; diff --git a/node_modules/socks/typings/common/receiveBuffer.d.ts b/node_modules/socks/typings/common/receivebuffer.d.ts similarity index 100% rename from node_modules/socks/typings/common/receiveBuffer.d.ts rename to node_modules/socks/typings/common/receivebuffer.d.ts diff --git a/node_modules/spdx-correct/README.md b/node_modules/spdx-correct/README.md deleted file mode 100644 index ab388cf940648..0000000000000 --- a/node_modules/spdx-correct/README.md +++ /dev/null @@ -1,14 +0,0 @@ -```javascript -var correct = require('spdx-correct') -var assert = require('assert') - -assert.equal(correct('mit'), 'MIT') - -assert.equal(correct('Apache 2'), 'Apache-2.0') - -assert(correct('No idea what license') === null) - -// disable upgrade option -assert(correct('GPL-3.0'), 'GPL-3.0-or-later') -assert(correct('GPL-3.0', { upgrade: false }), 'GPL-3.0') -``` diff --git a/node_modules/spdx-exceptions/README.md b/node_modules/spdx-exceptions/README.md deleted file mode 100644 index 6c927ecc69119..0000000000000 --- a/node_modules/spdx-exceptions/README.md +++ /dev/null @@ -1,36 +0,0 @@ -The package exports an array of strings. Each string is an identifier -for a license exception under the [Software Package Data Exchange -(SPDX)][SPDX] software license metadata standard. 
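A minimal sketch of using that exported array (assuming `Classpath-exception-2.0`, a current SPDX exception identifier, appears on the list):

```javascript
var exceptions = require('spdx-exceptions')

// The export is a plain array of exception identifier strings,
// so ordinary array methods apply.
console.log(exceptions.length > 0)
console.log(exceptions.indexOf('Classpath-exception-2.0') !== -1)
```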
- -[SPDX]: https://spdx.org - -## Copyright and Licensing - -### SPDX - -"SPDX" is a federally registered United States trademark of The Linux -Foundation Corporation. - -From version 2.0 of the [SPDX] specification: - -> Copyright © 2010-2015 Linux Foundation and its Contributors. Licensed -> under the Creative Commons Attribution License 3.0 Unported. All other -> rights are expressly reserved. - -The Linux Foundation and the SPDX working groups are good people. Only -they decide what "SPDX" means, as a standard and otherwise. I respect -their work and their rights. You should, too. - -### This Package - -> I created this package by copying exception identifiers out of the -> SPDX specification. That work was mechanical, routine, and required no -> creativity whatsoever. - Kyle Mitchell, package author - -United States users concerned about intellectual property may wish to -discuss the following Supreme Court decisions with their attorneys: - -- _Baker v. Selden_, 101 U.S. 99 (1879) - -- _Feist Publications, Inc., v. Rural Telephone Service Co._, - 499 U.S. 340 (1991) diff --git a/node_modules/spdx-expression-parse/README.md b/node_modules/spdx-expression-parse/README.md deleted file mode 100644 index 9406462e3cff3..0000000000000 --- a/node_modules/spdx-expression-parse/README.md +++ /dev/null @@ -1,91 +0,0 @@ -This package parses [SPDX license expression](https://spdx.org/spdx-specification-21-web-version#h.jxpfx0ykyb60) strings describing license terms, like [package.json license strings](https://docs.npmjs.com/files/package.json#license), into consistently structured ECMAScript objects. The npm command-line interface depends on this package, as do many automatic license-audit tools. - -In a nutshell: - -```javascript -var parse = require('spdx-expression-parse') -var assert = require('assert') - -assert.deepEqual( - // Licensed under the terms of the Two-Clause BSD License. - parse('BSD-2-Clause'), - {license: 'BSD-2-Clause'} -) - -assert.throws(function () { - // An invalid SPDX license expression. - // Should be `Apache-2.0`. - parse('Apache 2') -}) - -assert.deepEqual( - // Dual licensed under either: - // - LGPL 2.1 - // - a combination of Three-Clause BSD and MIT - parse('(LGPL-2.1 OR BSD-3-Clause AND MIT)'), - { - left: {license: 'LGPL-2.1'}, - conjunction: 'or', - right: { - left: {license: 'BSD-3-Clause'}, - conjunction: 'and', - right: {license: 'MIT'} - } - } -) -``` - -The syntax comes from the [Software Package Data eXchange (SPDX)](https://spdx.org/), a standard from the [Linux Foundation](https://www.linuxfoundation.org) for shareable data about software package license terms. SPDX aims to make sharing and auditing license data easy, especially for users of open-source software. - -The bulk of the SPDX standard describes syntax and semantics of XML metadata files. This package implements two lightweight, plain-text components of that larger standard: - -1. The [license list](https://spdx.org/licenses), a mapping from specific string identifiers, like `Apache-2.0`, to standard form license texts and bolt-on license exceptions. The [spdx-license-ids](https://www.npmjs.com/package/spdx-license-ids) and [spdx-exceptions](https://www.npmjs.com/package/spdx-exceptions) packages implement the license list. `spdx-expression-parse` depends on and `require()`s them. 
- - Any license identifier from the license list is a valid license expression: - - ```javascript - var identifiers = [] - .concat(require('spdx-license-ids')) - .concat(require('spdx-license-ids/deprecated')) - - identifiers.forEach(function (id) { - assert.deepEqual(parse(id), {license: id}) - }) - ``` - - So is any license identifier `WITH` a standardized license exception: - - ```javascript - identifiers.forEach(function (id) { - require('spdx-exceptions').forEach(function (e) { - assert.deepEqual( - parse(id + ' WITH ' + e), - {license: id, exception: e} - ) - }) - }) - ``` - -2. The license expression language, for describing simple and complex license terms, like `MIT` for MIT-licensed and `(GPL-2.0 OR Apache-2.0)` for dual-licensing under GPL 2.0 and Apache 2.0. `spdx-expression-parse` itself implements license expression language, exporting a parser. - - ```javascript - assert.deepEqual( - // Licensed under a combination of: - // - the MIT License AND - // - a combination of: - // - LGPL 2.1 (or a later version) AND - // - Three-Clause BSD - parse('(MIT AND (LGPL-2.1+ AND BSD-3-Clause))'), - { - left: {license: 'MIT'}, - conjunction: 'and', - right: { - left: {license: 'LGPL-2.1', plus: true}, - conjunction: 'and', - right: {license: 'BSD-3-Clause'} - } - } - ) - ``` - -The Linux Foundation and its contributors license the SPDX standard under the terms of [the Creative Commons Attribution License 3.0 Unported (SPDX: "CC-BY-3.0")](http://spdx.org/licenses/CC-BY-3.0). "SPDX" is a United States federally registered trademark of the Linux Foundation. The authors of this package license their work under the terms of the MIT License. diff --git a/node_modules/spdx-license-ids/README.md b/node_modules/spdx-license-ids/README.md deleted file mode 100644 index 699514d1a28aa..0000000000000 --- a/node_modules/spdx-license-ids/README.md +++ /dev/null @@ -1,52 +0,0 @@ -# spdx-license-ids - -[![npm version](https://img.shields.io/npm/v/spdx-license-ids.svg)](https://www.npmjs.com/package/spdx-license-ids) -[![Github Actions](https://action-badges.now.sh/shinnn/spdx-license-ids)](https://wdp9fww0r9.execute-api.us-west-2.amazonaws.com/production/results/shinnn/spdx-license-ids) - -A list of [SPDX license](https://spdx.org/licenses/) identifiers - -## Installation - -[Download JSON directly](https://raw.githubusercontent.com/shinnn/spdx-license-ids/master/index.json), or [use](https://docs.npmjs.com/cli/install) [npm](https://docs.npmjs.com/about-npm/): - -``` -npm install spdx-license-ids -``` - -## [Node.js](https://nodejs.org/) API - -### require('spdx-license-ids') - -Type: `string[]` - -All license IDs except for the currently deprecated ones. - -```javascript -const ids = require('spdx-license-ids'); -//=> ['0BSD', 'AAL', 'ADSL', 'AFL-1.1', 'AFL-1.2', 'AFL-2.0', 'AFL-2.1', 'AFL-3.0', 'AGPL-1.0-only', ...] - -ids.includes('BSD-3-Clause'); //=> true -ids.includes('CC-BY-1.0'); //=> true - -ids.includes('GPL-3.0'); //=> false -``` - -### require('spdx-license-ids/deprecated') - -Type: `string[]` - -Deprecated license IDs. - -```javascript -const deprecatedIds = require('spdx-license-ids/deprecated'); -//=> ['AGPL-1.0', 'AGPL-3.0', 'GFDL-1.1', 'GFDL-1.2', 'GFDL-1.3', 'GPL-1.0', 'GPL-2.0', ...] 
- -deprecatedIds.includes('BSD-3-Clause'); //=> false -deprecatedIds.includes('CC-BY-1.0'); //=> false - -deprecatedIds.includes('GPL-3.0'); //=> true -``` - -## License - -[Creative Commons Zero v1.0 Universal](https://creativecommons.org/publicdomain/zero/1.0/deed) diff --git a/node_modules/spdx-license-ids/index.json b/node_modules/spdx-license-ids/index.json index 864d2410c83a9..c2d5e017b2967 100644 --- a/node_modules/spdx-license-ids/index.json +++ b/node_modules/spdx-license-ids/index.json @@ -42,11 +42,14 @@ "BSD-3-Clause-Attribution", "BSD-3-Clause-Clear", "BSD-3-Clause-LBNL", + "BSD-3-Clause-Modification", + "BSD-3-Clause-No-Military-License", "BSD-3-Clause-No-Nuclear-License", "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause-No-Nuclear-Warranty", "BSD-3-Clause-Open-MPI", "BSD-4-Clause", + "BSD-4-Clause-Shortened", "BSD-4-Clause-UC", "BSD-Protection", "BSD-Source-Code", @@ -59,6 +62,7 @@ "BitTorrent-1.1", "BlueOak-1.0.0", "Borceux", + "C-UDA-1.0", "CAL-1.0", "CAL-1.0-Combined-Work-Exception", "CATOSL-1.1", @@ -93,6 +97,7 @@ "CC-BY-SA-1.0", "CC-BY-SA-2.0", "CC-BY-SA-2.0-UK", + "CC-BY-SA-2.1-JP", "CC-BY-SA-2.5", "CC-BY-SA-3.0", "CC-BY-SA-3.0-AT", @@ -101,6 +106,7 @@ "CC0-1.0", "CDDL-1.0", "CDDL-1.1", + "CDL-1.0", "CDLA-Permissive-1.0", "CDLA-Sharing-1.0", "CECILL-1.0", @@ -129,6 +135,7 @@ "Cube", "D-FSL-1.0", "DOC", + "DRL-1.0", "DSDP", "Dotseqn", "ECL-1.0", @@ -151,7 +158,9 @@ "FTL", "Fair", "Frameworx-1.0", + "FreeBSD-DOC", "FreeImage", + "GD", "GFDL-1.1-invariants-only", "GFDL-1.1-invariants-or-later", "GFDL-1.1-no-invariants-only", @@ -227,6 +236,7 @@ "MIT", "MIT-0", "MIT-CMU", + "MIT-Modern-Variant", "MIT-advertising", "MIT-enna", "MIT-feh", @@ -246,6 +256,7 @@ "MulanPSL-2.0", "Multics", "Mup", + "NAIST-2003", "NASA-1.3", "NBPL-1.0", "NCGL-UK-2.0", @@ -280,6 +291,7 @@ "OFL-1.1-RFN", "OFL-1.1-no-RFN", "OGC-1.0", + "OGDL-Taiwan-1.0", "OGL-Canada-2.0", "OGL-UK-1.0", "OGL-UK-2.0", diff --git a/node_modules/spdx-license-ids/package.json b/node_modules/spdx-license-ids/package.json index eea631250e53e..5639091b87704 100644 --- a/node_modules/spdx-license-ids/package.json +++ b/node_modules/spdx-license-ids/package.json @@ -1,6 +1,6 @@ { "name": "spdx-license-ids", - "version": "3.0.7", + "version": "3.0.9", "description": "A list of SPDX license identifiers", "repository": "jslicense/spdx-license-ids", "author": "Shinnosuke Watanabe (https://github.com/shinnn)", diff --git a/node_modules/sshpk/.npmignore b/node_modules/sshpk/.npmignore deleted file mode 100644 index 8000b595bb4e2..0000000000000 --- a/node_modules/sshpk/.npmignore +++ /dev/null @@ -1,9 +0,0 @@ -.gitmodules -deps -docs -Makefile -node_modules -test -tools -coverage -man/src diff --git a/node_modules/sshpk/.travis.yml b/node_modules/sshpk/.travis.yml deleted file mode 100644 index c3394c258fc2a..0000000000000 --- a/node_modules/sshpk/.travis.yml +++ /dev/null @@ -1,11 +0,0 @@ -language: node_js -node_js: - - "5.10" - - "4.4" - - "4.1" - - "0.12" - - "0.10" -before_install: - - "make check" -after_success: - - '[ "${TRAVIS_NODE_VERSION}" = "4.4" ] && make codecovio' diff --git a/node_modules/sshpk/README.md b/node_modules/sshpk/README.md deleted file mode 100644 index 5740f74d17327..0000000000000 --- a/node_modules/sshpk/README.md +++ /dev/null @@ -1,804 +0,0 @@ -sshpk -========= - -Parse, convert, fingerprint and use SSH keys (both public and private) in pure -node -- no `ssh-keygen` or other external dependencies. 
- -Supports RSA, DSA, ECDSA (nistp-\*) and ED25519 key types, in PEM (PKCS#1, -PKCS#8) and OpenSSH formats. - -This library has been extracted from -[`node-http-signature`](https://github.com/joyent/node-http-signature) -(work by [Mark Cavage](https://github.com/mcavage) and -[Dave Eddy](https://github.com/bahamas10)) and -[`node-ssh-fingerprint`](https://github.com/bahamas10/node-ssh-fingerprint) -(work by Dave Eddy), with additions (including ECDSA support) by -[Alex Wilson](https://github.com/arekinath). - -Install -------- - -``` -npm install sshpk -``` - -Examples --------- - -```js -var sshpk = require('sshpk'); - -var fs = require('fs'); - -/* Read in an OpenSSH-format public key */ -var keyPub = fs.readFileSync('id_rsa.pub'); -var key = sshpk.parseKey(keyPub, 'ssh'); - -/* Get metadata about the key */ -console.log('type => %s', key.type); -console.log('size => %d bits', key.size); -console.log('comment => %s', key.comment); - -/* Compute key fingerprints, in new OpenSSH (>6.7) format, and old MD5 */ -console.log('fingerprint => %s', key.fingerprint().toString()); -console.log('old-style fingerprint => %s', key.fingerprint('md5').toString()); -``` - -Example output: - -``` -type => rsa -size => 2048 bits -comment => foo@foo.com -fingerprint => SHA256:PYC9kPVC6J873CSIbfp0LwYeczP/W4ffObNCuDJ1u5w -old-style fingerprint => a0:c8:ad:6c:32:9a:32:fa:59:cc:a9:8c:0a:0d:6e:bd -``` - -More examples: converting between formats: - -```js -/* Read in a PEM public key */ -var keyPem = fs.readFileSync('id_rsa.pem'); -var key = sshpk.parseKey(keyPem, 'pem'); - -/* Convert to PEM PKCS#8 public key format */ -var pemBuf = key.toBuffer('pkcs8'); - -/* Convert to SSH public key format (and return as a string) */ -var sshKey = key.toString('ssh'); -``` - -Signing and verifying: - -```js -/* Read in an OpenSSH/PEM *private* key */ -var keyPriv = fs.readFileSync('id_ecdsa'); -var key = sshpk.parsePrivateKey(keyPriv, 'pem'); - -var data = 'some data'; - -/* Sign some data with the key */ -var s = key.createSign('sha1'); -s.update(data); -var signature = s.sign(); - -/* Now load the public key (could also use just key.toPublic()) */ -var keyPub = fs.readFileSync('id_ecdsa.pub'); -key = sshpk.parseKey(keyPub, 'ssh'); - -/* Make a crypto.Verifier with this key */ -var v = key.createVerify('sha1'); -v.update(data); -var valid = v.verify(signature); -/* => true! */ -``` - -Matching fingerprints with keys: - -```js -var fp = sshpk.parseFingerprint('SHA256:PYC9kPVC6J873CSIbfp0LwYeczP/W4ffObNCuDJ1u5w'); - -var keys = [sshpk.parseKey(...), sshpk.parseKey(...), ...]; - -keys.forEach(function (key) { - if (fp.matches(key)) - console.log('found it!'); -}); -``` - -Usage ------ - -## Public keys - -### `parseKey(data[, format = 'auto'[, options]])` - -Parses a key from a given data format and returns a new `Key` object. 
- -Parameters - -- `data` -- Either a Buffer or String, containing the key -- `format` -- String name of format to use, valid options are: - - `auto`: choose automatically from all below - - `pem`: supports both PKCS#1 and PKCS#8 - - `ssh`: standard OpenSSH format, - - `pkcs1`, `pkcs8`: variants of `pem` - - `rfc4253`: raw OpenSSH wire format - - `openssh`: new post-OpenSSH 6.5 internal format, produced by - `ssh-keygen -o` - - `dnssec`: `.key` file format output by `dnssec-keygen` etc - - `putty`: the PuTTY `.ppk` file format (supports truncated variant without - all the lines from `Private-Lines:` onwards) -- `options` -- Optional Object, extra options, with keys: - - `filename` -- Optional String, name for the key being parsed - (eg. the filename that was opened). Used to generate - Error messages - - `passphrase` -- Optional String, encryption passphrase used to decrypt an - encrypted PEM file - -### `Key.isKey(obj)` - -Returns `true` if the given object is a valid `Key` object created by a version -of `sshpk` compatible with this one. - -Parameters - -- `obj` -- Object to identify - -### `Key#type` - -String, the type of key. Valid options are `rsa`, `dsa`, `ecdsa`. - -### `Key#size` - -Integer, "size" of the key in bits. For RSA/DSA this is the size of the modulus; -for ECDSA this is the bit size of the curve in use. - -### `Key#comment` - -Optional string, a key comment used by some formats (eg the `ssh` format). - -### `Key#curve` - -Only present if `this.type === 'ecdsa'`, string containing the name of the -named curve used with this key. Possible values include `nistp256`, `nistp384` -and `nistp521`. - -### `Key#toBuffer([format = 'ssh'])` - -Convert the key into a given data format and return the serialized key as -a Buffer. - -Parameters - -- `format` -- String name of format to use, for valid options see `parseKey()` - -### `Key#toString([format = 'ssh])` - -Same as `this.toBuffer(format).toString()`. - -### `Key#fingerprint([algorithm = 'sha256'[, hashType = 'ssh']])` - -Creates a new `Fingerprint` object representing this Key's fingerprint. - -Parameters - -- `algorithm` -- String name of hash algorithm to use, valid options are `md5`, - `sha1`, `sha256`, `sha384`, `sha512` -- `hashType` -- String name of fingerprint hash type to use, valid options are - `ssh` (the type of fingerprint used by OpenSSH, e.g. in - `ssh-keygen`), `spki` (used by HPKP, some OpenSSL applications) - -### `Key#createVerify([hashAlgorithm])` - -Creates a `crypto.Verifier` specialized to use this Key (and the correct public -key algorithm to match it). The returned Verifier has the same API as a regular -one, except that the `verify()` function takes only the target signature as an -argument. - -Parameters - -- `hashAlgorithm` -- optional String name of hash algorithm to use, any - supported by OpenSSL are valid, usually including - `sha1`, `sha256`. - -`v.verify(signature[, format])` Parameters - -- `signature` -- either a Signature object, or a Buffer or String -- `format` -- optional String, name of format to interpret given String with. - Not valid if `signature` is a Signature or Buffer. - -### `Key#createDiffieHellman()` -### `Key#createDH()` - -Creates a Diffie-Hellman key exchange object initialized with this key and all -necessary parameters. This has the same API as a `crypto.DiffieHellman` -instance, except that functions take `Key` and `PrivateKey` objects as -arguments, and return them where indicated for. 
- -This is only valid for keys belonging to a cryptosystem that supports DHE -or a close analogue (i.e. `dsa`, `ecdsa` and `curve25519` keys). An attempt -to call this function on other keys will yield an `Error`. - -## Private keys - -### `parsePrivateKey(data[, format = 'auto'[, options]])` - -Parses a private key from a given data format and returns a new -`PrivateKey` object. - -Parameters - -- `data` -- Either a Buffer or String, containing the key -- `format` -- String name of format to use, valid options are: - - `auto`: choose automatically from all below - - `pem`: supports both PKCS#1 and PKCS#8 - - `ssh`, `openssh`: new post-OpenSSH 6.5 internal format, produced by - `ssh-keygen -o` - - `pkcs1`, `pkcs8`: variants of `pem` - - `rfc4253`: raw OpenSSH wire format - - `dnssec`: `.private` format output by `dnssec-keygen` etc. -- `options` -- Optional Object, extra options, with keys: - - `filename` -- Optional String, name for the key being parsed - (eg. the filename that was opened). Used to generate - Error messages - - `passphrase` -- Optional String, encryption passphrase used to decrypt an - encrypted PEM file - -### `generatePrivateKey(type[, options])` - -Generates a new private key of a certain key type, from random data. - -Parameters - -- `type` -- String, type of key to generate. Currently supported are `'ecdsa'` - and `'ed25519'` -- `options` -- optional Object, with keys: - - `curve` -- optional String, for `'ecdsa'` keys, specifies the curve to use. - If ECDSA is specified and this option is not given, defaults to - using `'nistp256'`. - -### `PrivateKey.isPrivateKey(obj)` - -Returns `true` if the given object is a valid `PrivateKey` object created by a -version of `sshpk` compatible with this one. - -Parameters - -- `obj` -- Object to identify - -### `PrivateKey#type` - -String, the type of key. Valid options are `rsa`, `dsa`, `ecdsa`. - -### `PrivateKey#size` - -Integer, "size" of the key in bits. For RSA/DSA this is the size of the modulus; -for ECDSA this is the bit size of the curve in use. - -### `PrivateKey#curve` - -Only present if `this.type === 'ecdsa'`, string containing the name of the -named curve used with this key. Possible values include `nistp256`, `nistp384` -and `nistp521`. - -### `PrivateKey#toBuffer([format = 'pkcs1'])` - -Convert the key into a given data format and return the serialized key as -a Buffer. - -Parameters - -- `format` -- String name of format to use, valid options are listed under - `parsePrivateKey`. Note that ED25519 keys default to `openssh` - format instead (as they have no `pkcs1` representation). - -### `PrivateKey#toString([format = 'pkcs1'])` - -Same as `this.toBuffer(format).toString()`. - -### `PrivateKey#toPublic()` - -Extract just the public part of this private key, and return it as a `Key` -object. - -### `PrivateKey#fingerprint([algorithm = 'sha256'])` - -Same as `this.toPublic().fingerprint()`. - -### `PrivateKey#createVerify([hashAlgorithm])` - -Same as `this.toPublic().createVerify()`. - -### `PrivateKey#createSign([hashAlgorithm])` - -Creates a `crypto.Sign` specialized to use this PrivateKey (and the correct -key algorithm to match it). The returned Signer has the same API as a regular -one, except that the `sign()` function takes no arguments, and returns a -`Signature` object. - -Parameters - -- `hashAlgorithm` -- optional String name of hash algorithm to use, any - supported by OpenSSL are valid, usually including - `sha1`, `sha256`. 
- -`v.sign()` Parameters - -- none - -### `PrivateKey#derive(newType)` - -Derives a related key of type `newType` from this key. Currently this is -only supported to change between `ed25519` and `curve25519` keys which are -stored with the same private key (but usually distinct public keys in order -to avoid degenerate keys that lead to a weak Diffie-Hellman exchange). - -Parameters - -- `newType` -- String, type of key to derive, either `ed25519` or `curve25519` - -## Fingerprints - -### `parseFingerprint(fingerprint[, options])` - -Pre-parses a fingerprint, creating a `Fingerprint` object that can be used to -quickly locate a key by using the `Fingerprint#matches` function. - -Parameters - -- `fingerprint` -- String, the fingerprint value, in any supported format -- `options` -- Optional Object, with properties: - - `algorithms` -- Array of strings, names of hash algorithms to limit - support to. If `fingerprint` uses a hash algorithm not on - this list, throws `InvalidAlgorithmError`. - - `hashType` -- String, the type of hash the fingerprint uses, either `ssh` - or `spki` (normally auto-detected based on the format, but - can be overridden) - - `type` -- String, the entity this fingerprint identifies, either `key` or - `certificate` - -### `Fingerprint.isFingerprint(obj)` - -Returns `true` if the given object is a valid `Fingerprint` object created by a -version of `sshpk` compatible with this one. - -Parameters - -- `obj` -- Object to identify - -### `Fingerprint#toString([format])` - -Returns a fingerprint as a string, in the given format. - -Parameters - -- `format` -- Optional String, format to use, valid options are `hex` and - `base64`. If this `Fingerprint` uses the `md5` algorithm, the - default format is `hex`. Otherwise, the default is `base64`. - -### `Fingerprint#matches(keyOrCertificate)` - -Verifies whether or not this `Fingerprint` matches a given `Key` or -`Certificate`. This function uses double-hashing to avoid leaking timing -information. Returns a boolean. - -Note that a `Key`-type Fingerprint will always return `false` if asked to match -a `Certificate` and vice versa. - -Parameters - -- `keyOrCertificate` -- a `Key` object or `Certificate` object, the entity to - match this fingerprint against - -## Signatures - -### `parseSignature(signature, algorithm, format)` - -Parses a signature in a given format, creating a `Signature` object. Useful -for converting between the SSH and ASN.1 (PKCS/OpenSSL) signature formats, and -also returned as output from `PrivateKey#createSign().sign()`. - -A Signature object can also be passed to a verifier produced by -`Key#createVerify()` and it will automatically be converted internally into the -correct format for verification. - -Parameters - -- `signature` -- a Buffer (binary) or String (base64), data of the actual - signature in the given format -- `algorithm` -- a String, name of the algorithm to be used, possible values - are `rsa`, `dsa`, `ecdsa` -- `format` -- a String, either `asn1` or `ssh` - -### `Signature.isSignature(obj)` - -Returns `true` if the given object is a valid `Signature` object created by a -version of `sshpk` compatible with this one. - -Parameters - -- `obj` -- Object to identify - -### `Signature#toBuffer([format = 'asn1'])` - -Converts a Signature to the given format and returns it as a Buffer. - -Parameters - -- `format` -- a String, either `asn1` or `ssh` - -### `Signature#toString([format = 'asn1'])` - -Same as `this.toBuffer(format).toString('base64')`. 
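A small, hedged sketch (reusing the `id_ecdsa` private key from the earlier examples) showing how a freshly produced `Signature` can be serialized in one format and re-parsed from the other:

```js
var sshpk = require('sshpk');
var fs = require('fs');

/* Sign some data with an ECDSA private key */
var key = sshpk.parsePrivateKey(fs.readFileSync('id_ecdsa'), 'pem');
var s = key.createSign('sha256');
s.update('some data');
var sig = s.sign();

/* Serialize in ASN.1 (OpenSSL) form as a Buffer, and in SSH form as base64 */
var asn1Buf = sig.toBuffer('asn1');
var sshStr = sig.toString('ssh');

/* Re-parse the SSH-format string back into a Signature object */
var reparsed = sshpk.parseSignature(sshStr, 'ecdsa', 'ssh');
```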
- -## Certificates - -`sshpk` includes basic support for parsing certificates in X.509 (PEM) format -and the OpenSSH certificate format. This feature is intended to be used mainly -to access basic metadata about certificates, extract public keys from them, and -also to generate simple self-signed certificates from an existing key. - -Notably, there is no implementation of CA chain-of-trust verification, and only -very minimal support for key usage restrictions. Please do the security world -a favour, and DO NOT use this code for certificate verification in the -traditional X.509 CA chain style. - -### `parseCertificate(data, format)` - -Parameters - - - `data` -- a Buffer or String - - `format` -- a String, format to use, one of `'openssh'`, `'pem'` (X.509 in a - PEM wrapper), or `'x509'` (raw DER encoded) - -### `createSelfSignedCertificate(subject, privateKey[, options])` - -Parameters - - - `subject` -- an Identity, the subject of the certificate - - `privateKey` -- a PrivateKey, the key of the subject: will be used both to be - placed in the certificate and also to sign it (since this is - a self-signed certificate) - - `options` -- optional Object, with keys: - - `lifetime` -- optional Number, lifetime of the certificate from now in - seconds - - `validFrom`, `validUntil` -- optional Dates, beginning and end of - certificate validity period. If given - `lifetime` will be ignored - - `serial` -- optional Buffer, the serial number of the certificate - - `purposes` -- optional Array of String, X.509 key usage restrictions - -### `createCertificate(subject, key, issuer, issuerKey[, options])` - -Parameters - - - `subject` -- an Identity, the subject of the certificate - - `key` -- a Key, the public key of the subject - - `issuer` -- an Identity, the issuer of the certificate who will sign it - - `issuerKey` -- a PrivateKey, the issuer's private key for signing - - `options` -- optional Object, with keys: - - `lifetime` -- optional Number, lifetime of the certificate from now in - seconds - - `validFrom`, `validUntil` -- optional Dates, beginning and end of - certificate validity period. If given - `lifetime` will be ignored - - `serial` -- optional Buffer, the serial number of the certificate - - `purposes` -- optional Array of String, X.509 key usage restrictions - -### `Certificate#subjects` - -Array of `Identity` instances describing the subject of this certificate. - -### `Certificate#issuer` - -The `Identity` of the Certificate's issuer (signer). - -### `Certificate#subjectKey` - -The public key of the subject of the certificate, as a `Key` instance. - -### `Certificate#issuerKey` - -The public key of the signing issuer of this certificate, as a `Key` instance. -May be `undefined` if the issuer's key is unknown (e.g. on an X509 certificate). - -### `Certificate#serial` - -The serial number of the certificate. As this is normally a 64-bit or wider -integer, it is returned as a Buffer. - -### `Certificate#purposes` - -Array of Strings indicating the X.509 key usage purposes that this certificate -is valid for. 
The possible strings at the moment are: - - * `'signature'` -- key can be used for digital signatures - * `'identity'` -- key can be used to attest about the identity of the signer - (X.509 calls this `nonRepudiation`) - * `'codeSigning'` -- key can be used to sign executable code - * `'keyEncryption'` -- key can be used to encrypt other keys - * `'encryption'` -- key can be used to encrypt data (only applies for RSA) - * `'keyAgreement'` -- key can be used for key exchange protocols such as - Diffie-Hellman - * `'ca'` -- key can be used to sign other certificates (is a Certificate - Authority) - * `'crl'` -- key can be used to sign Certificate Revocation Lists (CRLs) - -### `Certificate#getExtension(nameOrOid)` - -Retrieves information about a certificate extension, if present, or returns -`undefined` if not. The string argument `nameOrOid` should be either the OID -(for X509 extensions) or the name (for OpenSSH extensions) of the extension -to retrieve. - -The object returned will have the following properties: - - * `format` -- String, set to either `'x509'` or `'openssh'` - * `name` or `oid` -- String, only one set based on value of `format` - * `data` -- Buffer, the raw data inside the extension - -### `Certificate#getExtensions()` - -Returns an Array of all present certificate extensions, in the same manner and -format as `getExtension()`. - -### `Certificate#isExpired([when])` - -Tests whether the Certificate is currently expired (i.e. the `validFrom` and -`validUntil` dates specify a range of time that does not include the current -time). - -Parameters - - - `when` -- optional Date, if specified, tests whether the Certificate was or - will be expired at the specified time instead of now - -Returns a Boolean. - -### `Certificate#isSignedByKey(key)` - -Tests whether the Certificate was validly signed by the given (public) Key. - -Parameters - - - `key` -- a Key instance - -Returns a Boolean. - -### `Certificate#isSignedBy(certificate)` - -Tests whether this Certificate was validly signed by the subject of the given -certificate. Also tests that the issuer Identity of this Certificate and the -subject Identity of the other Certificate are equivalent. - -Parameters - - - `certificate` -- another Certificate instance - -Returns a Boolean. - -### `Certificate#fingerprint([hashAlgo])` - -Returns the X509-style fingerprint of the entire certificate (as a Fingerprint -instance). This matches what a web-browser or similar would display as the -certificate fingerprint and should not be confused with the fingerprint of the -subject's public key. - -Parameters - - - `hashAlgo` -- an optional String, any hash function name - -### `Certificate#toBuffer([format])` - -Serializes the Certificate to a Buffer and returns it. - -Parameters - - - `format` -- an optional String, output format, one of `'openssh'`, `'pem'` or - `'x509'`. Defaults to `'x509'`. - -Returns a Buffer. - -### `Certificate#toString([format])` - - - `format` -- an optional String, output format, one of `'openssh'`, `'pem'` or - `'x509'`. Defaults to `'pem'`. - -Returns a String. - -## Certificate identities - -### `identityForHost(hostname)` - -Constructs a host-type Identity for a given hostname. - -Parameters - - - `hostname` -- the fully qualified DNS name of the host - -Returns an Identity instance. - -### `identityForUser(uid)` - -Constructs a user-type Identity for a given UID. - -Parameters - - - `uid` -- a String, user identifier (login name) - -Returns an Identity instance. 
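
Putting the identity constructors together with the certificate functions above, the following sketch mints a short-lived self-signed host certificate. It assumes `parsePrivateKey` as documented elsewhere in this README; the key path, hostname, and lifetime are illustrative only.

```js
var fs = require('fs');
var sshpk = require('sshpk');

// Illustrative key path
var key = sshpk.parsePrivateKey(fs.readFileSync('id_ecdsa'), 'pem');

// Build a host-type Identity and self-sign a certificate for it,
// valid for 90 days from now
var subject = sshpk.identityForHost('db01.example.com');
var cert = sshpk.createSelfSignedCertificate(subject, key, {
  lifetime: 90 * 24 * 3600,
  purposes: ['signature']
});

// Serialize to either the X.509 PEM or the OpenSSH certificate format
fs.writeFileSync('db01.pem', cert.toBuffer('pem'));
fs.writeFileSync('db01-cert.pub', cert.toBuffer('openssh'));
```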
- -### `identityForEmail(email)` - -Constructs an email-type Identity for a given email address. - -Parameters - - - `email` -- a String, email address - -Returns an Identity instance. - -### `identityFromDN(dn)` - -Parses an LDAP-style DN string (e.g. `'CN=foo, C=US'`) and turns it into an -Identity instance. - -Parameters - - - `dn` -- a String - -Returns an Identity instance. - -### `identityFromArray(arr)` - -Constructs an Identity from an array of DN components (see `Identity#toArray()` -for the format). - -Parameters - - - `arr` -- an Array of Objects, DN components with `name` and `value` - -Returns an Identity instance. - - -Supported attributes in DNs: - -| Attribute name | OID | -| -------------- | --- | -| `cn` | `2.5.4.3` | -| `o` | `2.5.4.10` | -| `ou` | `2.5.4.11` | -| `l` | `2.5.4.7` | -| `s` | `2.5.4.8` | -| `c` | `2.5.4.6` | -| `sn` | `2.5.4.4` | -| `postalCode` | `2.5.4.17` | -| `serialNumber` | `2.5.4.5` | -| `street` | `2.5.4.9` | -| `x500UniqueIdentifier` | `2.5.4.45` | -| `role` | `2.5.4.72` | -| `telephoneNumber` | `2.5.4.20` | -| `description` | `2.5.4.13` | -| `dc` | `0.9.2342.19200300.100.1.25` | -| `uid` | `0.9.2342.19200300.100.1.1` | -| `mail` | `0.9.2342.19200300.100.1.3` | -| `title` | `2.5.4.12` | -| `gn` | `2.5.4.42` | -| `initials` | `2.5.4.43` | -| `pseudonym` | `2.5.4.65` | - -### `Identity#toString()` - -Returns the identity as an LDAP-style DN string. -e.g. `'CN=foo, O=bar corp, C=us'` - -### `Identity#type` - -The type of identity. One of `'host'`, `'user'`, `'email'` or `'unknown'` - -### `Identity#hostname` -### `Identity#uid` -### `Identity#email` - -Set when `type` is `'host'`, `'user'`, or `'email'`, respectively. Strings. - -### `Identity#cn` - -The value of the first `CN=` in the DN, if any. It's probably better to use -the `#get()` method instead of this property. - -### `Identity#get(name[, asArray])` - -Returns the value of a named attribute in the Identity DN. If there is no -attribute of the given name, returns `undefined`. If multiple components -of the DN contain an attribute of this name, an exception is thrown unless -the `asArray` argument is given as `true` -- then they will be returned as -an Array in the same order they appear in the DN. - -Parameters - - - `name` -- a String - - `asArray` -- an optional Boolean - -### `Identity#toArray()` - -Returns the Identity as an Array of DN component objects. This looks like: - -```js -[ { - "name": "cn", - "value": "Joe Bloggs" -}, -{ - "name": "o", - "value": "Organisation Ltd" -} ] -``` - -Each object has a `name` and a `value` property. The returned objects may be -safely modified. - -Errors ------- - -### `InvalidAlgorithmError` - -The specified algorithm is not valid, either because it is not supported, or -because it was not included on a list of allowed algorithms. - -Thrown by `Fingerprint.parse`, `Key#fingerprint`. - -Properties - -- `algorithm` -- the algorithm that could not be validated - -### `FingerprintFormatError` - -The fingerprint string given could not be parsed as a supported fingerprint -format, or the specified fingerprint format is invalid. - -Thrown by `Fingerprint.parse`, `Fingerprint#toString`. - -Properties - -- `fingerprint` -- if caused by a fingerprint, the string value given -- `format` -- if caused by an invalid format specification, the string value given - -### `KeyParseError` - -The key data given could not be parsed as a valid key. 
- -Properties - -- `keyName` -- `filename` that was given to `parseKey` -- `format` -- the `format` that was trying to parse the key (see `parseKey`) -- `innerErr` -- the inner Error thrown by the format parser - -### `KeyEncryptedError` - -The key is encrypted with a symmetric key (ie, it is password protected). The -parsing operation would succeed if it was given the `passphrase` option. - -Properties - -- `keyName` -- `filename` that was given to `parseKey` -- `format` -- the `format` that was trying to parse the key (currently can only - be `"pem"`) - -### `CertificateParseError` - -The certificate data given could not be parsed as a valid certificate. - -Properties - -- `certName` -- `filename` that was given to `parseCertificate` -- `format` -- the `format` that was trying to parse the key - (see `parseCertificate`) -- `innerErr` -- the inner Error thrown by the format parser - -Friends of sshpk ----------------- - - * [`sshpk-agent`](https://github.com/arekinath/node-sshpk-agent) is a library - for speaking the `ssh-agent` protocol from node.js, which uses `sshpk` diff --git a/node_modules/ssri/CHANGELOG.md b/node_modules/ssri/CHANGELOG.md deleted file mode 100644 index 3fea458e92ddf..0000000000000 --- a/node_modules/ssri/CHANGELOG.md +++ /dev/null @@ -1,355 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -### [8.0.1](https://github.com/npm/ssri/compare/v8.0.0...v8.0.1) (2021-01-27) - - -### Bug Fixes - -* simplify regex for strict mode, add tests ([76e2233](https://github.com/npm/ssri/commit/76e223317d971f19e4db8191865bdad5edee40d2)) - -## [8.0.0](https://github.com/npm/ssri/compare/v7.1.0...v8.0.0) (2020-02-18) - - -### ⚠ BREAKING CHANGES - -* SRI values with `../` in the algorithm name now throw -as invalid (which they always probably should have!) -* adds a new error that will be thrown. Empty SRIs are -no longer considered valid for checking, only when using integrityStream -to calculate the SRI value. - -PR-URL: https://github.com/npm/ssri/pull/12 -Credit: @claudiahdz - -### Features - -* remove figgy-pudding ([0e78fd7](https://github.com/npm/ssri/commit/0e78fd7b754e2d098875eb4c57238709d96d7c27)) - - -### Bug Fixes - -* harden SRI parsing against ../ funny business ([4062735](https://github.com/npm/ssri/commit/4062735d1281941fd32ac4320b9f9965fcec278b)) -* IntegrityStream responds to mutating opts object mid-stream ([4a963e5](https://github.com/npm/ssri/commit/4a963e5982478c6b07f86848cdb72d142c765195)) -* throw null when sri is empty or bad ([a6811cb](https://github.com/npm/ssri/commit/a6811cba71e20ea1fdefa6e50c9ea3c67efc2500)), closes [#12](https://github.com/npm/ssri/issues/12) - -## [7.1.0](https://github.com/npm/ssri/compare/v7.0.1...v7.1.0) (2019-10-24) - - -### Bug Fixes - -* Do not blow up if the opts object is mutated ([806e8c8](https://github.com/npm/ssri/commit/806e8c8)) - - -### Features - -* Add Integrity#merge method ([0572c1d](https://github.com/npm/ssri/commit/0572c1d)), closes [#4](https://github.com/npm/ssri/issues/4) - -### [7.0.1](https://github.com/npm/ssri/compare/v7.0.0...v7.0.1) (2019-09-30) - -## [7.0.0](https://github.com/npm/ssri/compare/v6.0.1...v7.0.0) (2019-09-18) - - -### ⚠ BREAKING CHANGES - -* ssri no longer accepts a Promise option, and does not -use, return, or rely on Bluebird promises. -* drop support for Node.js v6. - -We knew this was coming, and the Stream changes are breaking anyway. 
-May as well do this now. -* **streams:** this replaces the Node.js stream with a Minipass -stream. See http://npm.im/minipass for documentation. - -### Bug Fixes - -* return super.write() return value ([55b055d](https://github.com/npm/ssri/commit/55b055d)) - - -* Use native promises only ([6d13165](https://github.com/npm/ssri/commit/6d13165)) -* update tap, standard, standard-version, travis ([2e54956](https://github.com/npm/ssri/commit/2e54956)) -* **streams:** replace transform streams with minipass ([363995e](https://github.com/npm/ssri/commit/363995e)) - -<a name="6.0.1"></a> -## [6.0.1](https://github.com/npm/ssri/compare/v6.0.0...v6.0.1) (2018-08-27) - - -### Bug Fixes - -* **opts:** use figgy-pudding to specify consumed opts ([cf86553](https://github.com/npm/ssri/commit/cf86553)) - - - -<a name="6.0.0"></a> -# [6.0.0](https://github.com/npm/ssri/compare/v5.3.0...v6.0.0) (2018-04-09) - - -### Bug Fixes - -* **docs:** minor typo ([b71ef17](https://github.com/npm/ssri/commit/b71ef17)) - - -### meta - -* drop support for node@4 ([d9bf359](https://github.com/npm/ssri/commit/d9bf359)) - - -### BREAKING CHANGES - -* node@4 is no longer supported - - - -<a name="5.3.0"></a> -# [5.3.0](https://github.com/npm/ssri/compare/v5.2.4...v5.3.0) (2018-03-13) - - -### Features - -* **checkData:** optionally throw when checkData fails ([bf26b84](https://github.com/npm/ssri/commit/bf26b84)) - - - -<a name="5.2.4"></a> -## [5.2.4](https://github.com/npm/ssri/compare/v5.2.3...v5.2.4) (2018-02-16) - - - -<a name="5.2.3"></a> -## [5.2.3](https://github.com/npm/ssri/compare/v5.2.2...v5.2.3) (2018-02-16) - - -### Bug Fixes - -* **hashes:** filter hash priority list by available hashes ([2fa30b8](https://github.com/npm/ssri/commit/2fa30b8)) -* **integrityStream:** dedupe algorithms to generate ([d56c654](https://github.com/npm/ssri/commit/d56c654)) - - - -<a name="5.2.2"></a> -## [5.2.2](https://github.com/npm/ssri/compare/v5.2.1...v5.2.2) (2018-02-14) - - -### Bug Fixes - -* **security:** tweak strict SRI regex ([#10](https://github.com/npm/ssri/issues/10)) ([d0ebcdc](https://github.com/npm/ssri/commit/d0ebcdc)) - - - -<a name="5.2.1"></a> -## [5.2.1](https://github.com/npm/ssri/compare/v5.2.0...v5.2.1) (2018-02-06) - - - -<a name="5.2.0"></a> -# [5.2.0](https://github.com/npm/ssri/compare/v5.1.0...v5.2.0) (2018-02-06) - - -### Features - -* **match:** add integrity.match() ([3c49cc4](https://github.com/npm/ssri/commit/3c49cc4)) - - - -<a name="5.1.0"></a> -# [5.1.0](https://github.com/npm/ssri/compare/v5.0.0...v5.1.0) (2018-01-18) - - -### Bug Fixes - -* **checkStream:** integrityStream now takes opts.integrity algos into account ([d262910](https://github.com/npm/ssri/commit/d262910)) - - -### Features - -* **sha3:** do some guesswork about upcoming sha3 ([7fdd9df](https://github.com/npm/ssri/commit/7fdd9df)) - - - -<a name="5.0.0"></a> -# [5.0.0](https://github.com/npm/ssri/compare/v4.1.6...v5.0.0) (2017-10-23) - - -### Features - -* **license:** relicense to ISC (#9) ([c82983a](https://github.com/npm/ssri/commit/c82983a)) - - -### BREAKING CHANGES - -* **license:** the license has been changed from CC0-1.0 to ISC. 
- - - -<a name="4.1.6"></a> -## [4.1.6](https://github.com/npm/ssri/compare/v4.1.5...v4.1.6) (2017-06-07) - - -### Bug Fixes - -* **checkStream:** make sure to pass all opts through ([0b1bcbe](https://github.com/npm/ssri/commit/0b1bcbe)) - - - -<a name="4.1.5"></a> -## [4.1.5](https://github.com/npm/ssri/compare/v4.1.4...v4.1.5) (2017-06-05) - - -### Bug Fixes - -* **integrityStream:** stop crashing if opts.algorithms and opts.integrity have an algo mismatch ([fb1293e](https://github.com/npm/ssri/commit/fb1293e)) - - - -<a name="4.1.4"></a> -## [4.1.4](https://github.com/npm/ssri/compare/v4.1.3...v4.1.4) (2017-05-31) - - -### Bug Fixes - -* **node:** older versions of node[@4](https://github.com/4) do not support base64buffer string parsing ([513df4e](https://github.com/npm/ssri/commit/513df4e)) - - - -<a name="4.1.3"></a> -## [4.1.3](https://github.com/npm/ssri/compare/v4.1.2...v4.1.3) (2017-05-24) - - -### Bug Fixes - -* **check:** handle various bad hash corner cases better ([c2c262b](https://github.com/npm/ssri/commit/c2c262b)) - - - -<a name="4.1.2"></a> -## [4.1.2](https://github.com/npm/ssri/compare/v4.1.1...v4.1.2) (2017-04-18) - - -### Bug Fixes - -* **stream:** _flush can be called multiple times. use on("end") ([b1c4805](https://github.com/npm/ssri/commit/b1c4805)) - - - -<a name="4.1.1"></a> -## [4.1.1](https://github.com/npm/ssri/compare/v4.1.0...v4.1.1) (2017-04-12) - - -### Bug Fixes - -* **pickAlgorithm:** error if pickAlgorithm() is used in an empty Integrity ([fab470e](https://github.com/npm/ssri/commit/fab470e)) - - - -<a name="4.1.0"></a> -# [4.1.0](https://github.com/npm/ssri/compare/v4.0.0...v4.1.0) (2017-04-07) - - -### Features - -* adding ssri.create for a crypto style interface (#2) ([96f52ad](https://github.com/npm/ssri/commit/96f52ad)) - - - -<a name="4.0.0"></a> -# [4.0.0](https://github.com/npm/ssri/compare/v3.0.2...v4.0.0) (2017-04-03) - - -### Bug Fixes - -* **integrity:** should have changed the error code before. 
oops ([8381afa](https://github.com/npm/ssri/commit/8381afa)) - - -### BREAKING CHANGES - -* **integrity:** EBADCHECKSUM -> EINTEGRITY for verification errors - - - -<a name="3.0.2"></a> -## [3.0.2](https://github.com/npm/ssri/compare/v3.0.1...v3.0.2) (2017-04-03) - - - -<a name="3.0.1"></a> -## [3.0.1](https://github.com/npm/ssri/compare/v3.0.0...v3.0.1) (2017-04-03) - - -### Bug Fixes - -* **package.json:** really should have these in the keywords because search ([a6ac6d0](https://github.com/npm/ssri/commit/a6ac6d0)) - - - -<a name="3.0.0"></a> -# [3.0.0](https://github.com/npm/ssri/compare/v2.0.0...v3.0.0) (2017-04-03) - - -### Bug Fixes - -* **hashes:** IntegrityMetadata -> Hash ([d04aa1f](https://github.com/npm/ssri/commit/d04aa1f)) - - -### Features - -* **check:** return IntegrityMetadata on check success ([2301e74](https://github.com/npm/ssri/commit/2301e74)) -* **fromHex:** ssri.fromHex to make it easier to generate them from hex valus ([049b89e](https://github.com/npm/ssri/commit/049b89e)) -* **hex:** utility function for getting hex version of digest ([a9f021c](https://github.com/npm/ssri/commit/a9f021c)) -* **hexDigest:** added hexDigest method to Integrity objects too ([85208ba](https://github.com/npm/ssri/commit/85208ba)) -* **integrity:** add .isIntegrity and .isIntegrityMetadata ([1b29e6f](https://github.com/npm/ssri/commit/1b29e6f)) -* **integrityStream:** new stream that can both generate and check streamed data ([fd23e1b](https://github.com/npm/ssri/commit/fd23e1b)) -* **parse:** allow parsing straight into a single IntegrityMetadata object ([c8ddf48](https://github.com/npm/ssri/commit/c8ddf48)) -* **pickAlgorithm:** Intergrity#pickAlgorithm() added ([b97a796](https://github.com/npm/ssri/commit/b97a796)) -* **size:** calculate and update stream sizes ([02ed1ad](https://github.com/npm/ssri/commit/02ed1ad)) - - -### BREAKING CHANGES - -* **hashes:** `.isIntegrityMetadata` is now `.isHash`. Also, any references to `IntegrityMetadata` now refer to `Hash`. -* **integrityStream:** createCheckerStream has been removed and replaced with a general-purpose integrityStream. - -To convert existing createCheckerStream code, move the `sri` argument into `opts.integrity` in integrityStream. All other options should be the same. -* **check:** `checkData`, `checkStream`, and `createCheckerStream` now yield a whole IntegrityMetadata instance representing the first successful hash match. - - - -<a name="2.0.0"></a> -# [2.0.0](https://github.com/npm/ssri/compare/v1.0.0...v2.0.0) (2017-03-24) - - -### Bug Fixes - -* **strict-mode:** make regexes more rigid ([122a32c](https://github.com/npm/ssri/commit/122a32c)) - - -### Features - -* **api:** added serialize alias for unparse ([999b421](https://github.com/npm/ssri/commit/999b421)) -* **concat:** add Integrity#concat() ([cae12c7](https://github.com/npm/ssri/commit/cae12c7)) -* **pickAlgo:** pick the strongest algorithm provided, by default ([58c18f7](https://github.com/npm/ssri/commit/58c18f7)) -* **strict-mode:** strict SRI support ([3f0b64c](https://github.com/npm/ssri/commit/3f0b64c)) -* **stringify:** replaced unparse/serialize with stringify ([4acad30](https://github.com/npm/ssri/commit/4acad30)) -* **verification:** add opts.pickAlgorithm ([f72e658](https://github.com/npm/ssri/commit/f72e658)) - - -### BREAKING CHANGES - -* **pickAlgo:** ssri will prioritize specific hashes now -* **stringify:** serialize and unparse have been removed. Use ssri.stringify instead. 
-* **strict-mode:** functions that accepted an optional `sep` argument now expect `opts.sep`. - - - -<a name="1.0.0"></a> -# 1.0.0 (2017-03-23) - - -### Features - -* **api:** implemented initial api ([4fbb16b](https://github.com/npm/ssri/commit/4fbb16b)) - - -### BREAKING CHANGES - -* **api:** Initial API established. diff --git a/node_modules/ssri/README.md b/node_modules/ssri/README.md deleted file mode 100644 index 0cd41be898596..0000000000000 --- a/node_modules/ssri/README.md +++ /dev/null @@ -1,528 +0,0 @@ -# ssri [![npm version](https://img.shields.io/npm/v/ssri.svg)](https://npm.im/ssri) [![license](https://img.shields.io/npm/l/ssri.svg)](https://npm.im/ssri) [![Travis](https://img.shields.io/travis/npm/ssri.svg)](https://travis-ci.org/npm/ssri) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/ssri?svg=true)](https://ci.appveyor.com/project/npm/ssri) [![Coverage Status](https://coveralls.io/repos/github/npm/ssri/badge.svg?branch=latest)](https://coveralls.io/github/npm/ssri?branch=latest) - -[`ssri`](https://github.com/npm/ssri), short for Standard Subresource -Integrity, is a Node.js utility for parsing, manipulating, serializing, -generating, and verifying [Subresource -Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) hashes. - -## Install - -`$ npm install --save ssri` - -## Table of Contents - -* [Example](#example) -* [Features](#features) -* [Contributing](#contributing) -* [API](#api) - * Parsing & Serializing - * [`parse`](#parse) - * [`stringify`](#stringify) - * [`Integrity#concat`](#integrity-concat) - * [`Integrity#merge`](#integrity-merge) - * [`Integrity#toString`](#integrity-to-string) - * [`Integrity#toJSON`](#integrity-to-json) - * [`Integrity#match`](#integrity-match) - * [`Integrity#pickAlgorithm`](#integrity-pick-algorithm) - * [`Integrity#hexDigest`](#integrity-hex-digest) - * Integrity Generation - * [`fromHex`](#from-hex) - * [`fromData`](#from-data) - * [`fromStream`](#from-stream) - * [`create`](#create) - * Integrity Verification - * [`checkData`](#check-data) - * [`checkStream`](#check-stream) - * [`integrityStream`](#integrity-stream) - -### Example - -```javascript -const ssri = require('ssri') - -const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo' - -// Parsing and serializing -const parsed = ssri.parse(integrity) -ssri.stringify(parsed) // === integrity (works on non-Integrity objects) -parsed.toString() // === integrity - -// Async stream functions -ssri.checkStream(fs.createReadStream('./my-file'), integrity).then(...) -ssri.fromStream(fs.createReadStream('./my-file')).then(sri => { - sri.toString() === integrity -}) -fs.createReadStream('./my-file').pipe(ssri.createCheckerStream(sri)) - -// Sync data functions -ssri.fromData(fs.readFileSync('./my-file')) // === parsed -ssri.checkData(fs.readFileSync('./my-file'), integrity) // => 'sha512' -``` - -### Features - -* Parses and stringifies SRI strings. -* Generates SRI strings from raw data or Streams. -* Strict standard compliance. -* `?foo` metadata option support. -* Multiple entries for the same algorithm. -* Object-based integrity hash manipulation. -* Small footprint: no dependencies, concise implementation. -* Full test coverage. -* Customizable algorithm picker. - -### Contributing - -The ssri team enthusiastically welcomes contributions and project participation! -There's a bunch of things you can do if you want to contribute! 
The [Contributor -Guide](CONTRIBUTING.md) has all the information you need for everything from -reporting bugs to contributing entire new features. Please don't hesitate to -jump in if you'd like to, or even ask us questions if something isn't clear. - -### API - -#### <a name="parse"></a> `> ssri.parse(sri, [opts]) -> Integrity` - -Parses `sri` into an `Integrity` data structure. `sri` can be an integrity -string, an `Hash`-like with `digest` and `algorithm` fields and an optional -`options` field, or an `Integrity`-like object. The resulting object will be an -`Integrity` instance that has this shape: - -```javascript -{ - 'sha1': [{algorithm: 'sha1', digest: 'deadbeef', options: []}], - 'sha512': [ - {algorithm: 'sha512', digest: 'c0ffee', options: []}, - {algorithm: 'sha512', digest: 'bad1dea', options: ['foo']} - ], -} -``` - -If `opts.single` is truthy, a single `Hash` object will be returned. That is, a -single object that looks like `{algorithm, digest, options}`, as opposed to a -larger object with multiple of these. - -If `opts.strict` is truthy, the resulting object will be filtered such that -it strictly follows the Subresource Integrity spec, throwing away any entries -with any invalid components. This also means a restricted set of algorithms -will be used -- the spec limits them to `sha256`, `sha384`, and `sha512`. - -Strict mode is recommended if the integrity strings are intended for use in -browsers, or in other situations where strict adherence to the spec is needed. - -##### Example - -```javascript -ssri.parse('sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo') // -> Integrity object -``` - -#### <a name="stringify"></a> `> ssri.stringify(sri, [opts]) -> String` - -This function is identical to [`Integrity#toString()`](#integrity-to-string), -except it can be used on _any_ object that [`parse`](#parse) can handle -- that -is, a string, an `Hash`-like, or an `Integrity`-like. - -The `opts.sep` option defines the string to use when joining multiple entries -together. To be spec-compliant, this _must_ be whitespace. The default is a -single space (`' '`). - -If `opts.strict` is true, the integrity string will be created using strict -parsing rules. See [`ssri.parse`](#parse). - -##### Example - -```javascript -// Useful for cleaning up input SRI strings: -ssri.stringify('\n\rsha512-foo\n\t\tsha384-bar') -// -> 'sha512-foo sha384-bar' - -// Hash-like: only a single entry. -ssri.stringify({ - algorithm: 'sha512', - digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==', - options: ['foo'] -}) -// -> -// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo' - -// Integrity-like: full multi-entry syntax. Similar to output of `ssri.parse` -ssri.stringify({ - 'sha512': [ - { - algorithm: 'sha512', - digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==', - options: ['foo'] - } - ] -}) -// -> -// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo' -``` - -#### <a name="integrity-concat"></a> `> Integrity#concat(otherIntegrity, [opts]) -> Integrity` - -Concatenates an `Integrity` object with another IntegrityLike, or an integrity -string. - -This is functionally equivalent to concatenating the string format of both -integrity arguments, and calling [`ssri.parse`](#ssri-parse) on the new string. 
- -If `opts.strict` is true, the new `Integrity` will be created using strict -parsing rules. See [`ssri.parse`](#parse). - -##### Example - -```javascript -// This will combine the integrity checks for two different versions of -// your index.js file so you can use a single integrity string and serve -// either of these to clients, from a single `<script>` tag. -const desktopIntegrity = ssri.fromData(fs.readFileSync('./index.desktop.js')) -const mobileIntegrity = ssri.fromData(fs.readFileSync('./index.mobile.js')) - -// Note that browsers (and ssri) will succeed as long as ONE of the entries -// for the *prioritized* algorithm succeeds. That is, in order for this fallback -// to work, both desktop and mobile *must* use the same `algorithm` values. -desktopIntegrity.concat(mobileIntegrity) -``` - -#### <a name="integrity-merge"></a> `> Integrity#merge(otherIntegrity, [opts])` - -Safely merges another IntegrityLike or integrity string into an `Integrity` -object. - -If the other integrity value has any algorithms in common with the current -object, then the hash digests must match, or an error is thrown. - -Any new hashes will be added to the current object's set. - -This is useful when an integrity value may be upgraded with a stronger -algorithm, you wish to prevent accidentally supressing integrity errors by -overwriting the expected integrity value. - -##### Example - -```javascript -const data = fs.readFileSync('data.txt') - -// integrity.txt contains 'sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=' -// because we were young, and didn't realize sha1 would not last -const expectedIntegrity = ssri.parse(fs.readFileSync('integrity.txt', 'utf8')) -const match = ssri.checkData(data, expectedIntegrity, { - algorithms: ['sha512', 'sha1'] -}) -if (!match) { - throw new Error('data corrupted or something!') -} - -// get a stronger algo! -if (match && match.algorithm !== 'sha512') { - const updatedIntegrity = ssri.fromData(data, { algorithms: ['sha512'] }) - expectedIntegrity.merge(updatedIntegrity) - fs.writeFileSync('integrity.txt', expectedIntegrity.toString()) - // file now contains - // 'sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4= sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==' -} -``` - -#### <a name="integrity-to-string"></a> `> Integrity#toString([opts]) -> String` - -Returns the string representation of an `Integrity` object. All hash entries -will be concatenated in the string by `opts.sep`, which defaults to `' '`. - -If you want to serialize an object that didn't come from an `ssri` function, -use [`ssri.stringify()`](#stringify). - -If `opts.strict` is true, the integrity string will be created using strict -parsing rules. See [`ssri.parse`](#parse). - -##### Example - -```javascript -const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo' - -ssri.parse(integrity).toString() === integrity -``` - -#### <a name="integrity-to-json"></a> `> Integrity#toJSON() -> String` - -Returns the string representation of an `Integrity` object. All hash entries -will be concatenated in the string by `' '`. - -This is a convenience method so you can pass an `Integrity` object directly to `JSON.stringify`. -For more info check out [toJSON() behavior on mdn](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#toJSON%28%29_behavior). 
- -##### Example - -```javascript -const integrity = '"sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo"' - -JSON.stringify(ssri.parse(integrity)) === integrity -``` - -#### <a name="integrity-match"></a> `> Integrity#match(sri, [opts]) -> Hash | false` - -Returns the matching (truthy) hash if `Integrity` matches the argument passed as -`sri`, which can be anything that [`parse`](#parse) will accept. `opts` will be -passed through to `parse` and [`pickAlgorithm()`](#integrity-pick-algorithm). - -##### Example - -```javascript -const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==' - -ssri.parse(integrity).match(integrity) -// Hash { -// digest: '9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==' -// algorithm: 'sha512' -// } - -ssri.parse(integrity).match('sha1-deadbeef') -// false -``` - -#### <a name="integrity-pick-algorithm"></a> `> Integrity#pickAlgorithm([opts]) -> String` - -Returns the "best" algorithm from those available in the integrity object. - -If `opts.pickAlgorithm` is provided, it will be passed two algorithms as -arguments. ssri will prioritize whichever of the two algorithms is returned by -this function. Note that the function may be called multiple times, and it -**must** return one of the two algorithms provided. By default, ssri will make -a best-effort to pick the strongest/most reliable of the given algorithms. It -may intentionally deprioritize algorithms with known vulnerabilities. - -##### Example - -```javascript -ssri.parse('sha1-WEakDigEST sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1').pickAlgorithm() // sha512 -``` - -#### <a name="integrity-hex-digest"></a> `> Integrity#hexDigest() -> String` - -`Integrity` is assumed to be either a single-hash `Integrity` instance, or a -`Hash` instance. Returns its `digest`, converted to a hex representation of the -base64 data. - -##### Example - -```javascript -ssri.parse('sha1-deadbeef').hexDigest() // '75e69d6de79f' -``` - -#### <a name="from-hex"></a> `> ssri.fromHex(hexDigest, algorithm, [opts]) -> Integrity` - -Creates an `Integrity` object with a single entry, based on a hex-formatted -hash. This is a utility function to help convert existing shasums to the -Integrity format, and is roughly equivalent to something like: - -```javascript -algorithm + '-' + Buffer.from(hexDigest, 'hex').toString('base64') -``` - -`opts.options` may optionally be passed in: it must be an array of option -strings that will be added to all generated integrity hashes generated by -`fromData`. This is a loosely-specified feature of SRIs, and currently has no -specified semantics besides being `?`-separated. Use at your own risk, and -probably avoid if your integrity strings are meant to be used with browsers. - -If `opts.strict` is true, the integrity object will be created using strict -parsing rules. See [`ssri.parse`](#parse). - -If `opts.single` is true, a single `Hash` object will be returned. - -##### Example - -```javascript -ssri.fromHex('75e69d6de79f', 'sha1').toString() // 'sha1-deadbeef' -``` - -#### <a name="from-data"></a> `> ssri.fromData(data, [opts]) -> Integrity` - -Creates an `Integrity` object from either string or `Buffer` data, calculating -all the requested hashes and adding any specified options to the object. - -`opts.algorithms` determines which algorithms to generate hashes for. 
All -results will be included in a single `Integrity` object. The default value for -`opts.algorithms` is `['sha512']`. All algorithm strings must be hashes listed -in `crypto.getHashes()` for the host Node.js platform. - -`opts.options` may optionally be passed in: it must be an array of option -strings that will be added to all generated integrity hashes generated by -`fromData`. This is a loosely-specified feature of SRIs, and currently has no -specified semantics besides being `?`-separated. Use at your own risk, and -probably avoid if your integrity strings are meant to be used with browsers. - -If `opts.strict` is true, the integrity object will be created using strict -parsing rules. See [`ssri.parse`](#parse). - -##### Example - -```javascript -const integrityObj = ssri.fromData('foobarbaz', { - algorithms: ['sha256', 'sha384', 'sha512'] -}) -integrity.toString('\n') -// -> -// sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0= -// sha384-irnCxQ0CfQhYGlVAUdwTPC9bF3+YWLxlaDGM4xbYminxpbXEq+D+2GCEBTxcjES9 -// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg== -``` - -#### <a name="from-stream"></a> `> ssri.fromStream(stream, [opts]) -> Promise<Integrity>` - -Returns a Promise of an Integrity object calculated by reading data from -a given `stream`. - -It accepts both `opts.algorithms` and `opts.options`, which are documented as -part of [`ssri.fromData`](#from-data). - -Additionally, `opts.Promise` may be passed in to inject a Promise library of -choice. By default, ssri will use Node's built-in Promises. - -If `opts.strict` is true, the integrity object will be created using strict -parsing rules. See [`ssri.parse`](#parse). - -##### Example - -```javascript -ssri.fromStream(fs.createReadStream('index.js'), { - algorithms: ['sha1', 'sha512'] -}).then(integrity => { - return ssri.checkStream(fs.createReadStream('index.js'), integrity) -}) // succeeds -``` - -#### <a name="create"></a> `> ssri.create([opts]) -> <Hash>` - -Returns a Hash object with `update(<Buffer or string>[,enc])` and `digest()` methods. - - -The Hash object provides the same methods as [crypto class Hash](https://nodejs.org/dist/latest-v6.x/docs/api/crypto.html#crypto_class_hash). -`digest()` accepts no arguments and returns an Integrity object calculated by reading data from -calls to update. - -It accepts both `opts.algorithms` and `opts.options`, which are documented as -part of [`ssri.fromData`](#from-data). - -If `opts.strict` is true, the integrity object will be created using strict -parsing rules. See [`ssri.parse`](#parse). - -##### Example - -```javascript -const integrity = ssri.create().update('foobarbaz').digest() -integrity.toString() -// -> -// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg== -``` - -#### <a name="check-data"></a> `> ssri.checkData(data, sri, [opts]) -> Hash|false` - -Verifies `data` integrity against an `sri` argument. `data` may be either a -`String` or a `Buffer`, and `sri` can be any subresource integrity -representation that [`ssri.parse`](#parse) can handle. - -If verification succeeds, `checkData` will return the name of the algorithm that -was used for verification (a truthy value). Otherwise, it will return `false`. - -If `opts.pickAlgorithm` is provided, it will be used by -[`Integrity#pickAlgorithm`](#integrity-pick-algorithm) when deciding which of -the available digests to match against. 
- -If `opts.error` is true, and verification fails, `checkData` will throw either -an `EBADSIZE` or an `EINTEGRITY` error, instead of just returning false. - -##### Example - -```javascript -const data = fs.readFileSync('index.js') -ssri.checkData(data, ssri.fromData(data)) // -> 'sha512' -ssri.checkData(data, 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0') -ssri.checkData(data, 'sha1-BaDDigEST') // -> false -ssri.checkData(data, 'sha1-BaDDigEST', {error: true}) // -> Error! EINTEGRITY -``` - -#### <a name="check-stream"></a> `> ssri.checkStream(stream, sri, [opts]) -> Promise<Hash>` - -Verifies the contents of `stream` against an `sri` argument. `stream` will be -consumed in its entirety by this process. `sri` can be any subresource integrity -representation that [`ssri.parse`](#parse) can handle. - -`checkStream` will return a Promise that either resolves to the -`Hash` that succeeded verification, or, if the verification fails -or an error happens with `stream`, the Promise will be rejected. - -If the Promise is rejected because verification failed, the returned error will -have `err.code` as `EINTEGRITY`. - -If `opts.size` is given, it will be matched against the stream size. An error -with `err.code` `EBADSIZE` will be returned by a rejection if the expected size -and actual size fail to match. - -If `opts.pickAlgorithm` is provided, it will be used by -[`Integrity#pickAlgorithm`](#integrity-pick-algorithm) when deciding which of -the available digests to match against. - -##### Example - -```javascript -const integrity = ssri.fromData(fs.readFileSync('index.js')) - -ssri.checkStream( - fs.createReadStream('index.js'), - integrity -) -// -> -// Promise<{ -// algorithm: 'sha512', -// digest: 'sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1' -// }> - -ssri.checkStream( - fs.createReadStream('index.js'), - 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0' -) // -> Promise<Hash> - -ssri.checkStream( - fs.createReadStream('index.js'), - 'sha1-BaDDigEST' -) // -> Promise<Error<{code: 'EINTEGRITY'}>> -``` - -#### <a name="integrity-stream"></a> `> integrityStream([opts]) -> IntegrityStream` - -Returns a `Transform` stream that data can be piped through in order to generate -and optionally check data integrity for piped data. When the stream completes -successfully, it emits `size` and `integrity` events, containing the total -number of bytes processed and a calculated `Integrity` instance based on stream -data, respectively. - -If `opts.algorithms` is passed in, the listed algorithms will be calculated when -generating the final `Integrity` instance. The default is `['sha512']`. - -If `opts.single` is passed in, a single `Hash` instance will be returned. - -If `opts.integrity` is passed in, it should be an `integrity` value understood -by [`parse`](#parse) that the stream will check the data against. If -verification succeeds, the integrity stream will emit a `verified` event whose -value is a single `Hash` object that is the one that succeeded verification. If -verification fails, the stream will error with an `EINTEGRITY` error code. - -If `opts.size` is given, it will be matched against the stream size. An error -with `err.code` `EBADSIZE` will be emitted by the stream if the expected size -and actual size fail to match. - -If `opts.pickAlgorithm` is provided, it will be passed two algorithms as -arguments. ssri will prioritize whichever of the two algorithms is returned by -this function. 
Note that the function may be called multiple times, and it -**must** return one of the two algorithms provided. By default, ssri will make -a best-effort to pick the strongest/most reliable of the given algorithms. It -may intentionally deprioritize algorithms with known vulnerabilities. - -##### Example - -```javascript -const integrity = ssri.fromData(fs.readFileSync('index.js')) -fs.createReadStream('index.js') -.pipe(ssri.integrityStream({integrity})) -``` diff --git a/node_modules/string_decoder/.travis.yml b/node_modules/string_decoder/.travis.yml deleted file mode 100644 index 3347a72546505..0000000000000 --- a/node_modules/string_decoder/.travis.yml +++ /dev/null @@ -1,50 +0,0 @@ -sudo: false -language: node_js -before_install: - - npm install -g npm@2 - - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g -notifications: - email: false -matrix: - fast_finish: true - include: - - node_js: '0.8' - env: - - TASK=test - - NPM_LEGACY=true - - node_js: '0.10' - env: - - TASK=test - - NPM_LEGACY=true - - node_js: '0.11' - env: - - TASK=test - - NPM_LEGACY=true - - node_js: '0.12' - env: - - TASK=test - - NPM_LEGACY=true - - node_js: 1 - env: - - TASK=test - - NPM_LEGACY=true - - node_js: 2 - env: - - TASK=test - - NPM_LEGACY=true - - node_js: 3 - env: - - TASK=test - - NPM_LEGACY=true - - node_js: 4 - env: TASK=test - - node_js: 5 - env: TASK=test - - node_js: 6 - env: TASK=test - - node_js: 7 - env: TASK=test - - node_js: 8 - env: TASK=test - - node_js: 9 - env: TASK=test diff --git a/node_modules/string_decoder/README.md b/node_modules/string_decoder/README.md deleted file mode 100644 index 5fd58315ed588..0000000000000 --- a/node_modules/string_decoder/README.md +++ /dev/null @@ -1,47 +0,0 @@ -# string_decoder - -***Node-core v8.9.4 string_decoder for userland*** - - -[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) -[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) - - -```bash -npm install --save string_decoder -``` - -***Node-core string_decoder for userland*** - -This package is a mirror of the string_decoder implementation in Node-core. - -Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). - -As of version 1.0.0 **string_decoder** uses semantic versioning. - -## Previous versions - -Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. - -## Update - -The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. - -## Streams Working Group - -`string_decoder` is maintained by the Streams Working Group, which -oversees the development and maintenance of the Streams API within -Node.js. The responsibilities of the Streams Working Group include: - -* Addressing stream issues on the Node.js issue tracker. -* Authoring and editing stream documentation within the Node.js project. -* Reviewing changes to stream subclasses within the Node.js project. -* Redirecting changes to streams from the Node.js project to this - project. -* Assisting in the implementation of stream providers within Node.js. -* Recommending versions of `readable-stream` to be included in Node.js. -* Messaging about the future of streams to give the community advance - notice of changes. 
- -See [readable-stream](https://github.com/nodejs/readable-stream) for -more details. diff --git a/node_modules/stringify-package/CHANGELOG.md b/node_modules/stringify-package/CHANGELOG.md deleted file mode 100644 index 2de91768d2790..0000000000000 --- a/node_modules/stringify-package/CHANGELOG.md +++ /dev/null @@ -1,16 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -<a name="1.0.1"></a> -## [1.0.1](https://github.com/npm/stringify-package/compare/v1.0.0...v1.0.1) (2019-09-30) - - -### Bug Fixes - -* strict comparison ([0c5675f](https://github.com/npm/stringify-package/commit/0c5675f)), closes [#2](https://github.com/npm/stringify-package/issues/2) - - - -<a name="1.0.0"></a> -# 1.0.0 (2018-07-18) diff --git a/node_modules/stringify-package/README.md b/node_modules/stringify-package/README.md deleted file mode 100644 index 1ba4f5a330d17..0000000000000 --- a/node_modules/stringify-package/README.md +++ /dev/null @@ -1,55 +0,0 @@ -# stringify-package [![npm version](https://img.shields.io/npm/v/stringify-package.svg)](https://npm.im/stringify-package) [![license](https://img.shields.io/npm/l/stringify-package.svg)](https://npm.im/stringify-package) [![Travis](https://img.shields.io/travis/npm/stringify-package/latest.svg)](https://travis-ci.org/npm/stringify-package) [![AppVeyor](https://img.shields.io/appveyor/ci/npm/stringify-package/latest.svg)](https://ci.appveyor.com/project/npm/stringify-package) [![Coverage Status](https://coveralls.io/repos/github/npm/stringify-package/badge.svg?branch=latest)](https://coveralls.io/github/npm/stringify-package?branch=latest) - -[`stringify-package`](https://github.com/npm/stringify-package) is a standalone -library for writing out package data as a JSON file. It is extracted from npm. - -## Install - -`$ npm install stringify-package` - -## Table of Contents - -* [Example](#example) -* [Features](#features) -* [Contributing](#contributing) -* [API](#api) - * [`stringifyPackage`](#stringifypackage) - -### Example - -```javascript -const fs = require('fs') -const pkg = { /* ... */ } - -fs.writeFile('package.json', stringifyPackage(pkg), 'utf8', cb(err) => { - // ... -}) -``` - -### Features - -* Ensures consistent file indentation - To match existing file indentation, - [`detect-indent`](https://npm.im/detect-indent) is recommended. - -* Ensures consistent newlines - To match existing newline characters, - [`detect-newline`](https://npm.im/detect-newline) is recommended. - -### Contributing - -The npm team enthusiastically welcomes contributions and project participation! -There's a bunch of things you can do if you want to contribute! The [Contributor -Guide](CONTRIBUTING.md) has all the information you need for everything from -reporting bugs to contributing entire new features. Please don't hesitate to -jump in if you'd like to, or even ask us questions if something isn't clear. 
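
Before the API reference below, here is a short sketch of the formatting-preserving workflow the Features section above recommends, using `detect-indent` and `detect-newline` to keep an existing `package.json` file's style intact; the version bump and fallback values are illustrative only.

```javascript
const fs = require('fs')
const stringifyPackage = require('stringify-package')
const detectIndent = require('detect-indent')
const detectNewline = require('detect-newline')

const raw = fs.readFileSync('package.json', 'utf8')
const pkg = JSON.parse(raw)

// Match the file's existing indentation (assumes space indentation)
// and line endings, falling back to sensible defaults otherwise
const indent = detectIndent(raw).amount || 2
const newline = detectNewline(raw) || '\n'

pkg.version = '1.0.1' // illustrative change

fs.writeFileSync('package.json', stringifyPackage(pkg, indent, newline), 'utf8')
```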
- -### API - -### <a name="stringifypackage"></a> `> stringifyPackage(data, indent, newline) -> String` - -#### Arguments - -* `data` - the package data as an object to be stringified -* `indent` - the number of spaces to use for each level of indentation (defaults to 2) -* `newline` - the character(s) to be used as a line terminator diff --git a/node_modules/tar/CHANGELOG.md b/node_modules/tar/CHANGELOG.md deleted file mode 100644 index 9373401b3f464..0000000000000 --- a/node_modules/tar/CHANGELOG.md +++ /dev/null @@ -1,68 +0,0 @@ -# Changelog - -## 6.0 - -- Drop support for node 6 and 8 -- fix symlinks and hardlinks on windows being packed with `\`-style path - targets - -## 5.0 - -- Address unpack race conditions using path reservations -- Change large-numbers errors from TypeError to Error -- Add `TAR_*` error codes -- Raise `TAR_BAD_ARCHIVE` warning/error when there are no valid entries - found in an archive -- do not treat ignored entries as an invalid archive -- drop support for node v4 -- unpack: conditionally use a file mapping to write files on Windows -- Set more portable 'mode' value in portable mode -- Set `portable` gzip option in portable mode - -## 4.4 - -- Add 'mtime' option to tar creation to force mtime -- unpack: only reuse file fs entries if nlink = 1 -- unpack: rename before unlinking files on Windows -- Fix encoding/decoding of base-256 numbers -- Use `stat` instead of `lstat` when checking CWD -- Always provide a callback to fs.close() - -## 4.3 - -- Add 'transform' unpack option - -## 4.2 - -- Fail when zlib fails - -## 4.1 - -- Add noMtime flag for tar creation - -## 4.0 - -- unpack: raise error if cwd is missing or not a dir -- pack: don't drop dots from dotfiles when prefixing - -## 3.1 - -- Support `@file.tar` as an entry argument to copy entries from one tar - file to another. -- Add `noPax` option -- `noResume` option for tar.t -- win32: convert `>|<?:` chars to windows-friendly form -- Exclude mtime for dirs in portable mode - -## 3.0 - -- Minipass-based implementation -- Entirely new API surface, `tar.c()`, `tar.x()` etc., much closer to - system tar semantics -- Massive performance improvement -- Require node 4.x and higher - -## 0.x, 1.x, 2.x - 2011-2014 - -- fstream-based implementation -- slow and kinda bad, but better than npm shelling out to the system `tar` diff --git a/node_modules/tar/README.md b/node_modules/tar/README.md deleted file mode 100644 index 42afb1aa7d1ad..0000000000000 --- a/node_modules/tar/README.md +++ /dev/null @@ -1,1042 +0,0 @@ -# node-tar - -[Fast](./benchmarks) and full-featured Tar for Node.js - -The API is designed to mimic the behavior of `tar(1)` on unix systems. -If you are familiar with how tar works, most of this will hopefully be -straightforward for you. If not, then hopefully this module can teach -you useful unix skills that may come in handy someday :) - -## Background - -A "tar file" or "tarball" is an archive of file system entries -(directories, files, links, etc.) The name comes from "tape archive". -If you run `man tar` on almost any Unix command line, you'll learn -quite a bit about what it can do, and its history. - -Tar has 5 main top-level commands: - -* `c` Create an archive -* `r` Replace entries within an archive -* `u` Update entries within an archive (ie, replace if they're newer) -* `t` List out the contents of an archive -* `x` Extract an archive to disk - -The other flags and options modify how this top level function works. - -## High-Level API - -These 5 functions are the high-level API. 
All of them have a -single-character name (for unix nerds familiar with `tar(1)`) as well -as a long name (for everyone else). - -All the high-level functions take the following arguments, all three -of which are optional and may be omitted. - -1. `options` - An optional object specifying various options -2. `paths` - An array of paths to add or extract -3. `callback` - Called when the command is completed, if async. (If - sync or no file specified, providing a callback throws a - `TypeError`.) - -If the command is sync (ie, if `options.sync=true`), then the -callback is not allowed, since the action will be completed immediately. - -If a `file` argument is specified, and the command is async, then a -`Promise` is returned. In this case, if async, a callback may be -provided which is called when the command is completed. - -If a `file` option is not specified, then a stream is returned. For -`create`, this is a readable stream of the generated archive. For -`list` and `extract` this is a writable stream that an archive should -be written into. If a file is not specified, then a callback is not -allowed, because you're already getting a stream to work with. - -`replace` and `update` only work on existing archives, and so require -a `file` argument. - -Sync commands without a file argument return a stream that acts on its -input immediately in the same tick. For readable streams, this means -that all of the data is immediately available by calling -`stream.read()`. For writable streams, it will be acted upon as soon -as it is provided, but this can be at any time. - -### Warnings and Errors - -Tar emits warnings and errors for recoverable and unrecoverable situations, -respectively. In many cases, a warning only affects a single entry in an -archive, or is simply informing you that it's modifying an entry to comply -with the settings provided. - -Unrecoverable warnings will always raise an error (ie, emit `'error'` on -streaming actions, throw for non-streaming sync actions, reject the -returned Promise for non-streaming async operations, or call a provided -callback with an `Error` as the first argument). Recoverable errors will -raise an error only if `strict: true` is set in the options. - -Respond to (recoverable) warnings by listening to the `warn` event. -Handlers receive 3 arguments: - -- `code` String. One of the error codes below. This may not match - `data.code`, which preserves the original error code from fs and zlib. -- `message` String. More details about the error. -- `data` Metadata about the error. An `Error` object for errors raised by - fs and zlib. All fields are attached to errors raisd by tar. Typically - contains the following fields, as relevant: - - `tarCode` The tar error code. - - `code` Either the tar error code, or the error code set by the - underlying system. - - `file` The archive file being read or written. - - `cwd` Working directory for creation and extraction operations. - - `entry` The entry object (if it could be created) for `TAR_ENTRY_INFO`, - `TAR_ENTRY_INVALID`, and `TAR_ENTRY_ERROR` warnings. - - `header` The header object (if it could be created, and the entry could - not be created) for `TAR_ENTRY_INFO` and `TAR_ENTRY_INVALID` warnings. - - `recoverable` Boolean. If `false`, then the warning will emit an - `error`, even in non-strict mode. - -#### Error Codes - -* `TAR_ENTRY_INFO` An informative error indicating that an entry is being - modified, but otherwise processed normally. 
For example, removing `/` or - `C:\` from absolute paths if `preservePaths` is not set. - -* `TAR_ENTRY_INVALID` An indication that a given entry is not a valid tar - archive entry, and will be skipped. This occurs when: - - a checksum fails, - - a `linkpath` is missing for a link type, or - - a `linkpath` is provided for a non-link type. - - If every entry in a parsed archive raises an `TAR_ENTRY_INVALID` error, - then the archive is presumed to be unrecoverably broken, and - `TAR_BAD_ARCHIVE` will be raised. - -* `TAR_ENTRY_ERROR` The entry appears to be a valid tar archive entry, but - encountered an error which prevented it from being unpacked. This occurs - when: - - an unrecoverable fs error happens during unpacking, - - an entry has `..` in the path and `preservePaths` is not set, or - - an entry is extracting through a symbolic link, when `preservePaths` is - not set. - -* `TAR_ENTRY_UNSUPPORTED` An indication that a given entry is - a valid archive entry, but of a type that is unsupported, and so will be - skipped in archive creation or extracting. - -* `TAR_ABORT` When parsing gzipped-encoded archives, the parser will - abort the parse process raise a warning for any zlib errors encountered. - Aborts are considered unrecoverable for both parsing and unpacking. - -* `TAR_BAD_ARCHIVE` The archive file is totally hosed. This can happen for - a number of reasons, and always occurs at the end of a parse or extract: - - - An entry body was truncated before seeing the full number of bytes. - - The archive contained only invalid entries, indicating that it is - likely not an archive, or at least, not an archive this library can - parse. - - `TAR_BAD_ARCHIVE` is considered informative for parse operations, but - unrecoverable for extraction. Note that, if encountered at the end of an - extraction, tar WILL still have extracted as much it could from the - archive, so there may be some garbage files to clean up. - -Errors that occur deeper in the system (ie, either the filesystem or zlib) -will have their error codes left intact, and a `tarCode` matching one of -the above will be added to the warning metadata or the raised error object. - -Errors generated by tar will have one of the above codes set as the -`error.code` field as well, but since errors originating in zlib or fs will -have their original codes, it's better to read `error.tarCode` if you wish -to see how tar is handling the issue. - -### Examples - -The API mimics the `tar(1)` command line functionality, with aliases -for more human-readable option and function names. The goal is that -if you know how to use `tar(1)` in Unix, then you know how to use -`require('tar')` in JavaScript. - -To replicate `tar czf my-tarball.tgz files and folders`, you'd do: - -```js -tar.c( - { - gzip: <true|gzip options>, - file: 'my-tarball.tgz' - }, - ['some', 'files', 'and', 'folders'] -).then(_ => { .. tarball has been created .. }) -``` - -To replicate `tar cz files and folders > my-tarball.tgz`, you'd do: - -```js -tar.c( // or tar.create - { - gzip: <true|gzip options> - }, - ['some', 'files', 'and', 'folders'] -).pipe(fs.createWriteStream('my-tarball.tgz')) -``` - -To replicate `tar xf my-tarball.tgz` you'd do: - -```js -tar.x( // or tar.extract( - { - file: 'my-tarball.tgz' - } -).then(_=> { .. tarball has been dumped in cwd .. 
}) -``` - -To replicate `cat my-tarball.tgz | tar x -C some-dir --strip=1`: - -```js -fs.createReadStream('my-tarball.tgz').pipe( - tar.x({ - strip: 1, - C: 'some-dir' // alias for cwd:'some-dir', also ok - }) -) -``` - -To replicate `tar tf my-tarball.tgz`, do this: - -```js -tar.t({ - file: 'my-tarball.tgz', - onentry: entry => { .. do whatever with it .. } -}) -``` - -To replicate `cat my-tarball.tgz | tar t` do: - -```js -fs.createReadStream('my-tarball.tgz') - .pipe(tar.t()) - .on('entry', entry => { .. do whatever with it .. }) -``` - -To do anything synchronous, add `sync: true` to the options. Note -that sync functions don't take a callback and don't return a promise. -When the function returns, it's already done. Sync methods without a -file argument return a sync stream, which flushes immediately. But, -of course, it still won't be done until you `.end()` it. - -To filter entries, add `filter: <function>` to the options. -Tar-creating methods call the filter with `filter(path, stat)`. -Tar-reading methods (including extraction) call the filter with -`filter(path, entry)`. The filter is called in the `this`-context of -the `Pack` or `Unpack` stream object. - -The arguments list to `tar t` and `tar x` specify a list of filenames -to extract or list, so they're equivalent to a filter that tests if -the file is in the list. - -For those who _aren't_ fans of tar's single-character command names: - -``` -tar.c === tar.create -tar.r === tar.replace (appends to archive, file is required) -tar.u === tar.update (appends if newer, file is required) -tar.x === tar.extract -tar.t === tar.list -``` - -Keep reading for all the command descriptions and options, as well as -the low-level API that they are built on. - -### tar.c(options, fileList, callback) [alias: tar.create] - -Create a tarball archive. - -The `fileList` is an array of paths to add to the tarball. Adding a -directory also adds its children recursively. - -An entry in `fileList` that starts with an `@` symbol is a tar archive -whose entries will be added. To add a file that starts with `@`, -prepend it with `./`. - -The following options are supported: - -- `file` Write the tarball archive to the specified filename. If this - is specified, then the callback will be fired when the file has been - written, and a promise will be returned that resolves when the file - is written. If a filename is not specified, then a Readable Stream - will be returned which will emit the file data. [Alias: `f`] -- `sync` Act synchronously. If this is set, then any provided file - will be fully written after the call to `tar.c`. If this is set, - and a file is not provided, then the resulting stream will already - have the data ready to `read` or `emit('data')` as soon as you - request it. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `strict` Treat warnings as crash-worthy errors. Default false. -- `cwd` The current working directory for creating the archive. - Defaults to `process.cwd()`. [Alias: `C`] -- `prefix` A path portion to prefix onto the entries in the archive. -- `gzip` Set to any truthy value to create a gzipped archive, or an - object with settings for `zlib.Gzip()` [Alias: `z`] -- `filter` A function that gets called with `(path, stat)` for each - entry being added. Return `true` to add the entry to the archive, - or `false` to omit it. 
-- `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note - that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable - default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped - from absolute paths. [Alias: `P`] -- `mode` The mode to set on the created file archive -- `noDirRecurse` Do not recursively archive the contents of - directories. [Alias: `n`] -- `follow` Set to true to pack the targets of symbolic links. Without - this option, symbolic links are archived as such. [Alias: `L`, `h`] -- `noPax` Suppress pax extended headers. Note that this means that - long paths and linkpaths will be truncated, and large or negative - numeric values may be interpreted incorrectly. -- `noMtime` Set to true to omit writing `mtime` values for entries. - Note that this prevents using other mtime-based features like - `tar.update` or the `keepNewer` option with the resulting tar archive. - [Alias: `m`, `no-mtime`] -- `mtime` Set to a `Date` object to force a specific `mtime` for - everything added to the archive. Overridden by `noMtime`. - -The following options are mostly internal, but can be modified in some -advanced use cases, such as re-using caches between runs. - -- `linkCache` A Map object containing the device and inode value for - any file whose nlink is > 1, to identify hard links. -- `statCache` A Map object that caches calls `lstat`. -- `readdirCache` A Map object that caches calls to `readdir`. -- `jobs` A number specifying how many concurrent jobs to run. - Defaults to 4. -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 16 MB. - -### tar.x(options, fileList, callback) [alias: tar.extract] - -Extract a tarball archive. - -The `fileList` is an array of paths to extract from the tarball. If -no paths are provided, then all the entries are extracted. - -If the archive is gzipped, then tar will detect this and unzip it. - -Note that all directories that are created will be forced to be -writable, readable, and listable by their owner, to avoid cases where -a directory prevents extraction of child entries by virtue of its -mode. - -Most extraction errors will cause a `warn` event to be emitted. If -the `cwd` is missing, or not a directory, then the extraction will -fail completely. - -The following options are supported: - -- `cwd` Extract files relative to the specified directory. Defaults - to `process.cwd()`. If provided, this must exist and must be a - directory. [Alias: `C`] -- `file` The archive file to extract. If not specified, then a - Writable stream is returned where the archive data should be - written. [Alias: `f`] -- `sync` Create files and directories synchronously. -- `strict` Treat warnings as crash-worthy errors. Default false. -- `filter` A function that gets called with `(path, entry)` for each - entry being unpacked. Return `true` to unpack the entry from the - archive, or `false` to skip it. -- `newer` Set to true to keep the existing file on disk if it's newer - than the file in the archive. [Alias: `keep-newer`, - `keep-newer-files`] -- `keep` Do not overwrite existing files. In particular, if a file - appears more than once in an archive, later copies will not - overwrite earlier copies. 
[Alias: `k`, `keep-existing`] -- `preservePaths` Allow absolute paths, paths containing `..`, and - extracting through symbolic links. By default, `/` is stripped from - absolute paths, `..` paths are not extracted, and any file whose - location would be modified by a symbolic link is not extracted. - [Alias: `P`] -- `unlink` Unlink files before creating them. Without this option, - tar overwrites existing files, which preserves existing hardlinks. - With this option, existing hardlinks will be broken, as will any - symlink that would affect the location of an extracted file. [Alias: - `U`] -- `strip` Remove the specified number of leading path elements. - Pathnames with fewer elements will be silently skipped. Note that - the pathname is edited after applying the filter, but before - security checks. [Alias: `strip-components`, `stripComponents`] -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `preserveOwner` If true, tar will set the `uid` and `gid` of - extracted entries to the `uid` and `gid` fields in the archive. - This defaults to true when run as root, and false otherwise. If - false, then files and directories will be set with the owner and - group of the user running the process. This is similar to `-p` in - `tar(1)`, but ACLs and other system-specific data is never unpacked - in this implementation, and modes are set by default already. - [Alias: `p`] -- `uid` Set to a number to force ownership of all extracted files and - folders, and all implicitly created directories, to be owned by the - specified user id, regardless of the `uid` field in the archive. - Cannot be used along with `preserveOwner`. Requires also setting a - `gid` option. -- `gid` Set to a number to force ownership of all extracted files and - folders, and all implicitly created directories, to be owned by the - specified group id, regardless of the `gid` field in the archive. - Cannot be used along with `preserveOwner`. Requires also setting a - `uid` option. -- `noMtime` Set to true to omit writing `mtime` value for extracted - entries. [Alias: `m`, `no-mtime`] -- `transform` Provide a function that takes an `entry` object, and - returns a stream, or any falsey value. If a stream is provided, - then that stream's data will be written instead of the contents of - the archive entry. If a falsey value is provided, then the entry is - written to disk as normal. (To exclude items from extraction, use - the `filter` option described above.) -- `onentry` A function that gets called with `(entry)` for each entry - that passes the filter. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `noChmod` Set to true to omit calling `fs.chmod()` to ensure that the - extracted file matches the entry mode. This also suppresses the call to - `process.umask()` to determine the default umask value, since tar will - extract with whatever mode is provided, and let the process `umask` apply - normally. - -The following options are mostly internal, but can be modified in some -advanced use cases, such as re-using caches between runs. - -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 16 MB. -- `umask` Filter the modes of entries like `process.umask()`. -- `dmode` Default mode for directories -- `fmode` Default mode for files -- `dirCache` A Map object of which directories exist. 
-- `maxMetaEntrySize` The maximum size of meta entries that is - supported. Defaults to 1 MB. - -Note that using an asynchronous stream type with the `transform` -option will cause undefined behavior in sync extractions. -[MiniPass](http://npm.im/minipass)-based streams are designed for this -use case. - -### tar.t(options, fileList, callback) [alias: tar.list] - -List the contents of a tarball archive. - -The `fileList` is an array of paths to list from the tarball. If -no paths are provided, then all the entries are listed. - -If the archive is gzipped, then tar will detect this and unzip it. - -Returns an event emitter that emits `entry` events with -`tar.ReadEntry` objects. However, they don't emit `'data'` or `'end'` -events. (If you want to get actual readable entries, use the -`tar.Parse` class instead.) - -The following options are supported: - -- `cwd` Extract files relative to the specified directory. Defaults - to `process.cwd()`. [Alias: `C`] -- `file` The archive file to list. If not specified, then a - Writable stream is returned where the archive data should be - written. [Alias: `f`] -- `sync` Read the specified file synchronously. (This has no effect - when a file option isn't specified, because entries are emitted as - fast as they are parsed from the stream anyway.) -- `strict` Treat warnings as crash-worthy errors. Default false. -- `filter` A function that gets called with `(path, entry)` for each - entry being listed. Return `true` to emit the entry from the - archive, or `false` to skip it. -- `onentry` A function that gets called with `(entry)` for each entry - that passes the filter. This is important for when both `file` and - `sync` are set, because it will be called synchronously. -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 16 MB. -- `noResume` By default, `entry` streams are resumed immediately after - the call to `onentry`. Set `noResume: true` to suppress this - behavior. Note that by opting into this, the stream will never - complete until the entry data is consumed. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") - -### tar.u(options, fileList, callback) [alias: tar.update] - -Add files to an archive if they are newer than the entry already in -the tarball archive. - -The `fileList` is an array of paths to add to the tarball. Adding a -directory also adds its children recursively. - -An entry in `fileList` that starts with an `@` symbol is a tar archive -whose entries will be added. To add a file that starts with `@`, -prepend it with `./`. - -The following options are supported: - -- `file` Required. Write the tarball archive to the specified - filename. [Alias: `f`] -- `sync` Act synchronously. If this is set, then any provided file - will be fully written after the call to `tar.c`. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `strict` Treat warnings as crash-worthy errors. Default false. -- `cwd` The current working directory for adding entries to the - archive. Defaults to `process.cwd()`. [Alias: `C`] -- `prefix` A path portion to prefix onto the entries in the archive. -- `gzip` Set to any truthy value to create a gzipped archive, or an - object with settings for `zlib.Gzip()` [Alias: `z`] -- `filter` A function that gets called with `(path, stat)` for each - entry being added. 
Return `true` to add the entry to the archive, - or `false` to omit it. -- `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note - that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable - default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped - from absolute paths. [Alias: `P`] -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 16 MB. -- `noDirRecurse` Do not recursively archive the contents of - directories. [Alias: `n`] -- `follow` Set to true to pack the targets of symbolic links. Without - this option, symbolic links are archived as such. [Alias: `L`, `h`] -- `noPax` Suppress pax extended headers. Note that this means that - long paths and linkpaths will be truncated, and large or negative - numeric values may be interpreted incorrectly. -- `noMtime` Set to true to omit writing `mtime` values for entries. - Note that this prevents using other mtime-based features like - `tar.update` or the `keepNewer` option with the resulting tar archive. - [Alias: `m`, `no-mtime`] -- `mtime` Set to a `Date` object to force a specific `mtime` for - everything added to the archive. Overridden by `noMtime`. - -### tar.r(options, fileList, callback) [alias: tar.replace] - -Add files to an existing archive. Because later entries override -earlier entries, this effectively replaces any existing entries. - -The `fileList` is an array of paths to add to the tarball. Adding a -directory also adds its children recursively. - -An entry in `fileList` that starts with an `@` symbol is a tar archive -whose entries will be added. To add a file that starts with `@`, -prepend it with `./`. - -The following options are supported: - -- `file` Required. Write the tarball archive to the specified - filename. [Alias: `f`] -- `sync` Act synchronously. If this is set, then any provided file - will be fully written after the call to `tar.c`. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `strict` Treat warnings as crash-worthy errors. Default false. -- `cwd` The current working directory for adding entries to the - archive. Defaults to `process.cwd()`. [Alias: `C`] -- `prefix` A path portion to prefix onto the entries in the archive. -- `gzip` Set to any truthy value to create a gzipped archive, or an - object with settings for `zlib.Gzip()` [Alias: `z`] -- `filter` A function that gets called with `(path, stat)` for each - entry being added. Return `true` to add the entry to the archive, - or `false` to omit it. -- `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note - that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable - default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped - from absolute paths. [Alias: `P`] -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 16 MB. -- `noDirRecurse` Do not recursively archive the contents of - directories. [Alias: `n`] -- `follow` Set to true to pack the targets of symbolic links. Without - this option, symbolic links are archived as such. 
[Alias: `L`, `h`] -- `noPax` Suppress pax extended headers. Note that this means that - long paths and linkpaths will be truncated, and large or negative - numeric values may be interpreted incorrectly. -- `noMtime` Set to true to omit writing `mtime` values for entries. - Note that this prevents using other mtime-based features like - `tar.update` or the `keepNewer` option with the resulting tar archive. - [Alias: `m`, `no-mtime`] -- `mtime` Set to a `Date` object to force a specific `mtime` for - everything added to the archive. Overridden by `noMtime`. - - -## Low-Level API - -### class tar.Pack - -A readable tar stream. - -Has all the standard readable stream interface stuff. `'data'` and -`'end'` events, `read()` method, `pause()` and `resume()`, etc. - -#### constructor(options) - -The following options are supported: - -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `strict` Treat warnings as crash-worthy errors. Default false. -- `cwd` The current working directory for creating the archive. - Defaults to `process.cwd()`. -- `prefix` A path portion to prefix onto the entries in the archive. -- `gzip` Set to any truthy value to create a gzipped archive, or an - object with settings for `zlib.Gzip()` -- `filter` A function that gets called with `(path, stat)` for each - entry being added. Return `true` to add the entry to the archive, - or `false` to omit it. -- `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note - that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable - default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped - from absolute paths. -- `linkCache` A Map object containing the device and inode value for - any file whose nlink is > 1, to identify hard links. -- `statCache` A Map object that caches calls `lstat`. -- `readdirCache` A Map object that caches calls to `readdir`. -- `jobs` A number specifying how many concurrent jobs to run. - Defaults to 4. -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 16 MB. -- `noDirRecurse` Do not recursively archive the contents of - directories. -- `follow` Set to true to pack the targets of symbolic links. Without - this option, symbolic links are archived as such. -- `noPax` Suppress pax extended headers. Note that this means that - long paths and linkpaths will be truncated, and large or negative - numeric values may be interpreted incorrectly. -- `noMtime` Set to true to omit writing `mtime` values for entries. - Note that this prevents using other mtime-based features like - `tar.update` or the `keepNewer` option with the resulting tar archive. -- `mtime` Set to a `Date` object to force a specific `mtime` for - everything added to the archive. Overridden by `noMtime`. - -#### add(path) - -Adds an entry to the archive. Returns the Pack stream. - -#### write(path) - -Adds an entry to the archive. Returns true if flushed. - -#### end() - -Finishes the archive. - -### class tar.Pack.Sync - -Synchronous version of `tar.Pack`. - -### class tar.Unpack - -A writable stream that unpacks a tar archive onto the file system. - -All the normal writable stream stuff is supported. `write()` and -`end()` methods, `'drain'` events, etc. 
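
For a rough sense of how these low-level classes fit together, here is a
minimal sketch that drives `Pack` and `Unpack` directly instead of the
high-level `tar.c`/`tar.x` (the file and directory names are purely
illustrative):

```js
const tar = require('tar')
const fs = require('fs')

// Pack is a readable stream of archive data; Unpack is a writable stream
// that turns archive data back into files on disk.
fs.mkdirSync('dest-dir', { recursive: true }) // Unpack's cwd must exist

const pack = new tar.Pack({ cwd: 'src-dir', gzip: true })
pack.add('some-file.txt') // directories added here recurse by default
pack.end()                // no more entries will be added

pack.pipe(fs.createWriteStream('out.tgz')).on('close', () => {
  fs.createReadStream('out.tgz')
    .pipe(new tar.Unpack({ cwd: 'dest-dir' })) // gzip is detected automatically
    .on('close', () => console.log('unpacked into dest-dir'))
})
```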
- -Note that all directories that are created will be forced to be -writable, readable, and listable by their owner, to avoid cases where -a directory prevents extraction of child entries by virtue of its -mode. - -`'close'` is emitted when it's done writing stuff to the file system. - -Most unpack errors will cause a `warn` event to be emitted. If the -`cwd` is missing, or not a directory, then an error will be emitted. - -#### constructor(options) - -- `cwd` Extract files relative to the specified directory. Defaults - to `process.cwd()`. If provided, this must exist and must be a - directory. -- `filter` A function that gets called with `(path, entry)` for each - entry being unpacked. Return `true` to unpack the entry from the - archive, or `false` to skip it. -- `newer` Set to true to keep the existing file on disk if it's newer - than the file in the archive. -- `keep` Do not overwrite existing files. In particular, if a file - appears more than once in an archive, later copies will not - overwrite earlier copies. -- `preservePaths` Allow absolute paths, paths containing `..`, and - extracting through symbolic links. By default, `/` is stripped from - absolute paths, `..` paths are not extracted, and any file whose - location would be modified by a symbolic link is not extracted. -- `unlink` Unlink files before creating them. Without this option, - tar overwrites existing files, which preserves existing hardlinks. - With this option, existing hardlinks will be broken, as will any - symlink that would affect the location of an extracted file. -- `strip` Remove the specified number of leading path elements. - Pathnames with fewer elements will be silently skipped. Note that - the pathname is edited after applying the filter, but before - security checks. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `umask` Filter the modes of entries like `process.umask()`. -- `dmode` Default mode for directories -- `fmode` Default mode for files -- `dirCache` A Map object of which directories exist. -- `maxMetaEntrySize` The maximum size of meta entries that is - supported. Defaults to 1 MB. -- `preserveOwner` If true, tar will set the `uid` and `gid` of - extracted entries to the `uid` and `gid` fields in the archive. - This defaults to true when run as root, and false otherwise. If - false, then files and directories will be set with the owner and - group of the user running the process. This is similar to `-p` in - `tar(1)`, but ACLs and other system-specific data is never unpacked - in this implementation, and modes are set by default already. -- `win32` True if on a windows platform. Causes behavior where - filenames containing `<|>?` chars are converted to - windows-compatible values while being unpacked. -- `uid` Set to a number to force ownership of all extracted files and - folders, and all implicitly created directories, to be owned by the - specified user id, regardless of the `uid` field in the archive. - Cannot be used along with `preserveOwner`. Requires also setting a - `gid` option. -- `gid` Set to a number to force ownership of all extracted files and - folders, and all implicitly created directories, to be owned by the - specified group id, regardless of the `gid` field in the archive. - Cannot be used along with `preserveOwner`. Requires also setting a - `uid` option. -- `noMtime` Set to true to omit writing `mtime` value for extracted - entries. 
-- `transform` Provide a function that takes an `entry` object, and - returns a stream, or any falsey value. If a stream is provided, - then that stream's data will be written instead of the contents of - the archive entry. If a falsey value is provided, then the entry is - written to disk as normal. (To exclude items from extraction, use - the `filter` option described above.) -- `strict` Treat warnings as crash-worthy errors. Default false. -- `onentry` A function that gets called with `(entry)` for each entry - that passes the filter. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `noChmod` Set to true to omit calling `fs.chmod()` to ensure that the - extracted file matches the entry mode. This also suppresses the call to - `process.umask()` to determine the default umask value, since tar will - extract with whatever mode is provided, and let the process `umask` apply - normally. - -### class tar.Unpack.Sync - -Synchronous version of `tar.Unpack`. - -Note that using an asynchronous stream type with the `transform` -option will cause undefined behavior in sync unpack streams. -[MiniPass](http://npm.im/minipass)-based streams are designed for this -use case. - -### class tar.Parse - -A writable stream that parses a tar archive stream. All the standard -writable stream stuff is supported. - -If the archive is gzipped, then tar will detect this and unzip it. - -Emits `'entry'` events with `tar.ReadEntry` objects, which are -themselves readable streams that you can pipe wherever. - -Each `entry` will not emit until the one before it is flushed through, -so make sure to either consume the data (with `on('data', ...)` or -`.pipe(...)`) or throw it away with `.resume()` to keep the stream -flowing. - -#### constructor(options) - -Returns an event emitter that emits `entry` events with -`tar.ReadEntry` objects. - -The following options are supported: - -- `strict` Treat warnings as crash-worthy errors. Default false. -- `filter` A function that gets called with `(path, entry)` for each - entry being listed. Return `true` to emit the entry from the - archive, or `false` to skip it. -- `onentry` A function that gets called with `(entry)` for each entry - that passes the filter. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") - -#### abort(error) - -Stop all parsing activities. This is called when there are zlib -errors. It also emits an unrecoverable warning with the error provided. - -### class tar.ReadEntry extends [MiniPass](http://npm.im/minipass) - -A representation of an entry that is being read out of a tar archive. - -It has the following fields: - -- `extended` The extended metadata object provided to the constructor. -- `globalExtended` The global extended metadata object provided to the - constructor. -- `remain` The number of bytes remaining to be written into the - stream. -- `blockRemain` The number of 512-byte blocks remaining to be written - into the stream. -- `ignore` Whether this entry should be ignored. -- `meta` True if this represents metadata about the next entry, false - if it represents a filesystem object. -- All the fields from the header, extended header, and global extended - header are added to the ReadEntry object. So it has `path`, `type`, - `size`, `mode`, and so on. 
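
For illustration, a minimal sketch of consuming `ReadEntry` objects from a
`Parse` stream (the archive name is hypothetical); each entry must be piped
somewhere or resumed so the parse keeps flowing:

```js
const tar = require('tar')
const fs = require('fs')

const parser = new tar.Parse()
parser.on('entry', entry => {
  // Fields from the header are available directly on the ReadEntry.
  console.log(entry.path, entry.type, entry.size)
  if (entry.type === 'File')
    entry.pipe(process.stdout) // consume the body...
  else
    entry.resume()             // ...or discard it so later entries can emit
})

fs.createReadStream('archive.tgz').pipe(parser) // gzip is detected automatically
```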
- -#### constructor(header, extended, globalExtended) - -Create a new ReadEntry object with the specified header, extended -header, and global extended header values. - -### class tar.WriteEntry extends [MiniPass](http://npm.im/minipass) - -A representation of an entry that is being written from the file -system into a tar archive. - -Emits data for the Header, and for the Pax Extended Header if one is -required, as well as any body data. - -Creating a WriteEntry for a directory does not also create -WriteEntry objects for all of the directory contents. - -It has the following fields: - -- `path` The path field that will be written to the archive. By - default, this is also the path from the cwd to the file system - object. -- `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note - that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable - default" for most unix systems, based on a `umask` value of `0o22`. -- `myuid` If supported, the uid of the user running the current - process. -- `myuser` The `env.USER` string if set, or `''`. Set as the entry - `uname` field if the file's `uid` matches `this.myuid`. -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 1 MB. -- `linkCache` A Map object containing the device and inode value for - any file whose nlink is > 1, to identify hard links. -- `statCache` A Map object that caches calls `lstat`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped - from absolute paths. -- `cwd` The current working directory for creating the archive. - Defaults to `process.cwd()`. -- `absolute` The absolute path to the entry on the filesystem. By - default, this is `path.resolve(this.cwd, this.path)`, but it can be - overridden explicitly. -- `strict` Treat warnings as crash-worthy errors. Default false. -- `win32` True if on a windows platform. Causes behavior where paths - replace `\` with `/` and filenames containing the windows-compatible - forms of `<|>?:` characters are converted to actual `<|>?:` characters - in the archive. -- `noPax` Suppress pax extended headers. Note that this means that - long paths and linkpaths will be truncated, and large or negative - numeric values may be interpreted incorrectly. -- `noMtime` Set to true to omit writing `mtime` values for entries. - Note that this prevents using other mtime-based features like - `tar.update` or the `keepNewer` option with the resulting tar archive. - - -#### constructor(path, options) - -`path` is the path of the entry as it is written in the archive. - -The following options are supported: - -- `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note - that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable - default" for most unix systems, based on a `umask` value of `0o22`. -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 1 MB. -- `linkCache` A Map object containing the device and inode value for - any file whose nlink is > 1, to identify hard links. -- `statCache` A Map object that caches calls `lstat`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped - from absolute paths. -- `cwd` The current working directory for creating the archive. - Defaults to `process.cwd()`. 
-- `absolute` The absolute path to the entry on the filesystem. By - default, this is `path.resolve(this.cwd, this.path)`, but it can be - overridden explicitly. -- `strict` Treat warnings as crash-worthy errors. Default false. -- `win32` True if on a windows platform. Causes behavior where paths - replace `\` with `/`. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `noMtime` Set to true to omit writing `mtime` values for entries. - Note that this prevents using other mtime-based features like - `tar.update` or the `keepNewer` option with the resulting tar archive. -- `umask` Set to restrict the modes on the entries in the archive, - somewhat like how umask works on file creation. Defaults to - `process.umask()` on unix systems, or `0o22` on Windows. - -#### warn(message, data) - -If strict, emit an error with the provided message. - -Othewise, emit a `'warn'` event with the provided message and data. - -### class tar.WriteEntry.Sync - -Synchronous version of tar.WriteEntry - -### class tar.WriteEntry.Tar - -A version of tar.WriteEntry that gets its data from a tar.ReadEntry -instead of from the filesystem. - -#### constructor(readEntry, options) - -`readEntry` is the entry being read out of another archive. - -The following options are supported: - -- `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note - that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable - default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped - from absolute paths. -- `strict` Treat warnings as crash-worthy errors. Default false. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `noMtime` Set to true to omit writing `mtime` values for entries. - Note that this prevents using other mtime-based features like - `tar.update` or the `keepNewer` option with the resulting tar archive. - -### class tar.Header - -A class for reading and writing header blocks. - -It has the following fields: - -- `nullBlock` True if decoding a block which is entirely composed of - `0x00` null bytes. (Useful because tar files are terminated by - at least 2 null blocks.) -- `cksumValid` True if the checksum in the header is valid, false - otherwise. -- `needPax` True if the values, as encoded, will require a Pax - extended header. -- `path` The path of the entry. -- `mode` The 4 lowest-order octal digits of the file mode. That is, - read/write/execute permissions for world, group, and owner, and the - setuid, setgid, and sticky bits. -- `uid` Numeric user id of the file owner -- `gid` Numeric group id of the file owner -- `size` Size of the file in bytes -- `mtime` Modified time of the file -- `cksum` The checksum of the header. This is generated by adding all - the bytes of the header block, treating the checksum field itself as - all ascii space characters (that is, `0x20`). -- `type` The human-readable name of the type of entry this represents, - or the alphanumeric key if unknown. -- `typeKey` The alphanumeric key for the type of entry this header - represents. -- `linkpath` The target of Link and SymbolicLink entries. 
-- `uname` Human-readable user name of the file owner -- `gname` Human-readable group name of the file owner -- `devmaj` The major portion of the device number. Always `0` for - files, directories, and links. -- `devmin` The minor portion of the device number. Always `0` for - files, directories, and links. -- `atime` File access time. -- `ctime` File change time. - -#### constructor(data, [offset=0]) - -`data` is optional. It is either a Buffer that should be interpreted -as a tar Header starting at the specified offset and continuing for -512 bytes, or a data object of keys and values to set on the header -object, and eventually encode as a tar Header. - -#### decode(block, offset) - -Decode the provided buffer starting at the specified offset. - -Buffer length must be greater than 512 bytes. - -#### set(data) - -Set the fields in the data object. - -#### encode(buffer, offset) - -Encode the header fields into the buffer at the specified offset. - -Returns `this.needPax` to indicate whether a Pax Extended Header is -required to properly encode the specified data. - -### class tar.Pax - -An object representing a set of key-value pairs in an Pax extended -header entry. - -It has the following fields. Where the same name is used, they have -the same semantics as the tar.Header field of the same name. - -- `global` True if this represents a global extended header, or false - if it is for a single entry. -- `atime` -- `charset` -- `comment` -- `ctime` -- `gid` -- `gname` -- `linkpath` -- `mtime` -- `path` -- `size` -- `uid` -- `uname` -- `dev` -- `ino` -- `nlink` - -#### constructor(object, global) - -Set the fields set in the object. `global` is a boolean that defaults -to false. - -#### encode() - -Return a Buffer containing the header and body for the Pax extended -header entry, or `null` if there is nothing to encode. - -#### encodeBody() - -Return a string representing the body of the pax extended header -entry. - -#### encodeField(fieldName) - -Return a string representing the key/value encoding for the specified -fieldName, or `''` if the field is unset. - -### tar.Pax.parse(string, extended, global) - -Return a new Pax object created by parsing the contents of the string -provided. - -If the `extended` object is set, then also add the fields from that -object. (This is necessary because multiple metadata entries can -occur in sequence.) - -### tar.types - -A translation table for the `type` field in tar headers. - -#### tar.types.name.get(code) - -Get the human-readable name for a given alphanumeric code. - -#### tar.types.code.get(name) - -Get the alphanumeric code for a given human-readable name. diff --git a/node_modules/tar/lib/strip-absolute-path.js b/node_modules/tar/lib/strip-absolute-path.js new file mode 100644 index 0000000000000..49161ddc30473 --- /dev/null +++ b/node_modules/tar/lib/strip-absolute-path.js @@ -0,0 +1,14 @@ +// unix absolute paths are also absolute on win32, so we use this for both +const { isAbsolute, parse } = require('path').win32 + +// returns [root, stripped] +module.exports = path => { + let r = '' + while (isAbsolute(path)) { + // windows will think that //x/y/z has a "root" of //x/y/ + const root = path.charAt(0) === '/' ? 
'/' : parse(path).root + path = path.substr(root.length) + r += root + } + return [r, path] +} diff --git a/node_modules/tar/lib/unpack.js b/node_modules/tar/lib/unpack.js index 7d4b79d9eb3f0..edaf7833cdb5b 100644 --- a/node_modules/tar/lib/unpack.js +++ b/node_modules/tar/lib/unpack.js @@ -14,6 +14,7 @@ const path = require('path') const mkdir = require('./mkdir.js') const wc = require('./winchars.js') const pathReservations = require('./path-reservations.js') +const stripAbsolutePath = require('./strip-absolute-path.js') const ONENTRY = Symbol('onEntry') const CHECKFS = Symbol('checkFs') @@ -224,11 +225,10 @@ class Unpack extends Parser { // absolutes on posix are also absolutes on win32 // so we only need to test this one to get both - if (path.win32.isAbsolute(p)) { - const parsed = path.win32.parse(p) - entry.path = p.substr(parsed.root.length) - const r = parsed.root - this.warn('TAR_ENTRY_INFO', `stripping ${r} from absolute path`, { + const [root, stripped] = stripAbsolutePath(p) + if (root) { + entry.path = stripped + this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, { entry, path: p, }) @@ -465,6 +465,19 @@ class Unpack extends Parser { } [CHECKFS2] (entry, done) { + // if we are not creating a directory, and the path is in the dirCache, + // then that means we are about to delete the directory we created + // previously, and it is no longer going to be a directory, and neither + // is any of its children. + if (entry.type !== 'Directory') { + for (const path of this.dirCache.keys()) { + if (path === entry.absolute || + path.indexOf(entry.absolute + '/') === 0 || + path.indexOf(entry.absolute + '\\') === 0) + this.dirCache.delete(path) + } + } + this[MKDIR](path.dirname(entry.absolute), this.dmode, er => { if (er) { done() @@ -529,6 +542,15 @@ class Unpack extends Parser { class UnpackSync extends Unpack { [CHECKFS] (entry) { + if (entry.type !== 'Directory') { + for (const path of this.dirCache.keys()) { + if (path === entry.absolute || + path.indexOf(entry.absolute + '/') === 0 || + path.indexOf(entry.absolute + '\\') === 0) + this.dirCache.delete(path) + } + } + const er = this[MKDIR](path.dirname(entry.absolute), this.dmode, neverCalled) if (er) return this[ONERROR](er, entry) diff --git a/node_modules/tar/lib/write-entry.js b/node_modules/tar/lib/write-entry.js index 1d0b746cd6818..0301759ad386f 100644 --- a/node_modules/tar/lib/write-entry.js +++ b/node_modules/tar/lib/write-entry.js @@ -23,6 +23,7 @@ const CLOSE = Symbol('close') const MODE = Symbol('mode') const warner = require('./warn-mixin.js') const winchars = require('./winchars.js') +const stripAbsolutePath = require('./strip-absolute-path.js') const modeFix = require('./mode-fix.js') @@ -52,12 +53,12 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { this.on('warn', opt.onwarn) let pathWarn = false - if (!this.preservePaths && path.win32.isAbsolute(p)) { - // absolutes on posix are also absolutes on win32 - // so we only need to test this one to get both - const parsed = path.win32.parse(p) - this.path = p.substr(parsed.root.length) - pathWarn = parsed.root + if (!this.preservePaths) { + const [root, stripped] = stripAbsolutePath(this.path) + if (root) { + this.path = stripped + pathWarn = root + } } this.win32 = !!opt.win32 || process.platform === 'win32' @@ -351,10 +352,12 @@ const WriteEntryTar = warner(class WriteEntryTar extends MiniPass { this.on('warn', opt.onwarn) let pathWarn = false - if (path.isAbsolute(this.path) && !this.preservePaths) { - const parsed = 
path.parse(this.path) - pathWarn = parsed.root - this.path = this.path.substr(parsed.root.length) + if (!this.preservePaths) { + const [root, stripped] = stripAbsolutePath(this.path) + if (root) { + this.path = stripped + pathWarn = root + } } this.remain = readEntry.size diff --git a/node_modules/tar/package.json b/node_modules/tar/package.json index 9b8b96ec66ca5..1c82ac7291631 100644 --- a/node_modules/tar/package.json +++ b/node_modules/tar/package.json @@ -2,7 +2,7 @@ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)", "name": "tar", "description": "tar for node", - "version": "6.1.0", + "version": "6.1.2", "repository": { "type": "git", "url": "https://github.com/npm/node-tar.git" @@ -38,7 +38,7 @@ "events-to-array": "^1.1.2", "mutate-fs": "^2.1.1", "rimraf": "^2.7.1", - "tap": "^14.9.2", + "tap": "^15.0.9", "tar-fs": "^1.16.3", "tar-stream": "^1.6.2" }, diff --git a/node_modules/text-table/.travis.yml b/node_modules/text-table/.travis.yml deleted file mode 100644 index cc4dba29d959a..0000000000000 --- a/node_modules/text-table/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -language: node_js -node_js: - - "0.8" - - "0.10" diff --git a/node_modules/tiny-relative-date/README.md b/node_modules/tiny-relative-date/README.md deleted file mode 100644 index 4087a79263239..0000000000000 --- a/node_modules/tiny-relative-date/README.md +++ /dev/null @@ -1,120 +0,0 @@ -# Relative Date - -[![Build Status](https://travis-ci.org/wildlyinaccurate/tiny-relative-date.png?branch=master)](https://travis-ci.org/wildlyinaccurate/tiny-relative-date) - -Tiny function that provides relative, human-readable dates. - -## Installation - -``` -npm install tiny-relative-date -``` - -## Usage - -The module returns a `relativeDate` function with English translations by default. - -```js -const relativeDate = require('tiny-relative-date') -``` - -The `relativeDate` function accepts date strings or `Date` objects. - -```js -relativeDate('2017-06-25 09:00') // '12 hours ago' -relativeDate(new Date()) // 'just now' -``` - -The value of "now" can also be passed as a second parameter. - -```js -const now = new Date('2017-06-25 08:00:00') -const date = new Date('2017-06-25 07:00:00') - -relativeDate(date, now) // 'an hour ago' -``` - -### Using a non-English locale - -The tiny-relative-date module can be initialised with a locale. See the [translations directory]('./translations') for a list of available locales. - -```js -const relativeDateFactory = require('tiny-relative-date/lib/factory') -const deTranslations = require('tiny-relative-date/translations/de') -const relativeDate = relativeDateFactory(deTranslations) - -relativeDate(new Date()) // 'gerade eben' -``` - -### Using a custom locale - -You can also use a completely custom locale by passing a translations object instead of a locale string. Translations can be plain strings with a `{{time}}` placeholder, or they can be functions. See the **Adding new locales** section below for a list of translation keys. - -```js -const relativeDateFactory = require('tiny-relative-date/lib/factory') -const relativeDate = relativeDateFactory({ - hoursAgo: '{{time}}h ago', - daysAgo: (days) => `${days * 24}h ago` -}) - -relativeDate('2017-06-25 07:00:00') // '2h ago' -relativeDate('2017-06-24 06:00:00') // '27h ago' -``` - -## Contributing - -Contributions are welcome! Running this project locally requires Git and Node.js. 
- -``` -git clone git@github.com:wildlyinaccurate/tiny-relative-date.git -cd tiny-relative-date/ -npm install -``` - -Once you are set up, you can make changes to files in the `src/`, `spec/` and `translations/` directories. Build any changes you make by running - -``` -npm run build -``` - -And run the tests with - -``` -npm run test -``` - -### Adding new locales - -If you would like to add a new locale, please create a JSON file in the `translations` directory and ensure it has the following keys: - -| Key | Default value ("en" locale) | -|------------------------|-----------------------------| -| `justNow` | just now | -| `secondsAgo` | {{time}} seconds ago | -| `aMinuteAgo` | a minute ago | -| `minutesAgo` | {{time}} minutes ago | -| `anHourAgo` | an hour ago | -| `hoursAgo` | {{time}} hours ago | -| `aDayAgo` | yesterday | -| `daysAgo` | {{time}} days ago | -| `aWeekAgo` | a week ago | -| `weeksAgo` | {{time}} weeks ago | -| `aMonthAgo` | a month ago | -| `monthsAgo` | {{time}} months ago | -| `aYearAgo` | a year ago | -| `yearsAgo` | {{time}} years ago | -| `overAYearAgo` | over a year ago | -| `secondsFromNow` | {{time}} seconds from now | -| `aMinuteFromNow` | a minute from now | -| `minutesFromNow` | {{time}} minutes from now | -| `anHourFromNow` | an hour from now | -| `hoursFromNow` | {{time}} hours from now | -| `aDayFromNow` | tomorrow | -| `daysFromNow` | {{time}} days from now | -| `aWeekFromNow` | a week from now | -| `weeksFromNow` | {{time}} weeks from now | -| `aMonthFromNow` | a month from now | -| `monthsFromNow` | {{time}} months from now | -| `aYearFromNow` | a year from now | -| `yearsFromNow` | {{time}} years from now | -| `overAYearFromNow` | over a year from now | diff --git a/node_modules/treeverse/README.md b/node_modules/treeverse/README.md deleted file mode 100644 index ce08381dae25b..0000000000000 --- a/node_modules/treeverse/README.md +++ /dev/null @@ -1,129 +0,0 @@ -# treeverse - -Walk any kind of tree structure depth- or breadth-first. Supports promises -and advanced map-reduce operations with a very small API. - -Treeverse does not care what kind of tree it is, it will traverse it for -you just fine. It does the right thing with functions that return -Promises, and returns a non-Promise value if your functions don't return -Promises. - -Rather than imposing a specific structure, like requiring you to have child -nodes stored in a `children` array, it calls the supplied `getChildren()` -function, so the children can be anywhere (or not even exist yet!) This -makes it suitable for _creating_ an optimized tree from a set of dependency -manifests, for example. - -## USAGE - -```js -const {depth, breadth} = require('treeverse') - -// depth-first traversal -// returns a promise if any visit/leave function returns a promise -// otherwise returns the result of leave, or visit if no leave function -// provided. -depth({ - // the root node where we start the traversal - tree: rootNode, - - visit (node) { - // optional - // called upon descent into the node. - // return a promise, or a mapped value, or nothing to just leave it - // as-is - }, - leave (node, children) { - // optional - // called as we ascend back to the root of the tree. - // return a promise, or a reduced value, or nothing to leave it as is - // the children array is a list of the child nodes that have been - // visited (and potentially left) already. If the tree is acyclic, - // then leave() will have been called on all of them. If it has - // cycles, then the children may not have been left yet. 
- }, - getChildren (node, nodeResult) { - // required - // return an array of child nodes in the tree, if any exist - // returning a promise is totally ok, of course. - // the first argument is the original value of the node. The second - // argument is the result of visit(node). - }, - filter (node) { - // optional - // return true if the node should be visited, false otherwise - // initial tree is always visited, so this only filters children - // note that filtering a node _also_ filters all of its children. - }, -}) - -// breadth first traversal -// returns a promise if any visit function returns a promise -// otherwise returns the result of the top-level node. -// note that only a visit() function is supported here, since a node's -// children are typically traversed much later in the process. -breadth({ - // the root node where we start the traversal - tree: rootNode, - - visit (node) { - // optional, but a no-op if not provided. - // called when this node is encountered in the traversal. - // return a promise, or a mapped value, or nothing to leave as-is. - }, - getChildren (node, nodeResult) { - // required, same as depth() - }, - filter (node) { - // optional, same as depth() - }, -}) -``` - -## API - -Both functions take a single options object as an argument, and return -either the result value, or a Promise to the result value if the -methods in the options argument ever return a Promise. - -* `treeverse.breadth` - Perform a breadth-first traversal. That is, walk - across node siblings before traversing node children. -* `treeverse.depth` - Perform a depth-first traversal. That is, walk - down into child nodes before traversing siblings. - -## OPTIONS - -All function options can return a Promise or actual value. - -The return value is the result of the top level visit function if no leave -function is provided, or leave. If any method along the way returns a -promise, then the top level function will return a promise which resolves -to the result of visiting (and leaving) the top node in the tree. - -* `tree` - The initial node where the traversal begins. -* `visit(node)` - Function to call upon visiting a node. -* `leave(node, children)` - (Depth only) Function to call upon leaving a - node, once all of its children have been visited, and potentially left. - `children` is an array of child node visit results. If the graph is - cyclic, then some children _may_ have been visited but not left. -* `getChildren(node, nodeResult)` - Get an array of child nodes to process. -* `filter` - Filter out child nodes from the traversal. Note that this - filters the entire branch of the tree, not just that one node. That is, - children of filtered nodes are not traversed either. - -## STACK DEPTH WARNING - -When a `leave` method is specified, then recursion is used, because -maintaining state otherwise is challenging. This means that using `leave` -with a synchronous depth first traversal of very deeply nested trees will -result in stack overflow errors. - -To avoid this, either make one or more of the functions async, or do all of -the work in the `visit` method. - -Breadth-first traversal always uses a loop, and is stack-safe. - -It is _possible_ to implement depth first traversal with a leave method -using a loop rather than recursion, but maintaining the `leave(node, -[children])` API surface would be challenging, and is not implemented at -this time. 
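
To make the options above concrete, here is a tiny sketch that walks an
invented in-memory tree (the `deps` field is just this example's way of
storing children) and tallies a value in `visit`:

```js
const { depth } = require('treeverse')

const tree = {
  name: 'root', size: 1,
  deps: [
    { name: 'a', size: 2, deps: [] },
    { name: 'b', size: 3, deps: [{ name: 'c', size: 4, deps: [] }] },
  ],
}

let total = 0
depth({
  tree,
  visit (node) { total += node.size },     // called once per node
  getChildren (node) { return node.deps }, // children live wherever you say
})

console.log(total) // 10
```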
diff --git a/node_modules/tunnel-agent/README.md b/node_modules/tunnel-agent/README.md deleted file mode 100644 index bb533d56b1aa2..0000000000000 --- a/node_modules/tunnel-agent/README.md +++ /dev/null @@ -1,4 +0,0 @@ -tunnel-agent -============ - -HTTP proxy tunneling agent. Formerly part of mikeal/request, now a standalone module. diff --git a/node_modules/tweetnacl/.npmignore b/node_modules/tweetnacl/.npmignore deleted file mode 100644 index 7d98dcbd2d360..0000000000000 --- a/node_modules/tweetnacl/.npmignore +++ /dev/null @@ -1,4 +0,0 @@ -.eslintrc -.travis.yml -bower.json -test diff --git a/node_modules/tweetnacl/CHANGELOG.md b/node_modules/tweetnacl/CHANGELOG.md deleted file mode 100644 index 92a4fdc56ac53..0000000000000 --- a/node_modules/tweetnacl/CHANGELOG.md +++ /dev/null @@ -1,221 +0,0 @@ -TweetNaCl.js Changelog -====================== - - -v0.14.5 -------- - -* Fixed incomplete return types in TypeScript typings. -* Replaced COPYING.txt with LICENSE file, which now has public domain dedication - text from The Unlicense. License fields in package.json and bower.json have - been set to "Unlicense". The project was and will be in the public domain -- - this change just makes it easier for automated tools to know about this fact by - using the widely recognized and SPDX-compatible template for public domain - dedication. - - -v0.14.4 -------- - -* Added TypeScript type definitions (contributed by @AndSDev). -* Improved benchmarking code. - - -v0.14.3 -------- - -Fixed a bug in the fast version of Poly1305 and brought it back. - -Thanks to @floodyberry for promptly responding and fixing the original C code: - -> "The issue was not properly detecting if st->h was >= 2^130 - 5, coupled with -> [testing mistake] not catching the failure. The chance of the bug affecting -> anything in the real world is essentially zero luckily, but it's good to have -> it fixed." - -https://github.com/floodyberry/poly1305-donna/issues/2#issuecomment-202698577 - - -v0.14.2 -------- - -Switched Poly1305 fast version back to original (slow) version due to a bug. - - -v0.14.1 -------- - -No code changes, just tweaked packaging and added COPYING.txt. - - -v0.14.0 -------- - -* **Breaking change!** All functions from `nacl.util` have been removed. These - functions are no longer available: - - nacl.util.decodeUTF8 - nacl.util.encodeUTF8 - nacl.util.decodeBase64 - nacl.util.encodeBase64 - - If want to continue using them, you can include - <https://github.com/dchest/tweetnacl-util-js> package: - - <script src="nacl.min.js"></script> - <script src="nacl-util.min.js"></script> - - or - - var nacl = require('tweetnacl'); - nacl.util = require('tweetnacl-util'); - - However it is recommended to use better packages that have wider - compatibility and better performance. Functions from `nacl.util` were never - intended to be robust solution for string conversion and were included for - convenience: cryptography library is not the right place for them. - - Currently calling these functions will throw error pointing to - `tweetnacl-util-js` (in the next version this error message will be removed). - -* Improved detection of available random number generators, making it possible - to use `nacl.randomBytes` and related functions in Web Workers without - changes. - -* Changes to testing (see README). - - -v0.13.3 -------- - -No code changes. - -* Reverted license field in package.json to "Public domain". - -* Fixed typo in README. - - -v0.13.2 -------- - -* Fixed undefined variable bug in fast version of Poly1305. 
No worries, this - bug was *never* triggered. - -* Specified CC0 public domain dedication. - -* Updated development dependencies. - - -v0.13.1 -------- - -* Exclude `crypto` and `buffer` modules from browserify builds. - - -v0.13.0 -------- - -* Made `nacl-fast` the default version in NPM package. Now - `require("tweetnacl")` will use fast version; to get the original version, - use `require("tweetnacl/nacl.js")`. - -* Cleanup temporary array after generating random bytes. - - -v0.12.2 -------- - -* Improved performance of curve operations, making `nacl.scalarMult`, `nacl.box`, - `nacl.sign` and related functions up to 3x faster in `nacl-fast` version. - - -v0.12.1 -------- - -* Significantly improved performance of Salsa20 (~1.5x faster) and - Poly1305 (~3.5x faster) in `nacl-fast` version. - - -v0.12.0 -------- - -* Instead of using the given secret key directly, TweetNaCl.js now copies it to - a new array in `nacl.box.keyPair.fromSecretKey` and - `nacl.sign.keyPair.fromSecretKey`. - - -v0.11.2 -------- - -* Added new constant: `nacl.sign.seedLength`. - - -v0.11.1 -------- - -* Even faster hash for both short and long inputs (in `nacl-fast`). - - -v0.11.0 -------- - -* Implement `nacl.sign.keyPair.fromSeed` to enable creation of sign key pairs - deterministically from a 32-byte seed. (It behaves like - [libsodium's](http://doc.libsodium.org/public-key_cryptography/public-key_signatures.html) - `crypto_sign_seed_keypair`: the seed becomes a secret part of the secret key.) - -* Fast version now has an improved hash implementation that is 2x-5x faster. - -* Fixed benchmarks, which may have produced incorrect measurements. - - -v0.10.1 -------- - -* Exported undocumented `nacl.lowlevel.crypto_core_hsalsa20`. - - -v0.10.0 -------- - -* **Signature API breaking change!** `nacl.sign` and `nacl.sign.open` now deal - with signed messages, and new `nacl.sign.detached` and - `nacl.sign.detached.verify` are available. - - Previously, `nacl.sign` returned a signature, and `nacl.sign.open` accepted a - message and "detached" signature. This was unlike NaCl's API, which dealt with - signed messages (concatenation of signature and message). - - The new API is: - - nacl.sign(message, secretKey) -> signedMessage - nacl.sign.open(signedMessage, publicKey) -> message | null - - Since detached signatures are common, two new API functions were introduced: - - nacl.sign.detached(message, secretKey) -> signature - nacl.sign.detached.verify(message, signature, publicKey) -> true | false - - (Note that it's `verify`, not `open`, and it returns a boolean value, unlike - `open`, which returns an "unsigned" message.) - -* NPM package now comes without `test` directory to keep it small. - - -v0.9.2 ------- - -* Improved documentation. -* Fast version: increased theoretical message size limit from 2^32-1 to 2^52 - bytes in Poly1305 (and thus, secretbox and box). However this has no impact - in practice since JavaScript arrays or ArrayBuffers are limited to 32-bit - indexes, and most implementations won't allocate more than a gigabyte or so. - (Obviously, there are no tests for the correctness of implementation.) Also, - it's not recommended to use messages that large without splitting them into - smaller packets anyway. 
- - -v0.9.1 ------- - -* Initial release diff --git a/node_modules/tweetnacl/README.md b/node_modules/tweetnacl/README.md deleted file mode 100644 index ffb6871d36c1b..0000000000000 --- a/node_modules/tweetnacl/README.md +++ /dev/null @@ -1,459 +0,0 @@ -TweetNaCl.js -============ - -Port of [TweetNaCl](http://tweetnacl.cr.yp.to) / [NaCl](http://nacl.cr.yp.to/) -to JavaScript for modern browsers and Node.js. Public domain. - -[![Build Status](https://travis-ci.org/dchest/tweetnacl-js.svg?branch=master) -](https://travis-ci.org/dchest/tweetnacl-js) - -Demo: <https://tweetnacl.js.org> - -**:warning: The library is stable and API is frozen, however it has not been -independently reviewed. If you can help reviewing it, please [contact -me](mailto:dmitry@codingrobots.com).** - -Documentation -============= - -* [Overview](#overview) -* [Installation](#installation) -* [Usage](#usage) - * [Public-key authenticated encryption (box)](#public-key-authenticated-encryption-box) - * [Secret-key authenticated encryption (secretbox)](#secret-key-authenticated-encryption-secretbox) - * [Scalar multiplication](#scalar-multiplication) - * [Signatures](#signatures) - * [Hashing](#hashing) - * [Random bytes generation](#random-bytes-generation) - * [Constant-time comparison](#constant-time-comparison) -* [System requirements](#system-requirements) -* [Development and testing](#development-and-testing) -* [Benchmarks](#benchmarks) -* [Contributors](#contributors) -* [Who uses it](#who-uses-it) - - -Overview --------- - -The primary goal of this project is to produce a translation of TweetNaCl to -JavaScript which is as close as possible to the original C implementation, plus -a thin layer of idiomatic high-level API on top of it. - -There are two versions, you can use either of them: - -* `nacl.js` is the port of TweetNaCl with minimum differences from the - original + high-level API. - -* `nacl-fast.js` is like `nacl.js`, but with some functions replaced with - faster versions. - - -Installation ------------- - -You can install TweetNaCl.js via a package manager: - -[Bower](http://bower.io): - - $ bower install tweetnacl - -[NPM](https://www.npmjs.org/): - - $ npm install tweetnacl - -or [download source code](https://github.com/dchest/tweetnacl-js/releases). - - -Usage ------ - -All API functions accept and return bytes as `Uint8Array`s. If you need to -encode or decode strings, use functions from -<https://github.com/dchest/tweetnacl-util-js> or one of the more robust codec -packages. - -In Node.js v4 and later `Buffer` objects are backed by `Uint8Array`s, so you -can freely pass them to TweetNaCl.js functions as arguments. The returned -objects are still `Uint8Array`s, so if you need `Buffer`s, you'll have to -convert them manually; make sure to convert using copying: `new Buffer(array)`, -instead of sharing: `new Buffer(array.buffer)`, because some functions return -subarrays of their buffers. - - -### Public-key authenticated encryption (box) - -Implements *curve25519-xsalsa20-poly1305*. - -#### nacl.box.keyPair() - -Generates a new random key pair for box and returns it as an object with -`publicKey` and `secretKey` members: - - { - publicKey: ..., // Uint8Array with 32-byte public key - secretKey: ... // Uint8Array with 32-byte secret key - } - - -#### nacl.box.keyPair.fromSecretKey(secretKey) - -Returns a key pair for box with public key corresponding to the given secret -key. 
- -#### nacl.box(message, nonce, theirPublicKey, mySecretKey) - -Encrypt and authenticates message using peer's public key, our secret key, and -the given nonce, which must be unique for each distinct message for a key pair. - -Returns an encrypted and authenticated message, which is -`nacl.box.overheadLength` longer than the original message. - -#### nacl.box.open(box, nonce, theirPublicKey, mySecretKey) - -Authenticates and decrypts the given box with peer's public key, our secret -key, and the given nonce. - -Returns the original message, or `false` if authentication fails. - -#### nacl.box.before(theirPublicKey, mySecretKey) - -Returns a precomputed shared key which can be used in `nacl.box.after` and -`nacl.box.open.after`. - -#### nacl.box.after(message, nonce, sharedKey) - -Same as `nacl.box`, but uses a shared key precomputed with `nacl.box.before`. - -#### nacl.box.open.after(box, nonce, sharedKey) - -Same as `nacl.box.open`, but uses a shared key precomputed with `nacl.box.before`. - -#### nacl.box.publicKeyLength = 32 - -Length of public key in bytes. - -#### nacl.box.secretKeyLength = 32 - -Length of secret key in bytes. - -#### nacl.box.sharedKeyLength = 32 - -Length of precomputed shared key in bytes. - -#### nacl.box.nonceLength = 24 - -Length of nonce in bytes. - -#### nacl.box.overheadLength = 16 - -Length of overhead added to box compared to original message. - - -### Secret-key authenticated encryption (secretbox) - -Implements *xsalsa20-poly1305*. - -#### nacl.secretbox(message, nonce, key) - -Encrypt and authenticates message using the key and the nonce. The nonce must -be unique for each distinct message for this key. - -Returns an encrypted and authenticated message, which is -`nacl.secretbox.overheadLength` longer than the original message. - -#### nacl.secretbox.open(box, nonce, key) - -Authenticates and decrypts the given secret box using the key and the nonce. - -Returns the original message, or `false` if authentication fails. - -#### nacl.secretbox.keyLength = 32 - -Length of key in bytes. - -#### nacl.secretbox.nonceLength = 24 - -Length of nonce in bytes. - -#### nacl.secretbox.overheadLength = 16 - -Length of overhead added to secret box compared to original message. - - -### Scalar multiplication - -Implements *curve25519*. - -#### nacl.scalarMult(n, p) - -Multiplies an integer `n` by a group element `p` and returns the resulting -group element. - -#### nacl.scalarMult.base(n) - -Multiplies an integer `n` by a standard group element and returns the resulting -group element. - -#### nacl.scalarMult.scalarLength = 32 - -Length of scalar in bytes. - -#### nacl.scalarMult.groupElementLength = 32 - -Length of group element in bytes. - - -### Signatures - -Implements [ed25519](http://ed25519.cr.yp.to). - -#### nacl.sign.keyPair() - -Generates new random key pair for signing and returns it as an object with -`publicKey` and `secretKey` members: - - { - publicKey: ..., // Uint8Array with 32-byte public key - secretKey: ... // Uint8Array with 64-byte secret key - } - -#### nacl.sign.keyPair.fromSecretKey(secretKey) - -Returns a signing key pair with public key corresponding to the given -64-byte secret key. The secret key must have been generated by -`nacl.sign.keyPair` or `nacl.sign.keyPair.fromSeed`. - -#### nacl.sign.keyPair.fromSeed(seed) - -Returns a new signing key pair generated deterministically from a 32-byte seed. -The seed must contain enough entropy to be secure. 
This method is not -recommended for general use: instead, use `nacl.sign.keyPair` to generate a new -key pair from a random seed. - -#### nacl.sign(message, secretKey) - -Signs the message using the secret key and returns a signed message. - -#### nacl.sign.open(signedMessage, publicKey) - -Verifies the signed message and returns the message without signature. - -Returns `null` if verification failed. - -#### nacl.sign.detached(message, secretKey) - -Signs the message using the secret key and returns a signature. - -#### nacl.sign.detached.verify(message, signature, publicKey) - -Verifies the signature for the message and returns `true` if verification -succeeded or `false` if it failed. - -#### nacl.sign.publicKeyLength = 32 - -Length of signing public key in bytes. - -#### nacl.sign.secretKeyLength = 64 - -Length of signing secret key in bytes. - -#### nacl.sign.seedLength = 32 - -Length of seed for `nacl.sign.keyPair.fromSeed` in bytes. - -#### nacl.sign.signatureLength = 64 - -Length of signature in bytes. - - -### Hashing - -Implements *SHA-512*. - -#### nacl.hash(message) - -Returns SHA-512 hash of the message. - -#### nacl.hash.hashLength = 64 - -Length of hash in bytes. - - -### Random bytes generation - -#### nacl.randomBytes(length) - -Returns a `Uint8Array` of the given length containing random bytes of -cryptographic quality. - -**Implementation note** - -TweetNaCl.js uses the following methods to generate random bytes, -depending on the platform it runs on: - -* `window.crypto.getRandomValues` (WebCrypto standard) -* `window.msCrypto.getRandomValues` (Internet Explorer 11) -* `crypto.randomBytes` (Node.js) - -If the platform doesn't provide a suitable PRNG, the following functions, -which require random numbers, will throw exception: - -* `nacl.randomBytes` -* `nacl.box.keyPair` -* `nacl.sign.keyPair` - -Other functions are deterministic and will continue working. - -If a platform you are targeting doesn't implement secure random number -generator, but you somehow have a cryptographically-strong source of entropy -(not `Math.random`!), and you know what you are doing, you can plug it into -TweetNaCl.js like this: - - nacl.setPRNG(function(x, n) { - // ... copy n random bytes into x ... - }); - -Note that `nacl.setPRNG` *completely replaces* internal random byte generator -with the one provided. - - -### Constant-time comparison - -#### nacl.verify(x, y) - -Compares `x` and `y` in constant time and returns `true` if their lengths are -non-zero and equal, and their contents are equal. - -Returns `false` if either of the arguments has zero length, or arguments have -different lengths, or their contents differ. - - -System requirements -------------------- - -TweetNaCl.js supports modern browsers that have a cryptographically secure -pseudorandom number generator and typed arrays, including the latest versions -of: - -* Chrome -* Firefox -* Safari (Mac, iOS) -* Internet Explorer 11 - -Other systems: - -* Node.js - - -Development and testing ------------------------- - -Install NPM modules needed for development: - - $ npm install - -To build minified versions: - - $ npm run build - -Tests use minified version, so make sure to rebuild it every time you change -`nacl.js` or `nacl-fast.js`. - -### Testing - -To run tests in Node.js: - - $ npm run test-node - -By default all tests described here work on `nacl.min.js`. To test other -versions, set environment variable `NACL_SRC` to the file name you want to test. 
-For example, the following command will test fast minified version: - - $ NACL_SRC=nacl-fast.min.js npm run test-node - -To run full suite of tests in Node.js, including comparing outputs of -JavaScript port to outputs of the original C version: - - $ npm run test-node-all - -To prepare tests for browsers: - - $ npm run build-test-browser - -and then open `test/browser/test.html` (or `test/browser/test-fast.html`) to -run them. - -To run headless browser tests with `tape-run` (powered by Electron): - - $ npm run test-browser - -(If you get `Error: spawn ENOENT`, install *xvfb*: `sudo apt-get install xvfb`.) - -To run tests in both Node and Electron: - - $ npm test - -### Benchmarking - -To run benchmarks in Node.js: - - $ npm run bench - $ NACL_SRC=nacl-fast.min.js npm run bench - -To run benchmarks in a browser, open `test/benchmark/bench.html` (or -`test/benchmark/bench-fast.html`). - - -Benchmarks ----------- - -For reference, here are benchmarks from MacBook Pro (Retina, 13-inch, Mid 2014) -laptop with 2.6 GHz Intel Core i5 CPU (Intel) in Chrome 53/OS X and Xiaomi Redmi -Note 3 smartphone with 1.8 GHz Qualcomm Snapdragon 650 64-bit CPU (ARM) in -Chrome 52/Android: - -| | nacl.js Intel | nacl-fast.js Intel | nacl.js ARM | nacl-fast.js ARM | -| ------------- |:-------------:|:-------------------:|:-------------:|:-----------------:| -| salsa20 | 1.3 MB/s | 128 MB/s | 0.4 MB/s | 43 MB/s | -| poly1305 | 13 MB/s | 171 MB/s | 4 MB/s | 52 MB/s | -| hash | 4 MB/s | 34 MB/s | 0.9 MB/s | 12 MB/s | -| secretbox 1K | 1113 op/s | 57583 op/s | 334 op/s | 14227 op/s | -| box 1K | 145 op/s | 718 op/s | 37 op/s | 368 op/s | -| scalarMult | 171 op/s | 733 op/s | 56 op/s | 380 op/s | -| sign | 77 op/s | 200 op/s | 20 op/s | 61 op/s | -| sign.open | 39 op/s | 102 op/s | 11 op/s | 31 op/s | - -(You can run benchmarks on your devices by clicking on the links at the bottom -of the [home page](https://tweetnacl.js.org)). - -In short, with *nacl-fast.js* and 1024-byte messages you can expect to encrypt and -authenticate more than 57000 messages per second on a typical laptop or more than -14000 messages per second on a $170 smartphone, sign about 200 and verify 100 -messages per second on a laptop or 60 and 30 messages per second on a smartphone, -per CPU core (with Web Workers you can do these operations in parallel), -which is good enough for most applications. - - -Contributors ------------- - -See AUTHORS.md file. 
- - -Third-party libraries based on TweetNaCl.js -------------------------------------------- - -* [forward-secrecy](https://github.com/alax/forward-secrecy) — Axolotl ratchet implementation -* [nacl-stream](https://github.com/dchest/nacl-stream-js) - streaming encryption -* [tweetnacl-auth-js](https://github.com/dchest/tweetnacl-auth-js) — implementation of [`crypto_auth`](http://nacl.cr.yp.to/auth.html) -* [chloride](https://github.com/dominictarr/chloride) - unified API for various NaCl modules - - -Who uses it ------------ - -Some notable users of TweetNaCl.js: - -* [miniLock](http://minilock.io/) -* [Stellar](https://www.stellar.org/) diff --git a/node_modules/typedarray-to-buffer/.travis.yml b/node_modules/typedarray-to-buffer/.travis.yml deleted file mode 100644 index f25afbd2f19c7..0000000000000 --- a/node_modules/typedarray-to-buffer/.travis.yml +++ /dev/null @@ -1,11 +0,0 @@ -language: node_js -node_js: - - lts/* -addons: - sauce_connect: true - hosts: - - airtap.local -env: - global: - - secure: i51rE9rZGHbcZWlL58j3H1qtL23OIV2r0X4TcQKNI3pw2mubdHFJmfPNNO19ItfReu8wwQMxOehKamwaNvqMiKWyHfn/QcThFQysqzgGZ6AgnUbYx9od6XFNDeWd1sVBf7QBAL07y7KWlYGWCwFwWjabSVySzQhEBdisPcskfkI= - - secure: BKq6/5z9LK3KDkTjs7BGeBZ1KsWgz+MsAXZ4P64NSeVGFaBdXU45+ww1mwxXFt5l22/mhyOQZfebQl+kGVqRSZ+DEgQeCymkNZ6CD8c6w6cLuOJXiXwuu/cDM2DD0tfGeu2YZC7yEikP7BqEFwH3D324rRzSGLF2RSAAwkOI7bE= diff --git a/node_modules/typedarray-to-buffer/README.md b/node_modules/typedarray-to-buffer/README.md deleted file mode 100644 index 35761fb5f8bbb..0000000000000 --- a/node_modules/typedarray-to-buffer/README.md +++ /dev/null @@ -1,85 +0,0 @@ -# typedarray-to-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] - -[travis-image]: https://img.shields.io/travis/feross/typedarray-to-buffer/master.svg -[travis-url]: https://travis-ci.org/feross/typedarray-to-buffer -[npm-image]: https://img.shields.io/npm/v/typedarray-to-buffer.svg -[npm-url]: https://npmjs.org/package/typedarray-to-buffer -[downloads-image]: https://img.shields.io/npm/dm/typedarray-to-buffer.svg -[downloads-url]: https://npmjs.org/package/typedarray-to-buffer -[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg -[standard-url]: https://standardjs.com - -#### Convert a typed array to a [Buffer](https://github.com/feross/buffer) without a copy. - -[![saucelabs][saucelabs-image]][saucelabs-url] - -[saucelabs-image]: https://saucelabs.com/browser-matrix/typedarray-to-buffer.svg -[saucelabs-url]: https://saucelabs.com/u/typedarray-to-buffer - -Say you're using the ['buffer'](https://github.com/feross/buffer) module on npm, or -[browserify](http://browserify.org/) and you're working with lots of binary data. - -Unfortunately, sometimes the browser or someone else's API gives you a typed array like -`Uint8Array` to work with and you need to convert it to a `Buffer`. What do you do? - -Of course: `Buffer.from(uint8array)` - -But, alas, every time you do `Buffer.from(uint8array)` **the entire array gets copied**. -The `Buffer` constructor does a copy; this is -defined by the [node docs](http://nodejs.org/api/buffer.html) and the 'buffer' module -matches the node API exactly. - -So, how can we avoid this expensive copy in -[performance critical applications](https://github.com/feross/buffer/issues/22)? 
- -***Simply use this module, of course!*** - -If you have an `ArrayBuffer`, you don't need this module, because -`Buffer.from(arrayBuffer)` -[is already efficient](https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length). - -## install - -```bash -npm install typedarray-to-buffer -``` - -## usage - -To convert a typed array to a `Buffer` **without a copy**, do this: - -```js -var toBuffer = require('typedarray-to-buffer') - -var arr = new Uint8Array([1, 2, 3]) -arr = toBuffer(arr) - -// arr is a buffer now! - -arr.toString() // '\u0001\u0002\u0003' -arr.readUInt16BE(0) // 258 -``` - -## how it works - -If the browser supports typed arrays, then `toBuffer` will **augment the typed array** you -pass in with the `Buffer` methods and return it. See [how does Buffer -work?](https://github.com/feross/buffer#how-does-it-work) for more about how augmentation -works. - -This module uses the typed array's underlying `ArrayBuffer` to back the new `Buffer`. This -respects the "view" on the `ArrayBuffer`, i.e. `byteOffset` and `byteLength`. In other -words, if you do `toBuffer(new Uint32Array([1, 2, 3]))`, then the new `Buffer` will -contain `[1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0]`, **not** `[1, 2, 3]`. And it still doesn't -require a copy. - -If the browser doesn't support typed arrays, then `toBuffer` will create a new `Buffer` -object, copy the data into it, and return it. There's no simple performance optimization -we can do for old browsers. Oh well. - -If this module is used in node, then it will just call `Buffer.from`. This is just for -the convenience of modules that work in both node and the browser. - -## license - -MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org). diff --git a/node_modules/unique-filename/README.md b/node_modules/unique-filename/README.md deleted file mode 100644 index 74b62b2ab4426..0000000000000 --- a/node_modules/unique-filename/README.md +++ /dev/null @@ -1,33 +0,0 @@ -unique-filename -=============== - -Generate a unique filename for use in temporary directories or caches. - -``` -var uniqueFilename = require('unique-filename') - -// returns something like: /tmp/912ec803b2ce49e4a541068d495ab570 -var randomTmpfile = uniqueFilename(os.tmpdir()) - -// returns something like: /tmp/my-test-912ec803b2ce49e4a541068d495ab570 -var randomPrefixedTmpfile = uniqueFilename(os.tmpdir(), 'my-test') - -var uniqueTmpfile = uniqueFilename('/tmp', 'testing', '/my/thing/to/uniq/on') -``` - -### uniqueFilename(*dir*, *fileprefix*, *uniqstr*) → String - -Returns the full path of a unique filename that looks like: -`dir/prefix-7ddd44c0` -or `dir/7ddd44c0` - -*dir* – The path you want the filename in. `os.tmpdir()` is a good choice for this. - -*fileprefix* – A string to append prior to the unique part of the filename. -The parameter is required if *uniqstr* is also passed in but is otherwise -optional and can be `undefined`/`null`/`''`. If present and not empty -then this string plus a hyphen are prepended to the unique part. - -*uniqstr* – Optional, if not passed the unique part of the resulting -filename will be random. If passed in it will be generated from this string -in a reproducable way. 
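A small sketch of the *uniqstr* behaviour described above, assuming the `unique-filename` package is installed; the prefix and key strings here are made up for illustration:

```js
var os = require('os');
var uniqueFilename = require('unique-filename');

// Same dir, prefix, and uniqstr: the unique part is derived from the string,
// so the resulting path is reproducible.
var a = uniqueFilename(os.tmpdir(), 'cache', 'registry.npmjs.org/npm');
var b = uniqueFilename(os.tmpdir(), 'cache', 'registry.npmjs.org/npm');
console.log(a === b); // true

// No uniqstr: the unique part is random, so each call differs.
var c = uniqueFilename(os.tmpdir(), 'cache');
console.log(a === c); // false
```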
diff --git a/node_modules/unique-slug/.travis.yml b/node_modules/unique-slug/.travis.yml deleted file mode 100644 index 5651fce24d898..0000000000000 --- a/node_modules/unique-slug/.travis.yml +++ /dev/null @@ -1,10 +0,0 @@ -language: node_js -sudo: false -before_install: - - "npm -g install npm" -node_js: - - "6" - - "8" - - "10" - - "lts/*" - - "node" diff --git a/node_modules/unique-slug/README.md b/node_modules/unique-slug/README.md deleted file mode 100644 index 87f92f1d1b5f5..0000000000000 --- a/node_modules/unique-slug/README.md +++ /dev/null @@ -1,19 +0,0 @@ -unique-slug -=========== - -Generate a unique character string suitible for use in files and URLs. - -``` -var uniqueSlug = require('unique-slug') - -var randomSlug = uniqueSlug() -var fileSlug = uniqueSlug('/etc/passwd') -``` - -### uniqueSlug(*str*) → String (8 chars) - -If *str* is passed in then the return value will be its murmur hash in -hex. - -If *str* is not passed in, it will be 4 randomly generated bytes -converted into 8 hexadecimal characters. diff --git a/node_modules/uri-js/README.md b/node_modules/uri-js/README.md deleted file mode 100755 index 43e648bbad5c8..0000000000000 --- a/node_modules/uri-js/README.md +++ /dev/null @@ -1,203 +0,0 @@ -# URI.js - -URI.js is an [RFC 3986](http://www.ietf.org/rfc/rfc3986.txt) compliant, scheme extendable URI parsing/validating/resolving library for all JavaScript environments (browsers, Node.js, etc). -It is also compliant with the IRI ([RFC 3987](http://www.ietf.org/rfc/rfc3987.txt)), IDNA ([RFC 5890](http://www.ietf.org/rfc/rfc5890.txt)), IPv6 Address ([RFC 5952](http://www.ietf.org/rfc/rfc5952.txt)), IPv6 Zone Identifier ([RFC 6874](http://www.ietf.org/rfc/rfc6874.txt)) specifications. - -URI.js has an extensive test suite, and works in all (Node.js, web) environments. It weighs in at 6.4kb (gzipped, 17kb deflated). - -## API - -### Parsing - - URI.parse("uri://user:pass@example.com:123/one/two.three?q1=a1&q2=a2#body"); - //returns: - //{ - // scheme : "uri", - // userinfo : "user:pass", - // host : "example.com", - // port : 123, - // path : "/one/two.three", - // query : "q1=a1&q2=a2", - // fragment : "body" - //} - -### Serializing - - URI.serialize({scheme : "http", host : "example.com", fragment : "footer"}) === "http://example.com/#footer" - -### Resolving - - URI.resolve("uri://a/b/c/d?q", "../../g") === "uri://a/g" - -### Normalizing - - URI.normalize("HTTP://ABC.com:80/%7Esmith/home.html") === "http://abc.com/~smith/home.html" - -### Comparison - - URI.equal("example://a/b/c/%7Bfoo%7D", "eXAMPLE://a/./b/../b/%63/%7bfoo%7d") === true - -### IP Support - - //IPv4 normalization - URI.normalize("//192.068.001.000") === "//192.68.1.0" - - //IPv6 normalization - URI.normalize("//[2001:0:0DB8::0:0001]") === "//[2001:0:db8::1]" - - //IPv6 zone identifier support - URI.parse("//[2001:db8::7%25en1]"); - //returns: - //{ - // host : "2001:db8::7%en1" - //} - -### IRI Support - - //convert IRI to URI - URI.serialize(URI.parse("http://examplé.org/rosé")) === "http://xn--exampl-gva.org/ros%C3%A9" - //convert URI to IRI - URI.serialize(URI.parse("http://xn--exampl-gva.org/ros%C3%A9"), {iri:true}) === "http://examplé.org/rosé" - -### Options - -All of the above functions can accept an additional options argument that is an object that can contain one or more of the following properties: - -* `scheme` (string) - - Indicates the scheme that the URI should be treated as, overriding the URI's normal scheme parsing behavior. 
- -* `reference` (string) - - If set to `"suffix"`, it indicates that the URI is in the suffix format, and the validator will use the option's `scheme` property to determine the URI's scheme. - -* `tolerant` (boolean, false) - - If set to `true`, the parser will relax URI resolving rules. - -* `absolutePath` (boolean, false) - - If set to `true`, the serializer will not resolve a relative `path` component. - -* `iri` (boolean, false) - - If set to `true`, the serializer will unescape non-ASCII characters as per [RFC 3987](http://www.ietf.org/rfc/rfc3987.txt). - -* `unicodeSupport` (boolean, false) - - If set to `true`, the parser will unescape non-ASCII characters in the parsed output as per [RFC 3987](http://www.ietf.org/rfc/rfc3987.txt). - -* `domainHost` (boolean, false) - - If set to `true`, the library will treat the `host` component as a domain name, and convert IDNs (International Domain Names) as per [RFC 5891](http://www.ietf.org/rfc/rfc5891.txt). - -## Scheme Extendable - -URI.js supports inserting custom [scheme](http://en.wikipedia.org/wiki/URI_scheme) dependent processing rules. Currently, URI.js has built in support for the following schemes: - -* http \[[RFC 2616](http://www.ietf.org/rfc/rfc2616.txt)\] -* https \[[RFC 2818](http://www.ietf.org/rfc/rfc2818.txt)\] -* ws \[[RFC 6455](http://www.ietf.org/rfc/rfc6455.txt)\] -* wss \[[RFC 6455](http://www.ietf.org/rfc/rfc6455.txt)\] -* mailto \[[RFC 6068](http://www.ietf.org/rfc/rfc6068.txt)\] -* urn \[[RFC 2141](http://www.ietf.org/rfc/rfc2141.txt)\] -* urn:uuid \[[RFC 4122](http://www.ietf.org/rfc/rfc4122.txt)\] - -### HTTP/HTTPS Support - - URI.equal("HTTP://ABC.COM:80", "http://abc.com/") === true - URI.equal("https://abc.com", "HTTPS://ABC.COM:443/") === true - -### WS/WSS Support - - URI.parse("wss://example.com/foo?bar=baz"); - //returns: - //{ - // scheme : "wss", - // host: "example.com", - // resourceName: "/foo?bar=baz", - // secure: true, - //} - - URI.equal("WS://ABC.COM:80/chat#one", "ws://abc.com/chat") === true - -### Mailto Support - - URI.parse("mailto:alpha@example.com,bravo@example.com?subject=SUBSCRIBE&body=Sign%20me%20up!"); - //returns: - //{ - // scheme : "mailto", - // to : ["alpha@example.com", "bravo@example.com"], - // subject : "SUBSCRIBE", - // body : "Sign me up!" 
- //} - - URI.serialize({ - scheme : "mailto", - to : ["alpha@example.com"], - subject : "REMOVE", - body : "Please remove me", - headers : { - cc : "charlie@example.com" - } - }) === "mailto:alpha@example.com?cc=charlie@example.com&subject=REMOVE&body=Please%20remove%20me" - -### URN Support - - URI.parse("urn:example:foo"); - //returns: - //{ - // scheme : "urn", - // nid : "example", - // nss : "foo", - //} - -#### URN UUID Support - - URI.parse("urn:uuid:f81d4fae-7dec-11d0-a765-00a0c91e6bf6"); - //returns: - //{ - // scheme : "urn", - // nid : "uuid", - // uuid : "f81d4fae-7dec-11d0-a765-00a0c91e6bf6", - //} - -## Usage - -To load in a browser, use the following tag: - - <script type="text/javascript" src="uri-js/dist/es5/uri.all.min.js"></script> - -To load in a CommonJS/Module environment, first install with npm/yarn by running on the command line: - - npm install uri-js - # OR - yarn add uri-js - -Then, in your code, load it using: - - const URI = require("uri-js"); - -If you are writing your code in ES6+ (ESNEXT) or TypeScript, you would load it using: - - import * as URI from "uri-js"; - -Or you can load just what you need using named exports: - - import { parse, serialize, resolve, resolveComponents, normalize, equal, removeDotSegments, pctEncChar, pctDecChars, escapeComponent, unescapeComponent } from "uri-js"; - -## Breaking changes - -### Breaking changes from 3.x - -URN parsing has been completely changed to better align with the specification. Scheme is now always `urn`, but has two new properties: `nid` which contains the Namspace Identifier, and `nss` which contains the Namespace Specific String. The `nss` property will be removed by higher order scheme handlers, such as the UUID URN scheme handler. - -The UUID of a URN can now be found in the `uuid` property. - -### Breaking changes from 2.x - -URI validation has been removed as it was slow, exposed a vulnerabilty, and was generally not useful. - -### Breaking changes from 1.x - -The `errors` array on parsed components is now an `error` string. diff --git a/node_modules/util-deprecate/README.md b/node_modules/util-deprecate/README.md deleted file mode 100644 index 75622fa7c250a..0000000000000 --- a/node_modules/util-deprecate/README.md +++ /dev/null @@ -1,53 +0,0 @@ -util-deprecate -============== -### The Node.js `util.deprecate()` function with browser support - -In Node.js, this module simply re-exports the `util.deprecate()` function. - -In the web browser (i.e. via browserify), a browser-specific implementation -of the `util.deprecate()` function is used. - - -## API - -A `deprecate()` function is the only thing exposed by this module. - -``` javascript -// setup: -exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); - - -// users see: -foo(); -// foo() is deprecated, use bar() instead -foo(); -foo(); -``` - - -## License - -(The MIT License) - -Copyright (c) 2014 Nathan Rajlich <nathan@tootallnate.net> - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/uuid/CHANGELOG.md b/node_modules/uuid/CHANGELOG.md deleted file mode 100644 index f811b8a0cb91c..0000000000000 --- a/node_modules/uuid/CHANGELOG.md +++ /dev/null @@ -1,119 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -## [3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16) - - -### Features - -* rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes [#338](https://github.com/uuidjs/uuid/issues/338) - -### [3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19) - -<a name="3.3.2"></a> -## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28) - - -### Bug Fixes - -* typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877)) - - - -<a name="3.3.1"></a> -## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28) - - -### Bug Fixes - -* fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2)) - - - -<a name="3.3.0"></a> -# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22) - - -### Bug Fixes - -* assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc)) -* fix [#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4)) -* Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331)) -* mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) ([c47702c](https://github.com/uuidjs/uuid/commit/c47702c)) - -### Features - -* enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182)) - - -<a name="3.2.1"></a> -## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16) - - -### Bug Fixes - -* use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) - - - -<a name="3.2.0"></a> -# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16) - - -### Bug Fixes - -* remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824)) -* use msCrypto if available. 
Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) - - -### Features - -* Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726)) - - -# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17) - -### Bug Fixes - -* (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. (#183) -* Fix typo (#178) -* Simple typo fix (#165) - -### Features -* v5 support in CLI (#197) -* V5 support (#188) - - -# 3.0.1 (2016-11-28) - -* split uuid versions into separate files - - -# 3.0.0 (2016-11-17) - -* remove .parse and .unparse - - -# 2.0.0 - -* Removed uuid.BufferClass - - -# 1.4.0 - -* Improved module context detection -* Removed public RNG functions - - -# 1.3.2 - -* Improve tests and handling of v1() options (Issue #24) -* Expose RNG option to allow for perf testing with different generators - - -# 1.3.0 - -* Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! -* Support for node.js crypto API -* De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/node_modules/uuid/README.md b/node_modules/uuid/README.md deleted file mode 100644 index 1752e4751fc92..0000000000000 --- a/node_modules/uuid/README.md +++ /dev/null @@ -1,276 +0,0 @@ -<!-- - -- This file is auto-generated from README_js.md. Changes should be made there. - --> - -# uuid [![Build Status](https://secure.travis-ci.org/kelektiv/node-uuid.svg?branch=master)](http://travis-ci.org/kelektiv/node-uuid) # - -Simple, fast generation of [RFC4122](http://www.ietf.org/rfc/rfc4122.txt) UUIDS. - -Features: - -* Support for version 1, 3, 4 and 5 UUIDs -* Cross-platform -* Uses cryptographically-strong random number APIs (when available) -* Zero-dependency, small footprint (... but not [this small](https://gist.github.com/982883)) - -[**Deprecation warning**: The use of `require('uuid')` is deprecated and will not be -supported after version 3.x of this module. Instead, use `require('uuid/[v1|v3|v4|v5]')` as shown in the examples below.] - -## Quickstart - CommonJS (Recommended) - -```shell -npm install uuid -``` - -Then generate your uuid version of choice ... - -Version 1 (timestamp): - -```javascript -const uuidv1 = require('uuid/v1'); -uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' - -``` - -Version 3 (namespace): - -```javascript -const uuidv3 = require('uuid/v3'); - -// ... using predefined DNS namespace (for domain names) -uuidv3('hello.example.com', uuidv3.DNS); // ⇨ '9125a8dc-52ee-365b-a5aa-81b0b3681cf6' - -// ... using predefined URL namespace (for, well, URLs) -uuidv3('http://example.com/hello', uuidv3.URL); // ⇨ 'c6235813-3ba4-3801-ae84-e0a6ebb7d138' - -// ... using a custom namespace -// -// Note: Custom namespaces should be a UUID string specific to your application! -// E.g. the one here was generated using this modules `uuid` CLI. -const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; -uuidv3('Hello, World!', MY_NAMESPACE); // ⇨ 'e8b5a51d-11c8-3310-a6ab-367563f20686' - -``` - -Version 4 (random): - -```javascript -const uuidv4 = require('uuid/v4'); -uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' - -``` - -Version 5 (namespace): - -```javascript -const uuidv5 = require('uuid/v5'); - -// ... 
using predefined DNS namespace (for domain names) -uuidv5('hello.example.com', uuidv5.DNS); // ⇨ 'fdda765f-fc57-5604-a269-52a7df8164ec' - -// ... using predefined URL namespace (for, well, URLs) -uuidv5('http://example.com/hello', uuidv5.URL); // ⇨ '3bbcee75-cecc-5b56-8031-b6641c1ed1f1' - -// ... using a custom namespace -// -// Note: Custom namespaces should be a UUID string specific to your application! -// E.g. the one here was generated using this modules `uuid` CLI. -const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; -uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681' - -``` - -## API - -### Version 1 - -```javascript -const uuidv1 = require('uuid/v1'); - -// Incantations -uuidv1(); -uuidv1(options); -uuidv1(options, buffer, offset); -``` - -Generate and return a RFC4122 v1 (timestamp-based) UUID. - -* `options` - (Object) Optional uuid state to apply. Properties may include: - - * `node` - (Array) Node id as Array of 6 bytes (per 4.1.6). Default: Randomly generated ID. See note 1. - * `clockseq` - (Number between 0 - 0x3fff) RFC clock sequence. Default: An internally maintained clockseq is used. - * `msecs` - (Number) Time in milliseconds since unix Epoch. Default: The current time is used. - * `nsecs` - (Number between 0-9999) additional time, in 100-nanosecond units. Ignored if `msecs` is unspecified. Default: internal uuid counter is used, as per 4.2.1.2. - -* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. -* `offset` - (Number) Starting index in `buffer` at which to begin writing. - -Returns `buffer`, if specified, otherwise the string form of the UUID - -Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process. - -Example: Generate string UUID with fully-specified options - -```javascript -const v1options = { - node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], - clockseq: 0x1234, - msecs: new Date('2011-11-01').getTime(), - nsecs: 5678 -}; -uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab' - -``` - -Example: In-place generation of two binary IDs - -```javascript -// Generate two ids in an array -const arr = new Array(); -uuidv1(null, arr, 0); // ⇨ - // [ - // 44, 94, 164, 192, 64, 103, - // 17, 233, 146, 52, 155, 29, - // 235, 77, 59, 125 - // ] -uuidv1(null, arr, 16); // ⇨ - // [ - // 44, 94, 164, 192, 64, 103, 17, 233, - // 146, 52, 155, 29, 235, 77, 59, 125, - // 44, 94, 164, 193, 64, 103, 17, 233, - // 146, 52, 155, 29, 235, 77, 59, 125 - // ] - -``` - -### Version 3 - -```javascript -const uuidv3 = require('uuid/v3'); - -// Incantations -uuidv3(name, namespace); -uuidv3(name, namespace, buffer); -uuidv3(name, namespace, buffer, offset); -``` - -Generate and return a RFC4122 v3 UUID. - -* `name` - (String | Array[]) "name" to create UUID with -* `namespace` - (String | Array[]) "namespace" UUID either as a String or Array[16] of byte values -* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. -* `offset` - (Number) Starting index in `buffer` at which to begin writing. 
Default = 0 - -Returns `buffer`, if specified, otherwise the string form of the UUID - -Example: - -```javascript -uuidv3('hello world', MY_NAMESPACE); // ⇨ '042ffd34-d989-321c-ad06-f60826172424' - -``` - -### Version 4 - -```javascript -const uuidv4 = require('uuid/v4') - -// Incantations -uuidv4(); -uuidv4(options); -uuidv4(options, buffer, offset); -``` - -Generate and return a RFC4122 v4 UUID. - -* `options` - (Object) Optional uuid state to apply. Properties may include: - * `random` - (Number[16]) Array of 16 numbers (0-255) to use in place of randomly generated values - * `rng` - (Function) Random # generator function that returns an Array[16] of byte values (0-255) -* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. -* `offset` - (Number) Starting index in `buffer` at which to begin writing. - -Returns `buffer`, if specified, otherwise the string form of the UUID - -Example: Generate string UUID with predefined `random` values - -```javascript -const v4options = { - random: [ - 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea, - 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36 - ] -}; -uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836' - -``` - -Example: Generate two IDs in a single buffer - -```javascript -const buffer = new Array(); -uuidv4(null, buffer, 0); // ⇨ - // [ - // 155, 29, 235, 77, 59, - // 125, 75, 173, 155, 221, - // 43, 13, 123, 61, 203, - // 109 - // ] -uuidv4(null, buffer, 16); // ⇨ - // [ - // 155, 29, 235, 77, 59, 125, 75, 173, - // 155, 221, 43, 13, 123, 61, 203, 109, - // 27, 157, 107, 205, 187, 253, 75, 45, - // 155, 93, 171, 141, 251, 189, 75, 237 - // ] - -``` - -### Version 5 - -```javascript -const uuidv5 = require('uuid/v5'); - -// Incantations -uuidv5(name, namespace); -uuidv5(name, namespace, buffer); -uuidv5(name, namespace, buffer, offset); -``` - -Generate and return a RFC4122 v5 UUID. - -* `name` - (String | Array[]) "name" to create UUID with -* `namespace` - (String | Array[]) "namespace" UUID either as a String or Array[16] of byte values -* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. -* `offset` - (Number) Starting index in `buffer` at which to begin writing. Default = 0 - -Returns `buffer`, if specified, otherwise the string form of the UUID - -Example: - -```javascript -uuidv5('hello world', MY_NAMESPACE); // ⇨ '9f282611-e0fd-5650-8953-89c8e342da0b' - -``` - -## Command Line - -UUIDs can be generated from the command line with the `uuid` command. - -```shell -$ uuid -ddeb27fb-d9a0-4624-be4d-4615062daed4 - -$ uuid v1 -02d37060-d446-11e7-a9fa-7bdae751ebe1 -``` - -Type `uuid --help` for usage details - -## Testing - -```shell -npm test -``` - ----- -Markdown generated from [README_js.md](README_js.md) by [![RunMD Logo](http://i.imgur.com/h0FVyzU.png)](https://github.com/broofa/runmd) \ No newline at end of file diff --git a/node_modules/validate-npm-package-license/README.md b/node_modules/validate-npm-package-license/README.md deleted file mode 100644 index 702bc7b4f3ba3..0000000000000 --- a/node_modules/validate-npm-package-license/README.md +++ /dev/null @@ -1,113 +0,0 @@ -validate-npm-package-license -============================ - -Give me a string and I'll tell you if it's a valid npm package license string. 
- -```javascript -var valid = require('validate-npm-package-license'); -``` - -SPDX license identifiers are valid license strings: - -```javascript - -var assert = require('assert'); -var validSPDXExpression = { - validForNewPackages: true, - validForOldPackages: true, - spdx: true -}; - -assert.deepEqual(valid('MIT'), validSPDXExpression); -assert.deepEqual(valid('BSD-2-Clause'), validSPDXExpression); -assert.deepEqual(valid('Apache-2.0'), validSPDXExpression); -assert.deepEqual(valid('ISC'), validSPDXExpression); -``` -The function will return a warning and suggestion for nearly-correct license identifiers: - -```javascript -assert.deepEqual( - valid('Apache 2.0'), - { - validForOldPackages: false, - validForNewPackages: false, - warnings: [ - 'license should be ' + - 'a valid SPDX license expression (without "LicenseRef"), ' + - '"UNLICENSED", or ' + - '"SEE LICENSE IN <filename>"', - 'license is similar to the valid expression "Apache-2.0"' - ] - } -); -``` - -SPDX expressions are valid, too ... - -```javascript -// Simple SPDX license expression for dual licensing -assert.deepEqual( - valid('(GPL-3.0-only OR BSD-2-Clause)'), - validSPDXExpression -); -``` - -... except if they contain `LicenseRef`: - -```javascript -var warningAboutLicenseRef = { - validForOldPackages: false, - validForNewPackages: false, - spdx: true, - warnings: [ - 'license should be ' + - 'a valid SPDX license expression (without "LicenseRef"), ' + - '"UNLICENSED", or ' + - '"SEE LICENSE IN <filename>"', - ] -}; - -assert.deepEqual( - valid('LicenseRef-Made-Up'), - warningAboutLicenseRef -); - -assert.deepEqual( - valid('(MIT OR LicenseRef-Made-Up)'), - warningAboutLicenseRef -); -``` - -If you can't describe your licensing terms with standardized SPDX identifiers, put the terms in a file in the package and point users there: - -```javascript -assert.deepEqual( - valid('SEE LICENSE IN LICENSE.txt'), - { - validForNewPackages: true, - validForOldPackages: true, - inFile: 'LICENSE.txt' - } -); - -assert.deepEqual( - valid('SEE LICENSE IN license.md'), - { - validForNewPackages: true, - validForOldPackages: true, - inFile: 'license.md' - } -); -``` - -If there aren't any licensing terms, use `UNLICENSED`: - -```javascript -var unlicensed = { - validForNewPackages: true, - validForOldPackages: true, - unlicensed: true -}; -assert.deepEqual(valid('UNLICENSED'), unlicensed); -assert.deepEqual(valid('UNLICENCED'), unlicensed); -``` diff --git a/node_modules/validate-npm-package-name/.npmignore b/node_modules/validate-npm-package-name/.npmignore deleted file mode 100644 index 3c3629e647f5d..0000000000000 --- a/node_modules/validate-npm-package-name/.npmignore +++ /dev/null @@ -1 +0,0 @@ -node_modules diff --git a/node_modules/validate-npm-package-name/.travis.yml b/node_modules/validate-npm-package-name/.travis.yml deleted file mode 100644 index 54de0d2d1590e..0000000000000 --- a/node_modules/validate-npm-package-name/.travis.yml +++ /dev/null @@ -1,6 +0,0 @@ -sudo: false -language: node_js -node_js: - - '0.10' - - '4' - - '6' diff --git a/node_modules/validate-npm-package-name/README.md b/node_modules/validate-npm-package-name/README.md deleted file mode 100644 index 95d04a4c81717..0000000000000 --- a/node_modules/validate-npm-package-name/README.md +++ /dev/null @@ -1,120 +0,0 @@ -# validate-npm-package-name - -Give me a string and I'll tell you if it's a valid `npm` package name. 
- -This package exports a single synchronous function that takes a `string` as -input and returns an object with two properties: - -- `validForNewPackages` :: `Boolean` -- `validForOldPackages` :: `Boolean` - -## Contents - -- [Naming rules](#naming-rules) -- [Examples](#examples) - + [Valid Names](#valid-names) - + [Invalid Names](#invalid-names) -- [Legacy Names](#legacy-names) -- [Tests](#tests) -- [License](#license) - -## Naming Rules - -Below is a list of rules that valid `npm` package name should conform to. - -- package name length should be greater than zero -- all the characters in the package name must be lowercase i.e., no uppercase or mixed case names are allowed -- package name *can* consist of hyphens -- package name must *not* contain any non-url-safe characters (since name ends up being part of a URL) -- package name should not start with `.` or `_` -- package name should *not* contain any leading or trailing spaces -- package name should *not* contain any of the following characters: `~)('!*` -- package name *cannot* be the same as a node.js/io.js core module nor a reserved/blacklisted name. For example, the following names are invalid: - + http - + stream - + node_modules - + favicon.ico -- package name length cannot exceed 214 - - -## Examples - -### Valid Names - -```js -var validate = require("validate-npm-package-name") - -validate("some-package") -validate("example.com") -validate("under_score") -validate("123numeric") -validate("excited!") -validate("@npm/thingy") -validate("@jane/foo.js") -``` - -All of the above names are valid, so you'll get this object back: - -```js -{ - validForNewPackages: true, - validForOldPackages: true -} -``` - -### Invalid Names - -```js -validate(" leading-space:and:weirdchars") -``` - -That was never a valid package name, so you get this: - -```js -{ - validForNewPackages: false, - validForOldPackages: false, - errors: [ - 'name cannot contain leading or trailing spaces', - 'name can only contain URL-friendly characters' - ] -} -``` - -## Legacy Names - -In the old days of npm, package names were wild. They could have capital -letters in them. They could be really long. They could be the name of an -existing module in node core. 
- -If you give this function a package name that **used to be valid**, you'll see -a change in the value of `validForNewPackages` property, and a warnings array -will be present: - -```js -validate("eLaBorAtE-paCkAgE-with-mixed-case-and-more-than-214-characters-----------------------------------------------------------------------------------------------------------------------------------------------------------") -``` - -returns: - -```js -{ - validForNewPackages: false, - validForOldPackages: true, - warnings: [ - "name can no longer contain capital letters", - "name can no longer contain more than 214 characters" - ] -} -``` - -## Tests - -```sh -npm install -npm test -``` - -## License - -ISC diff --git a/node_modules/verror/.npmignore b/node_modules/verror/.npmignore deleted file mode 100644 index f14aec80430c6..0000000000000 --- a/node_modules/verror/.npmignore +++ /dev/null @@ -1,9 +0,0 @@ -.gitignore -.gitmodules -deps -examples -experiments -jsl.node.conf -Makefile -Makefile.targ -test diff --git a/node_modules/verror/README.md b/node_modules/verror/README.md deleted file mode 100644 index c1f0635ef53b9..0000000000000 --- a/node_modules/verror/README.md +++ /dev/null @@ -1,528 +0,0 @@ -# verror: rich JavaScript errors - -This module provides several classes in support of Joyent's [Best Practices for -Error Handling in Node.js](http://www.joyent.com/developers/node/design/errors). -If you find any of the behavior here confusing or surprising, check out that -document first. - -The error classes here support: - -* printf-style arguments for the message -* chains of causes -* properties to provide extra information about the error -* creating your own subclasses that support all of these - -The classes here are: - -* **VError**, for chaining errors while preserving each one's error message. - This is useful in servers and command-line utilities when you want to - propagate an error up a call stack, but allow various levels to add their own - context. See examples below. -* **WError**, for wrapping errors while hiding the lower-level messages from the - top-level error. This is useful for API endpoints where you don't want to - expose internal error messages, but you still want to preserve the error chain - for logging and debugging. -* **SError**, which is just like VError but interprets printf-style arguments - more strictly. -* **MultiError**, which is just an Error that encapsulates one or more other - errors. (This is used for parallel operations that return several errors.) - - -# Quick start - -First, install the package: - - npm install verror - -If nothing else, you can use VError as a drop-in replacement for the built-in -JavaScript Error class, with the addition of printf-style messages: - -```javascript -var err = new VError('missing file: "%s"', '/etc/passwd'); -console.log(err.message); -``` - -This prints: - - missing file: "/etc/passwd" - -You can also pass a `cause` argument, which is any other Error object: - -```javascript -var fs = require('fs'); -var filename = '/nonexistent'; -fs.stat(filename, function (err1) { - var err2 = new VError(err1, 'stat "%s"', filename); - console.error(err2.message); -}); -``` - -This prints out: - - stat "/nonexistent": ENOENT, stat '/nonexistent' - -which resembles how Unix programs typically report errors: - - $ sort /nonexistent - sort: open failed: /nonexistent: No such file or directory - -To match the Unixy feel, when you print out the error, just prepend the -program's name to the VError's `message`. 
Or just call -[node-cmdutil.fail(your_verror)](https://github.com/joyent/node-cmdutil), which -does this for you. - -You can get the next-level Error using `err.cause()`: - -```javascript -console.error(err2.cause().message); -``` - -prints: - - ENOENT, stat '/nonexistent' - -Of course, you can chain these as many times as you want, and it works with any -kind of Error: - -```javascript -var err1 = new Error('No such file or directory'); -var err2 = new VError(err1, 'failed to stat "%s"', '/junk'); -var err3 = new VError(err2, 'request failed'); -console.error(err3.message); -``` - -This prints: - - request failed: failed to stat "/junk": No such file or directory - -The idea is that each layer in the stack annotates the error with a description -of what it was doing. The end result is a message that explains what happened -at each level. - -You can also decorate Error objects with additional information so that callers -can not only handle each kind of error differently, but also construct their own -error messages (e.g., to localize them, format them, group them by type, and so -on). See the example below. - - -# Deeper dive - -The two main goals for VError are: - -* **Make it easy to construct clear, complete error messages intended for - people.** Clear error messages greatly improve both user experience and - debuggability, so we wanted to make it easy to build them. That's why the - constructor takes printf-style arguments. -* **Make it easy to construct objects with programmatically-accessible - metadata** (which we call _informational properties_). Instead of just saying - "connection refused while connecting to 192.168.1.2:80", you can add - properties like `"ip": "192.168.1.2"` and `"tcpPort": 80`. This can be used - for feeding into monitoring systems, analyzing large numbers of Errors (as - from a log file), or localizing error messages. - -To really make this useful, it also needs to be easy to compose Errors: -higher-level code should be able to augment the Errors reported by lower-level -code to provide a more complete description of what happened. Instead of saying -"connection refused", you can say "operation X failed: connection refused". -That's why VError supports `causes`. - -In order for all this to work, programmers need to know that it's generally safe -to wrap lower-level Errors with higher-level ones. If you have existing code -that handles Errors produced by a library, you should be able to wrap those -Errors with a VError to add information without breaking the error handling -code. There are two obvious ways that this could break such consumers: - -* The error's name might change. People typically use `name` to determine what - kind of Error they've got. To ensure compatibility, you can create VErrors - with custom names, but this approach isn't great because it prevents you from - representing complex failures. For this reason, VError provides - `findCauseByName`, which essentially asks: does this Error _or any of its - causes_ have this specific type? If error handling code uses - `findCauseByName`, then subsystems can construct very specific causal chains - for debuggability and still let people handle simple cases easily. There's an - example below. -* The error's properties might change. People often hang additional properties - off of Error objects. If we wrap an existing Error in a new Error, those - properties would be lost unless we copied them. 
But there are a variety of - both standard and non-standard Error properties that should _not_ be copied in - this way: most obviously `name`, `message`, and `stack`, but also `fileName`, - `lineNumber`, and a few others. Plus, it's useful for some Error subclasses - to have their own private properties -- and there'd be no way to know whether - these should be copied. For these reasons, VError first-classes these - information properties. You have to provide them in the constructor, you can - only fetch them with the `info()` function, and VError takes care of making - sure properties from causes wind up in the `info()` output. - -Let's put this all together with an example from the node-fast RPC library. -node-fast implements a simple RPC protocol for Node programs. There's a server -and client interface, and clients make RPC requests to servers. Let's say the -server fails with an UnauthorizedError with message "user 'bob' is not -authorized". The client wraps all server errors with a FastServerError. The -client also wraps all request errors with a FastRequestError that includes the -name of the RPC call being made. The result of this failed RPC might look like -this: - - name: FastRequestError - message: "request failed: server error: user 'bob' is not authorized" - rpcMsgid: <unique identifier for this request> - rpcMethod: GetObject - cause: - name: FastServerError - message: "server error: user 'bob' is not authorized" - cause: - name: UnauthorizedError - message: "user 'bob' is not authorized" - rpcUser: "bob" - -When the caller uses `VError.info()`, the information properties are collapsed -so that it looks like this: - - message: "request failed: server error: user 'bob' is not authorized" - rpcMsgid: <unique identifier for this request> - rpcMethod: GetObject - rpcUser: "bob" - -Taking this apart: - -* The error's message is a complete description of the problem. The caller can - report this directly to its caller, which can potentially make its way back to - an end user (if appropriate). It can also be logged. -* The caller can tell that the request failed on the server, rather than as a - result of a client problem (e.g., failure to serialize the request), a - transport problem (e.g., failure to connect to the server), or something else - (e.g., a timeout). They do this using `findCauseByName('FastServerError')` - rather than checking the `name` field directly. -* If the caller logs this error, the logs can be analyzed to aggregate - errors by cause, by RPC method name, by user, or whatever. Or the - error can be correlated with other events for the same rpcMsgid. -* It wasn't very hard for any part of the code to contribute to this Error. - Each part of the stack has just a few lines to provide exactly what it knows, - with very little boilerplate. - -It's not expected that you'd use these complex forms all the time. Despite -supporting the complex case above, you can still just do: - - new VError("my service isn't working"); - -for the simple cases. - - -# Reference: VError, WError, SError - -VError, WError, and SError are convenient drop-in replacements for `Error` that -support printf-style arguments, first-class causes, informational properties, -and other useful features. - - -## Constructors - -The VError constructor has several forms: - -```javascript -/* - * This is the most general form. You can specify any supported options - * (including "cause" and "info") this way. - */ -new VError(options, sprintf_args...) 
-
-/*
- * This is a useful shorthand when the only option you need is "cause".
- */
-new VError(cause, sprintf_args...)
-
-/*
- * This is a useful shorthand when you don't need any options at all.
- */
-new VError(sprintf_args...)
-```
-
-All of these forms construct a new VError that behaves just like the built-in
-JavaScript `Error` class, with some additional methods described below.
-
-In the first form, `options` is a plain object with any of the following
-optional properties:
-
-Option name | Type | Meaning
----------------- | ---------------- | -------
-`name` | string | Describes what kind of error this is. This is intended for programmatic use to distinguish between different kinds of errors. Note that in modern versions of Node.js, this name is ignored in the `stack` property value, but callers can still use the `name` property to get at it.
-`cause` | any Error object | Indicates that the new error was caused by `cause`. See `cause()` below. If unspecified, the cause will be `null`.
-`strict` | boolean | If true, then `null` and `undefined` values in `sprintf_args` are passed through to `sprintf()`. Otherwise, these are replaced with the strings `'null'` and `'undefined'`, respectively.
-`constructorOpt` | function | If specified, then the stack trace for this error ends at function `constructorOpt`. Functions called by `constructorOpt` will not show up in the stack. This is useful when this class is subclassed.
-`info` | object | Specifies arbitrary informational properties that are available through the `VError.info(err)` static class method. See that method for details.
-
-The second form is equivalent to using the first form with the specified `cause`
-as the error's cause. This form is distinguished from the first form because
-the first argument is an Error.
-
-The third form is equivalent to using the first form with all default option
-values. This form is distinguished from the other forms because the first
-argument is not an object or an Error.
-
-The `WError` constructor is used exactly the same way as the `VError`
-constructor. The `SError` constructor is also used the same way as the
-`VError` constructor except that in all cases, the `strict` property is
-overridden to `true`.
-
-
-## Public properties
-
-`VError`, `WError`, and `SError` all provide the same public properties as
-JavaScript's built-in Error objects.
-
-Property name | Type | Meaning
-------------- | ------ | -------
-`name` | string | Programmatically-usable name of the error.
-`message` | string | Human-readable summary of the failure. Programmatically-accessible details are provided through the `VError.info(err)` class method.
-`stack` | string | Human-readable stack trace where the Error was constructed.
-
-For all of these classes, the printf-style arguments passed to the constructor
-are processed with `sprintf()` to form a message. For `WError`, this becomes
-the complete `message` property. For `SError` and `VError`, this message is
-prepended to the message of the cause, if any (with a suitable separator), and
-the result becomes the `message` property.
-
-The `stack` property is managed entirely by the underlying JavaScript
-implementation. It's generally implemented using a getter function because
-constructing the human-readable stack trace is somewhat expensive.
-
-## Class methods
-
-The following methods are defined on the `VError` class and as exported
-functions on the `verror` module.
They're defined this way rather than using -methods on VError instances so that they can be used on Errors not created with -`VError`. - -### `VError.cause(err)` - -The `cause()` function returns the next Error in the cause chain for `err`, or -`null` if there is no next error. See the `cause` argument to the constructor. -Errors can have arbitrarily long cause chains. You can walk the `cause` chain -by invoking `VError.cause(err)` on each subsequent return value. If `err` is -not a `VError`, the cause is `null`. - -### `VError.info(err)` - -Returns an object with all of the extra error information that's been associated -with this Error and all of its causes. These are the properties passed in using -the `info` option to the constructor. Properties not specified in the -constructor for this Error are implicitly inherited from this error's cause. - -These properties are intended to provide programmatically-accessible metadata -about the error. For an error that indicates a failure to resolve a DNS name, -informational properties might include the DNS name to be resolved, or even the -list of resolvers used to resolve it. The values of these properties should -generally be plain objects (i.e., consisting only of null, undefined, numbers, -booleans, strings, and objects and arrays containing only other plain objects). - -### `VError.fullStack(err)` - -Returns a string containing the full stack trace, with all nested errors recursively -reported as `'caused by:' + err.stack`. - -### `VError.findCauseByName(err, name)` - -The `findCauseByName()` function traverses the cause chain for `err`, looking -for an error whose `name` property matches the passed in `name` value. If no -match is found, `null` is returned. - -If all you want is to know _whether_ there's a cause (and you don't care what it -is), you can use `VError.hasCauseWithName(err, name)`. - -If a vanilla error or a non-VError error is passed in, then there is no cause -chain to traverse. In this scenario, the function will check the `name` -property of only `err`. - -### `VError.hasCauseWithName(err, name)` - -Returns true if and only if `VError.findCauseByName(err, name)` would return -a non-null value. This essentially determines whether `err` has any cause in -its cause chain that has name `name`. - -### `VError.errorFromList(errors)` - -Given an array of Error objects (possibly empty), return a single error -representing the whole collection of errors. If the list has: - -* 0 elements, returns `null` -* 1 element, returns the sole error -* more than 1 element, returns a MultiError referencing the whole list - -This is useful for cases where an operation may produce any number of errors, -and you ultimately want to implement the usual `callback(err)` pattern. You can -accumulate the errors in an array and then invoke -`callback(VError.errorFromList(errors))` when the operation is complete. - - -### `VError.errorForEach(err, func)` - -Convenience function for iterating an error that may itself be a MultiError. - -In all cases, `err` must be an Error. If `err` is a MultiError, then `func` is -invoked as `func(errorN)` for each of the underlying errors of the MultiError. -If `err` is any other kind of error, `func` is invoked once as `func(err)`. In -all cases, `func` is invoked synchronously. - -This is useful for cases where an operation may produce any number of warnings -that may be encapsulated with a MultiError -- but may not be. - -This function does not iterate an error's cause chain. 
- - -## Examples - -The "Demo" section above covers several basic cases. Here's a more advanced -case: - -```javascript -var err1 = new VError('something bad happened'); -/* ... */ -var err2 = new VError({ - 'name': 'ConnectionError', - 'cause': err1, - 'info': { - 'errno': 'ECONNREFUSED', - 'remote_ip': '127.0.0.1', - 'port': 215 - } -}, 'failed to connect to "%s:%d"', '127.0.0.1', 215); - -console.log(err2.message); -console.log(err2.name); -console.log(VError.info(err2)); -console.log(err2.stack); -``` - -This outputs: - - failed to connect to "127.0.0.1:215": something bad happened - ConnectionError - { errno: 'ECONNREFUSED', remote_ip: '127.0.0.1', port: 215 } - ConnectionError: failed to connect to "127.0.0.1:215": something bad happened - at Object.<anonymous> (/home/dap/node-verror/examples/info.js:5:12) - at Module._compile (module.js:456:26) - at Object.Module._extensions..js (module.js:474:10) - at Module.load (module.js:356:32) - at Function.Module._load (module.js:312:12) - at Function.Module.runMain (module.js:497:10) - at startup (node.js:119:16) - at node.js:935:3 - -Information properties are inherited up the cause chain, with values at the top -of the chain overriding same-named values lower in the chain. To continue that -example: - -```javascript -var err3 = new VError({ - 'name': 'RequestError', - 'cause': err2, - 'info': { - 'errno': 'EBADREQUEST' - } -}, 'request failed'); - -console.log(err3.message); -console.log(err3.name); -console.log(VError.info(err3)); -console.log(err3.stack); -``` - -This outputs: - - request failed: failed to connect to "127.0.0.1:215": something bad happened - RequestError - { errno: 'EBADREQUEST', remote_ip: '127.0.0.1', port: 215 } - RequestError: request failed: failed to connect to "127.0.0.1:215": something bad happened - at Object.<anonymous> (/home/dap/node-verror/examples/info.js:20:12) - at Module._compile (module.js:456:26) - at Object.Module._extensions..js (module.js:474:10) - at Module.load (module.js:356:32) - at Function.Module._load (module.js:312:12) - at Function.Module.runMain (module.js:497:10) - at startup (node.js:119:16) - at node.js:935:3 - -You can also print the complete stack trace of combined `Error`s by using -`VError.fullStack(err).` - -```javascript -var err1 = new VError('something bad happened'); -/* ... */ -var err2 = new VError(err1, 'something really bad happened here'); - -console.log(VError.fullStack(err2)); -``` - -This outputs: - - VError: something really bad happened here: something bad happened - at Object.<anonymous> (/home/dap/node-verror/examples/fullStack.js:5:12) - at Module._compile (module.js:409:26) - at Object.Module._extensions..js (module.js:416:10) - at Module.load (module.js:343:32) - at Function.Module._load (module.js:300:12) - at Function.Module.runMain (module.js:441:10) - at startup (node.js:139:18) - at node.js:968:3 - caused by: VError: something bad happened - at Object.<anonymous> (/home/dap/node-verror/examples/fullStack.js:3:12) - at Module._compile (module.js:409:26) - at Object.Module._extensions..js (module.js:416:10) - at Module.load (module.js:343:32) - at Function.Module._load (module.js:300:12) - at Function.Module.runMain (module.js:441:10) - at startup (node.js:139:18) - at node.js:968:3 - -`VError.fullStack` is also safe to use on regular `Error`s, so feel free to use -it whenever you need to extract the stack trace from an `Error`, regardless if -it's a `VError` or not. - -# Reference: MultiError - -MultiError is an Error class that represents a group of Errors. 
This is used
-when you logically need to provide a single Error, but you want to preserve
-information about multiple underlying Errors. A common case is when you execute
-several operations in parallel and some of them fail.
-
-MultiErrors are constructed as:
-
-```javascript
-new MultiError(error_list)
-```
-
-`error_list` is an array of at least one `Error` object.
-
-The cause of the MultiError is the first error provided. None of the other
-`VError` options are supported. The `message` for a MultiError consists of the
-`message` from the first error, prepended with a message indicating that there
-were other errors.
-
-For example:
-
-```javascript
-err = new MultiError([
-    new Error('failed to resolve DNS name "abc.example.com"'),
-    new Error('failed to resolve DNS name "def.example.com"'),
-]);
-
-console.error(err.message);
-```
-
-outputs:
-
-    first of 2 errors: failed to resolve DNS name "abc.example.com"
-
-See the convenience function `VError.errorFromList`, which is sometimes simpler
-to use than this constructor.
-
-## Public methods
-
-
-### `errors()`
-
-Returns an array of the errors used to construct this MultiError.
-
-
-# Contributing
-
-See separate [contribution guidelines](CONTRIBUTING.md).
diff --git a/node_modules/walk-up-path/README.md b/node_modules/walk-up-path/README.md
deleted file mode 100644
index 6729745f8a6c7..0000000000000
--- a/node_modules/walk-up-path/README.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# walk-up-path
-
-Given a path string, return a generator that walks up the path, emitting
-each dirname.
-
-So, to get a platform-portable walk up, instead of doing something like
-this:
-
-```js
-for (let p = dirname(path); p;) {
-
-  // ... do stuff ...
-
-  const pp = dirname(p)
-  if (p === pp)
-    p = null
-  else
-    p = pp
-}
-```
-
-Or this:
-
-```js
-for (let p = dirname(path); !isRoot(p); p = dirname(p)) {
-  // ... do stuff ...
-}
-```
-
-You can do this:
-
-```js
-const walkUpPath = require('walk-up-path')
-for (const p of walkUpPath(path)) {
-  // ... do stuff ...
-}
-```
-
-## API
-
-```js
-const walkUpPath = require('walk-up-path')
-```
-
-Give the fn a string, it'll yield all the directories walking up to the
-root.
diff --git a/node_modules/wcwidth/.npmignore b/node_modules/wcwidth/.npmignore deleted file mode 100644 index 3c3629e647f5d..0000000000000 --- a/node_modules/wcwidth/.npmignore +++ /dev/null @@ -1 +0,0 @@ -node_modules diff --git a/node_modules/which/CHANGELOG.md b/node_modules/which/CHANGELOG.md deleted file mode 100644 index 7fb1f2033c8dc..0000000000000 --- a/node_modules/which/CHANGELOG.md +++ /dev/null @@ -1,166 +0,0 @@ -# Changes - - -## 2.0.2 - -* Rename bin to `node-which` - -## 2.0.1 - -* generate changelog and publish on version bump -* enforce 100% test coverage -* Promise interface - -## 2.0.0 - -* Parallel tests, modern JavaScript, and drop support for node < 8 - -## 1.3.1 - -* update deps -* update travis - -## v1.3.0 - -* Add nothrow option to which.sync -* update tap - -## v1.2.14 - -* appveyor: drop node 5 and 0.x -* travis-ci: add node 6, drop 0.x - -## v1.2.13 - -* test: Pass missing option to pass on windows -* update tap -* update isexe to 2.0.0 -* neveragain.tech pledge request - -## v1.2.12 - -* Removed unused require - -## v1.2.11 - -* Prevent changelog script from being included in package - -## v1.2.10 - -* Use env.PATH only, not env.Path - -## v1.2.9 - -* fix for paths starting with ../ -* Remove unused `is-absolute` module - -## v1.2.8 - -* bullet items in changelog that contain (but don't start with) # - -## v1.2.7 - -* strip 'update changelog' changelog entries out of changelog - -## v1.2.6 - -* make the changelog bulleted - -## v1.2.5 - -* make a changelog, and keep it up to date -* don't include tests in package -* Properly handle relative-path executables -* appveyor -* Attach error code to Not Found error -* Make tests pass on Windows - -## v1.2.4 - -* Fix typo - -## v1.2.3 - -* update isexe, fix regression in pathExt handling - -## v1.2.2 - -* update deps, use isexe module, test windows - -## v1.2.1 - -* Sometimes windows PATH entries are quoted -* Fixed a bug in the check for group and user mode bits. This bug was introduced during refactoring for supporting strict mode. -* doc cli - -## v1.2.0 - -* Add support for opt.all and -as cli flags -* test the bin -* update travis -* Allow checking for multiple programs in bin/which -* tap 2 - -## v1.1.2 - -* travis -* Refactored and fixed undefined error on Windows -* Support strict mode - -## v1.1.1 - -* test +g exes against secondary groups, if available -* Use windows exe semantics on cygwin & msys -* cwd should be first in path on win32, not last -* Handle lower-case 'env.Path' on Windows -* Update docs -* use single-quotes - -## v1.1.0 - -* Add tests, depend on is-absolute - -## v1.0.9 - -* which.js: root is allowed to execute files owned by anyone - -## v1.0.8 - -* don't use graceful-fs - -## v1.0.7 - -* add license to package.json - -## v1.0.6 - -* isc license - -## 1.0.5 - -* Awful typo - -## 1.0.4 - -* Test for path absoluteness properly -* win: Allow '' as a pathext if cmd has a . in it - -## 1.0.3 - -* Remove references to execPath -* Make `which.sync()` work on Windows by honoring the PATHEXT variable. -* Make `isExe()` always return true on Windows. -* MIT - -## 1.0.2 - -* Only files can be exes - -## 1.0.1 - -* Respect the PATHEXT env for win32 support -* should 0755 the bin -* binary -* guts -* package -* 1st diff --git a/node_modules/which/README.md b/node_modules/which/README.md deleted file mode 100644 index cd833509f3bcc..0000000000000 --- a/node_modules/which/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# which - -Like the unix `which` utility. 
-
-Finds the first instance of a specified executable in the PATH
-environment variable. Does not cache the results, so `hash -r` is not
-needed when the PATH changes.
-
-## USAGE
-
-```javascript
-var which = require('which')
-
-// async usage
-which('node', function (er, resolvedPath) {
-  // er is returned if no "node" is found on the PATH
-  // if it is found, then the absolute path to the exec is returned
-})
-
-// or promise
-which('node').then(resolvedPath => { ... }).catch(er => { ... not found ... })
-
-// sync usage
-// throws if not found
-var resolved = which.sync('node')
-
-// if nothrow option is used, returns null if not found
-resolved = which.sync('node', {nothrow: true})
-
-// Pass options to override the PATH and PATHEXT environment vars.
-which('node', { path: someOtherPath }, function (er, resolved) {
-  if (er)
-    throw er
-  console.log('found at %j', resolved)
-})
-```
-
-## CLI USAGE
-
-Same as the BSD `which(1)` binary.
-
-```
-usage: which [-as] program ...
-```
-
-## OPTIONS
-
-You may pass an options object as the second argument.
-
-- `path`: Use instead of the `PATH` environment variable.
-- `pathExt`: Use instead of the `PATHEXT` environment variable.
-- `all`: Return all matches, instead of just the first one. Note that
-  this means the function returns an array of strings instead of a
-  single string.
diff --git a/node_modules/wide-align/README.md b/node_modules/wide-align/README.md
deleted file mode 100644
index 32f1be04f0977..0000000000000
--- a/node_modules/wide-align/README.md
+++ /dev/null
@@ -1,47 +0,0 @@
-wide-align
-----------
-
-A wide-character aware text alignment function for use in terminals / on the
-console.
-
-### Usage
-
-```
-var align = require('wide-align')
-
-// Note that if you view this on a unicode console, all of the slashes are
-// aligned. This is because on a console, all narrow characters are
-// an en wide and all wide characters are an em. In browsers, this isn't
-// held to and wide characters like "古" can be less than two narrow
-// characters even with a fixed width font.
-
-console.log(align.center('abc', 10)) // '   abc    '
-console.log(align.center('古古古', 10)) // '  古古古  '
-console.log(align.left('abc', 10)) // 'abc       '
-console.log(align.left('古古古', 10)) // '古古古    '
-console.log(align.right('abc', 10)) // '       abc'
-console.log(align.right('古古古', 10)) // '    古古古'
-```
-
-### Functions
-
-#### `align.center(str, length)` → `str`
-
-Returns *str* with spaces added to both sides such that it is *length*
-chars long and centered in the spaces.
-
-#### `align.left(str, length)` → `str`
-
-Returns *str* with spaces to the right such that it is *length* chars long.
-
-#### `align.right(str, length)` → `str`
-
-Returns *str* with spaces to the left such that it is *length* chars long.
-
-### Origins
-
-These functions were originally taken from
-[cliui](https://npmjs.com/package/cliui). Changes include switching to the
-MUCH faster pad generation function from
-[lodash](https://npmjs.com/package/lodash), making center alignment pad
-both sides and adding left alignment.
diff --git a/node_modules/wrappy/README.md b/node_modules/wrappy/README.md
deleted file mode 100644
index 98eab2522b86e..0000000000000
--- a/node_modules/wrappy/README.md
+++ /dev/null
@@ -1,36 +0,0 @@
-# wrappy
-
-Callback wrapping utility
-
-## USAGE
-
-```javascript
-var wrappy = require("wrappy")
-
-// var wrapper = wrappy(wrapperFunction)
-
-// make sure a cb is called only once
-// See also: http://npm.im/once for this specific use case
-var once = wrappy(function (cb) {
-  var called = false
-  return function () {
-    if (called) return
-    called = true
-    return cb.apply(this, arguments)
-  }
-})
-
-function printBoo () {
-  console.log('boo')
-}
-// has some rando property
-printBoo.iAmBooPrinter = true
-
-var onlyPrintOnce = once(printBoo)
-
-onlyPrintOnce() // prints 'boo'
-onlyPrintOnce() // does nothing
-
-// random property is retained!
-assert.equal(onlyPrintOnce.iAmBooPrinter, true)
-```
diff --git a/node_modules/write-file-atomic/CHANGELOG.md b/node_modules/write-file-atomic/CHANGELOG.md
deleted file mode 100644
index d1a6c1b862baa..0000000000000
--- a/node_modules/write-file-atomic/CHANGELOG.md
+++ /dev/null
@@ -1,32 +0,0 @@
-# 3.0.0
-
-* Implement options.tmpfileCreated callback.
-* Drop Node.js 6, modernize code, return Promise from async function.
-* Support write TypedArray's like in node fs.writeFile.
-* Remove graceful-fs dependency.
-
-# 2.4.3
-
-* Ignore errors raised by `fs.closeSync` when cleaning up after a write
-  error.
-
-# 2.4.2
-
-* A pair of patches to fix some fd leaks. We would leak fds with sync use
-  when errors occurred and with async use any time fsync was not in use. (#34)
-
-# 2.4.1
-
-* Fix a bug where `signal-exit` instances would be leaked. This was fixed when addressing #35.
-
-# 2.4.0
-
-## Features
-
-* Allow chown and mode options to be set to false to disable the defaulting behavior. (#20)
-* Support passing encoding strings in options slot for compat with Node.js API. (#31)
-* Add support for running inside of worker threads (#37)
-
-## Fixes
-
-* Remove unneeded call when returning success (#36)
diff --git a/node_modules/write-file-atomic/README.md b/node_modules/write-file-atomic/README.md
deleted file mode 100644
index caea79956f858..0000000000000
--- a/node_modules/write-file-atomic/README.md
+++ /dev/null
@@ -1,72 +0,0 @@
-write-file-atomic
------------------
-
-This is an extension for node's `fs.writeFile` that makes its operation
-atomic and allows you to set ownership (uid/gid of the file).
-
-### var writeFileAtomic = require('write-file-atomic')<br>writeFileAtomic(filename, data, [options], [callback])
-
-* filename **String**
-* data **String** | **Buffer**
-* options **Object** | **String**
-  * chown **Object** default, uid & gid of existing file, if any
-    * uid **Number**
-    * gid **Number**
-  * encoding **String** | **Null** default = 'utf8'
-  * fsync **Boolean** default = true
-  * mode **Number** default, from existing file, if any
-  * tmpfileCreated **Function** called when the tmpfile is created
-* callback **Function**
-
-Atomically and asynchronously writes data to a file, replacing the file if it already
-exists. data can be a string or a buffer.
-
-The file is initially named `filename + "." + murmurhex(__filename, process.pid, ++invocations)`.
-Note that `require('worker_threads').threadId` is used in addition to `process.pid` if running inside of a worker thread.
-If writeFile completes successfully then, if passed the **chown** option it will change
-the ownership of the file. Finally it renames the file back to the filename you specified. If
-it encounters errors at any of these steps it will attempt to unlink the temporary file and then
-pass the error back to the caller.
-If multiple writes are concurrently issued to the same file, the write operations are put into a queue and serialized in the order they were called, using Promises. Writes to different files are still executed in parallel.
-
-If provided, the **chown** option requires both **uid** and **gid** properties or else
-you'll get an error. If **chown** is not specified it will default to using
-the owner of the previous file. To prevent chown from being run you can
-also pass `false`, in which case the file will be created with the current user's credentials.
-
-If **mode** is not specified, it will default to using the permissions from
-an existing file, if any. Explicitly setting this to `false` removes this default, resulting
-in a file created with the system default permissions.
-
-If options is a String, it's assumed to be the **encoding** option. The **encoding** option is ignored if **data** is a buffer. It defaults to 'utf8'.
-
-If the **fsync** option is **false**, writeFile will skip the final fsync call.
-
-If the **tmpfileCreated** option is specified it will be called with the name of the tmpfile when created.
-
-Example:
-
-```javascript
-writeFileAtomic('message.txt', 'Hello Node', {chown:{uid:100,gid:50}}, function (err) {
-  if (err) throw err;
-  console.log('It\'s saved!');
-});
-```
-
-This function also supports async/await:
-
-```javascript
-(async () => {
-  try {
-    await writeFileAtomic('message.txt', 'Hello Node', {chown:{uid:100,gid:50}});
-    console.log('It\'s saved!');
-  } catch (err) {
-    console.error(err);
-    process.exit(1);
-  }
-})();
-```
-
-### var writeFileAtomicSync = require('write-file-atomic').sync<br>writeFileAtomicSync(filename, data, [options])
-
-The synchronous version of **writeFileAtomic**.
diff --git a/node_modules/yallist/README.md b/node_modules/yallist/README.md
deleted file mode 100644
index f586101869668..0000000000000
--- a/node_modules/yallist/README.md
+++ /dev/null
@@ -1,204 +0,0 @@
-# yallist
-
-Yet Another Linked List
-
-There are many doubly-linked list implementations like it, but this
-one is mine.
-
-For when an array would be too big, and a Map can't be iterated in
-reverse order.
-
-
-[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist)
-
-## basic usage
-
-```javascript
-var yallist = require('yallist')
-var myList = yallist.create([1, 2, 3])
-myList.push('foo')
-myList.unshift('bar')
-// of course pop() and shift() are there, too
-console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo']
-myList.forEach(function (k) {
-  // walk the list head to tail
-})
-myList.forEachReverse(function (k, index, list) {
-  // walk the list tail to head
-})
-var myDoubledList = myList.map(function (k) {
-  return k + k
-})
-// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo']
-// mapReverse is also a thing
-var myDoubledListReverse = myList.mapReverse(function (k) {
-  return k + k
-}) // ['foofoo', 6, 4, 2, 'barbar']
-
-var reduced = myList.reduce(function (set, entry) {
-  set += entry
-  return set
-}, 'start')
-console.log(reduced) // 'startfoo123bar'
-```
-
-## api
-
-The whole API is considered "public".
- -Functions with the same name as an Array method work more or less the -same way. - -There's reverse versions of most things because that's the point. - -### Yallist - -Default export, the class that holds and manages a list. - -Call it with either a forEach-able (like an array) or a set of -arguments, to initialize the list. - -The Array-ish methods all act like you'd expect. No magic length, -though, so if you change that it won't automatically prune or add -empty spots. - -### Yallist.create(..) - -Alias for Yallist function. Some people like factories. - -#### yallist.head - -The first node in the list - -#### yallist.tail - -The last node in the list - -#### yallist.length - -The number of nodes in the list. (Change this at your peril. It is -not magic like Array length.) - -#### yallist.toArray() - -Convert the list to an array. - -#### yallist.forEach(fn, [thisp]) - -Call a function on each item in the list. - -#### yallist.forEachReverse(fn, [thisp]) - -Call a function on each item in the list, in reverse order. - -#### yallist.get(n) - -Get the data at position `n` in the list. If you use this a lot, -probably better off just using an Array. - -#### yallist.getReverse(n) - -Get the data at position `n`, counting from the tail. - -#### yallist.map(fn, thisp) - -Create a new Yallist with the result of calling the function on each -item. - -#### yallist.mapReverse(fn, thisp) - -Same as `map`, but in reverse. - -#### yallist.pop() - -Get the data from the list tail, and remove the tail from the list. - -#### yallist.push(item, ...) - -Insert one or more items to the tail of the list. - -#### yallist.reduce(fn, initialValue) - -Like Array.reduce. - -#### yallist.reduceReverse - -Like Array.reduce, but in reverse. - -#### yallist.reverse - -Reverse the list in place. - -#### yallist.shift() - -Get the data from the list head, and remove the head from the list. - -#### yallist.slice([from], [to]) - -Just like Array.slice, but returns a new Yallist. - -#### yallist.sliceReverse([from], [to]) - -Just like yallist.slice, but the result is returned in reverse. - -#### yallist.toArray() - -Create an array representation of the list. - -#### yallist.toArrayReverse() - -Create a reversed array representation of the list. - -#### yallist.unshift(item, ...) - -Insert one or more items to the head of the list. - -#### yallist.unshiftNode(node) - -Move a Node object to the front of the list. (That is, pull it out of -wherever it lives, and make it the new head.) - -If the node belongs to a different list, then that list will remove it -first. - -#### yallist.pushNode(node) - -Move a Node object to the end of the list. (That is, pull it out of -wherever it lives, and make it the new tail.) - -If the node belongs to a list already, then that list will remove it -first. - -#### yallist.removeNode(node) - -Remove a node from the list, preserving referential integrity of head -and tail and other nodes. - -Will throw an error if you try to have a list remove a node that -doesn't belong to it. - -### Yallist.Node - -The class that holds the data and is actually the list. - -Call with `var n = new Node(value, previousNode, nextNode)` - -Note that if you do direct operations on Nodes themselves, it's very -easy to get into weird states where the list is broken. Be careful :) - -#### node.next - -The next node in the list. - -#### node.prev - -The previous node in the list. - -#### node.value - -The data the node contains. - -#### node.list - -The list to which this node belongs. 
(Null if it does not belong to -any list.) diff --git a/package-lock.json b/package-lock.json index 0a5d9508710bb..01f2e5b3b3152 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,16 +1,17 @@ { "name": "npm", - "version": "7.6.0", + "version": "7.20.3", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "npm", - "version": "7.6.0", + "version": "7.20.3", "bundleDependencies": [ "@npmcli/arborist", "@npmcli/ci-detect", "@npmcli/config", + "@npmcli/package-json", "@npmcli/run-script", "abbrev", "ansicolors", @@ -33,6 +34,7 @@ "leven", "libnpmaccess", "libnpmdiff", + "libnpmexec", "libnpmfund", "libnpmhook", "libnpmorg", @@ -73,219 +75,50 @@ "treeverse", "validate-npm-package-name", "which", - "write-file-atomic", - "@npmcli/disparity-colors", - "@npmcli/git", - "@npmcli/installed-package-contents", - "@npmcli/map-workspaces", - "@npmcli/metavuln-calculator", - "@npmcli/move-file", - "@npmcli/name-from-folder", - "@npmcli/node-gyp", - "@npmcli/promise-spawn", - "@tootallnate/once", - "agent-base", - "agentkeepalive", - "aggregate-error", - "ajv", - "ansi-regex", - "ansi-styles", - "aproba", - "are-we-there-yet", - "asap", - "asn1", - "assert-plus", - "asynckit", - "aws-sign2", - "aws4", - "balanced-match", - "bcrypt-pbkdf", - "bin-links", - "binary-extensions", - "brace-expansion", - "builtins", - "caseless", - "cidr-regex", - "clean-stack", - "clone", - "cmd-shim", - "code-point-at", - "color-convert", - "color-name", - "colors", - "combined-stream", - "common-ancestor-path", - "concat-map", - "console-control-strings", - "core-util-is", - "dashdash", - "debug", - "debuglog", - "defaults", - "delayed-stream", - "delegates", - "depd", - "dezalgo", - "diff", - "ecc-jsbn", - "emoji-regex", - "encoding", - "env-paths", - "err-code", - "extend", - "extsprintf", - "fast-deep-equal", - "fast-json-stable-stringify", - "forever-agent", - "form-data", - "fs-minipass", - "fs.realpath", - "function-bind", - "gauge", - "getpass", - "har-schema", - "har-validator", - "has", - "has-flag", - "has-unicode", - "http-cache-semantics", - "http-proxy-agent", - "http-signature", - "https-proxy-agent", - "humanize-ms", - "iconv-lite", - "ignore-walk", - "imurmurhash", - "indent-string", - "infer-owner", - "inflight", - "inherits", - "ip", - "ip-regex", - "is-core-module", - "is-fullwidth-code-point", - "is-lambda", - "is-typedarray", - "isarray", - "isexe", - "isstream", - "jsbn", - "json-schema", - "json-schema-traverse", - "json-stringify-nice", - "json-stringify-safe", - "jsonparse", - "jsprim", - "just-diff", - "just-diff-apply", - "lru-cache", - "mime-db", - "mime-types", - "minimatch", - "minipass-collect", - "minipass-fetch", - "minipass-flush", - "minipass-json-stream", - "minipass-sized", - "minizlib", - "mute-stream", - "normalize-package-data", - "npm-bundled", - "npm-install-checks", - "npm-normalize-package-bin", - "npm-packlist", - "number-is-nan", - "oauth-sign", - "object-assign", - "once", - "p-map", - "path-is-absolute", - "path-parse", - "performance-now", - "process-nextick-args", - "promise-all-reject-late", - "promise-call-limit", - "promise-inflight", - "promise-retry", - "promzard", - "psl", - "puka", - "punycode", - "qs", - "read-cmd-shim", - "readable-stream", - "request", - "resolve", - "retry", - "safe-buffer", - "safer-buffer", - "set-blocking", - "signal-exit", - "smart-buffer", - "socks", - "socks-proxy-agent", - "spdx-correct", - "spdx-exceptions", - "spdx-expression-parse", - "spdx-license-ids", - "sshpk", - "string_decoder", - "string-width", - 
"stringify-package", - "strip-ansi", - "supports-color", - "tunnel-agent", - "tweetnacl", - "typedarray-to-buffer", - "unique-filename", - "unique-slug", - "uri-js", - "util-deprecate", - "uuid", - "validate-npm-package-license", - "verror", - "walk-up-path", - "wcwidth", - "wide-align", - "wrappy", - "yallist" + "write-file-atomic" ], "license": "Artistic-2.0", + "workspaces": [ + "docs", + "packages/*" + ], "dependencies": { - "@npmcli/arborist": "^2.2.5", + "@npmcli/arborist": "^2.8.0", "@npmcli/ci-detect": "^1.2.0", - "@npmcli/config": "^1.2.9", - "@npmcli/run-script": "^1.8.3", + "@npmcli/config": "^2.2.0", + "@npmcli/package-json": "^1.0.1", + "@npmcli/run-script": "^1.8.5", "abbrev": "~1.1.1", "ansicolors": "~0.3.2", "ansistyles": "~0.1.3", "archy": "~1.0.0", - "byte-size": "^7.0.0", - "cacache": "^15.0.5", + "byte-size": "^7.0.1", + "cacache": "^15.2.0", "chalk": "^4.1.0", "chownr": "^2.0.0", "cli-columns": "^3.1.2", "cli-table3": "^0.6.0", "columnify": "~1.5.4", - "glob": "^7.1.4", + "glob": "^7.1.7", "graceful-fs": "^4.2.6", - "hosted-git-info": "^3.0.8", + "hosted-git-info": "^4.0.2", "ini": "^2.0.0", - "init-package-json": "^2.0.2", + "init-package-json": "^2.0.3", "is-cidr": "^4.0.2", "json-parse-even-better-errors": "^2.3.1", "leven": "^3.1.0", - "libnpmaccess": "^4.0.1", - "libnpmdiff": "^2.0.3", - "libnpmfund": "^1.0.2", - "libnpmhook": "^6.0.1", - "libnpmorg": "^2.0.1", + "libnpmaccess": "^4.0.2", + "libnpmdiff": "^2.0.4", + "libnpmexec": "^2.0.0", + "libnpmfund": "^1.1.0", + "libnpmhook": "^6.0.2", + "libnpmorg": "^2.0.2", "libnpmpack": "^2.0.1", - "libnpmpublish": "^4.0.0", - "libnpmsearch": "^3.1.0", - "libnpmteam": "^2.0.2", - "libnpmversion": "^1.0.11", - "make-fetch-happen": "^8.0.14", + "libnpmpublish": "^4.0.1", + "libnpmsearch": "^3.1.1", + "libnpmteam": "^2.0.3", + "libnpmversion": "^1.2.1", + "make-fetch-happen": "^9.0.4", "minipass": "^3.1.3", "minipass-pipeline": "^1.2.4", "mkdirp": "^1.0.4", @@ -293,25 +126,25 @@ "ms": "^2.1.2", "node-gyp": "^7.1.2", "nopt": "^5.0.0", - "npm-audit-report": "^2.1.4", - "npm-package-arg": "^8.1.1", - "npm-pick-manifest": "^6.1.0", - "npm-profile": "^5.0.2", - "npm-registry-fetch": "^9.0.0", + "npm-audit-report": "^2.1.5", + "npm-package-arg": "^8.1.5", + "npm-pick-manifest": "^6.1.1", + "npm-profile": "^5.0.3", + "npm-registry-fetch": "^11.0.0", "npm-user-validate": "^1.0.1", - "npmlog": "~4.1.2", + "npmlog": "^5.0.0", "opener": "^1.5.2", - "pacote": "^11.2.7", + "pacote": "^11.3.5", "parse-conflict-json": "^1.1.1", "qrcode-terminal": "^0.12.0", "read": "~1.0.7", "read-package-json": "^3.0.1", - "read-package-json-fast": "^2.0.2", + "read-package-json-fast": "^2.0.3", "readdir-scoped-modules": "^1.1.0", "rimraf": "^3.0.2", - "semver": "^7.3.4", + "semver": "^7.3.5", "ssri": "^8.0.1", - "tar": "^6.1.0", + "tar": "^6.1.2", "text-table": "~0.2.0", "tiny-relative-date": "^1.3.0", "treeverse": "^1.0.4", @@ -324,24 +157,28 @@ "npx": "bin/npx-cli.js" }, "devDependencies": { - "@mdx-js/mdx": "^1.6.22", - "cmark-gfm": "^0.8.5", - "eslint": "^7.19.0", - "eslint-plugin-import": "^2.22.1", + "eslint": "^7.31.0", + "eslint-plugin-import": "^2.23.4", "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^4.3.1", + "eslint-plugin-promise": "^5.1.0", "eslint-plugin-standard": "^5.0.0", - "jsdom": "^16.4.0", - "licensee": "^8.1.0", - "marked-man": "^0.7.0", - "require-inject": "^1.4.4", - "tap": "^14.11.0", - "yaml": "^1.10.0" + "licensee": "^8.2.0", + "tap": "^15.0.9" }, "engines": { "node": ">=10" } }, + "docs": { + "version": "1.0.0", 
+ "devDependencies": { + "@mdx-js/mdx": "^1.6.22", + "cmark-gfm": "^0.8.3", + "jsdom": "^16.4.0", + "marked-man": "^0.7.0", + "yaml": "^1.10.0" + } + }, "node_modules/@babel/code-frame": { "version": "7.12.13", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.13.tgz", @@ -382,21 +219,6 @@ "url": "https://opencollective.com/babel" } }, - "node_modules/@babel/core/node_modules/json5": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", - "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", - "dev": true, - "dependencies": { - "minimist": "^1.2.5" - }, - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/@babel/core/node_modules/semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", @@ -406,44 +228,26 @@ "semver": "bin/semver" } }, - "node_modules/@babel/core/node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/@babel/generator": { - "version": "7.12.15", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.12.15.tgz", - "integrity": "sha512-6F2xHxBiFXWNSGb7vyCUTBF8RCLY66rS0zEPcP8t/nQyXjha5EuK4z7H5o7fWG8B4M7y6mqVWq1J+1PuwRhecQ==", + "version": "7.14.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.14.3.tgz", + "integrity": "sha512-bn0S6flG/j0xtQdz3hsjJ624h3W0r3llttBMfyHX3YrZ/KtLYr15bjA0FXkgW7FpvrDuTuElXeVjiKlYRpnOFA==", "dev": true, "dependencies": { - "@babel/types": "^7.12.13", + "@babel/types": "^7.14.2", "jsesc": "^2.5.1", "source-map": "^0.5.0" } }, - "node_modules/@babel/generator/node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/@babel/helper-function-name": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.12.13.tgz", - "integrity": "sha512-TZvmPn0UOqmvi5G4vvw0qZTpVptGkB1GL61R6lKvrSdIxGm5Pky7Q3fpKiIkQCAtRCBUwB0PaThlx9vebCDSwA==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.14.2.tgz", + "integrity": "sha512-NYZlkZRydxw+YT56IlhIcS8PAhb+FEUiOzuhFTfqDyPmzAhRge6ua0dQYT/Uh0t/EDHq05/i+e5M2d4XvjgarQ==", "dev": true, "dependencies": { "@babel/helper-get-function-arity": "^7.12.13", "@babel/template": "^7.12.13", - "@babel/types": "^7.12.13" + "@babel/types": "^7.14.2" } }, "node_modules/@babel/helper-get-function-arity": { @@ -456,38 +260,37 @@ } }, "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.12.16", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.12.16.tgz", - "integrity": "sha512-zYoZC1uvebBFmj1wFAlXwt35JLEgecefATtKp20xalwEK8vHAixLBXTGxNrVGEmTT+gzOThUgr8UEdgtalc1BQ==", + "version": "7.13.12", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.13.12.tgz", + "integrity": "sha512-48ql1CLL59aKbU94Y88Xgb2VFy7a95ykGRbJJaaVv+LX5U8wFpLfiGXJJGUozsmA1oEh/o5Bp60Voq7ACyA/Sw==", "dev": true, "dependencies": { - 
"@babel/types": "^7.12.13" + "@babel/types": "^7.13.12" } }, "node_modules/@babel/helper-module-imports": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.12.13.tgz", - "integrity": "sha512-NGmfvRp9Rqxy0uHSSVP+SRIW1q31a7Ji10cLBcqSDUngGentY4FRiHOFZFE1CLU5eiL0oE8reH7Tg1y99TDM/g==", + "version": "7.13.12", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.13.12.tgz", + "integrity": "sha512-4cVvR2/1B693IuOvSI20xqqa/+bl7lqAMR59R4iu39R9aOX8/JoYY1sFaNvUMyMBGnHdwvJgUrzNLoUZxXypxA==", "dev": true, "dependencies": { - "@babel/types": "^7.12.13" + "@babel/types": "^7.13.12" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.12.13.tgz", - "integrity": "sha512-acKF7EjqOR67ASIlDTupwkKM1eUisNAjaSduo5Cz+793ikfnpe7p4Q7B7EWU2PCoSTPWsQkR7hRUWEIZPiVLGA==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.14.2.tgz", + "integrity": "sha512-OznJUda/soKXv0XhpvzGWDnml4Qnwp16GN+D/kZIdLsWoHj05kyu8Rm5kXmMef+rVJZ0+4pSGLkeixdqNUATDA==", "dev": true, "dependencies": { - "@babel/helper-module-imports": "^7.12.13", - "@babel/helper-replace-supers": "^7.12.13", - "@babel/helper-simple-access": "^7.12.13", + "@babel/helper-module-imports": "^7.13.12", + "@babel/helper-replace-supers": "^7.13.12", + "@babel/helper-simple-access": "^7.13.12", "@babel/helper-split-export-declaration": "^7.12.13", - "@babel/helper-validator-identifier": "^7.12.11", + "@babel/helper-validator-identifier": "^7.14.0", "@babel/template": "^7.12.13", - "@babel/traverse": "^7.12.13", - "@babel/types": "^7.12.13", - "lodash": "^4.17.19" + "@babel/traverse": "^7.14.2", + "@babel/types": "^7.14.2" } }, "node_modules/@babel/helper-optimise-call-expression": { @@ -500,30 +303,30 @@ } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.12.13.tgz", - "integrity": "sha512-C+10MXCXJLiR6IeG9+Wiejt9jmtFpxUc3MQqCmPY8hfCjyUGl9kT+B2okzEZrtykiwrc4dbCPdDoz0A/HQbDaA==", + "version": "7.13.0", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.13.0.tgz", + "integrity": "sha512-ZPafIPSwzUlAoWT8DKs1W2VyF2gOWthGd5NGFMsBcMMol+ZhK+EQY/e6V96poa6PA/Bh+C9plWN0hXO1uB8AfQ==", "dev": true }, "node_modules/@babel/helper-replace-supers": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.12.13.tgz", - "integrity": "sha512-pctAOIAMVStI2TMLhozPKbf5yTEXc0OJa0eENheb4w09SrgOWEs+P4nTOZYJQCqs8JlErGLDPDJTiGIp3ygbLg==", + "version": "7.14.4", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.14.4.tgz", + "integrity": "sha512-zZ7uHCWlxfEAAOVDYQpEf/uyi1dmeC7fX4nCf2iz9drnCwi1zvwXL3HwWWNXUQEJ1k23yVn3VbddiI9iJEXaTQ==", "dev": true, "dependencies": { - "@babel/helper-member-expression-to-functions": "^7.12.13", + "@babel/helper-member-expression-to-functions": "^7.13.12", "@babel/helper-optimise-call-expression": "^7.12.13", - "@babel/traverse": "^7.12.13", - "@babel/types": "^7.12.13" + "@babel/traverse": "^7.14.2", + "@babel/types": "^7.14.4" } }, "node_modules/@babel/helper-simple-access": { - "version": "7.12.13", - "resolved": 
"https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.12.13.tgz", - "integrity": "sha512-0ski5dyYIHEfwpWGx5GPWhH35j342JaflmCeQmsPWcrOQDtCN6C1zKAVRFVbK53lPW2c9TsuLLSUDf0tIGJ5hA==", + "version": "7.13.12", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.13.12.tgz", + "integrity": "sha512-7FEjbrx5SL9cWvXioDbnlYTppcZGuCY6ow3/D5vMggb2Ywgu4dMrpTJX0JdQAIcRRUElOIxF3yEooa9gUb9ZbA==", "dev": true, "dependencies": { - "@babel/types": "^7.12.13" + "@babel/types": "^7.13.12" } }, "node_modules/@babel/helper-split-export-declaration": { @@ -536,29 +339,29 @@ } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.12.11", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz", - "integrity": "sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.0.tgz", + "integrity": "sha512-V3ts7zMSu5lfiwWDVWzRDGIN+lnCEUdaXgtVHJgLb1rGaA6jMrtB9EmE7L18foXJIE8Un/A/h6NJfGQp/e1J4A==", "dev": true }, "node_modules/@babel/helpers": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.12.13.tgz", - "integrity": "sha512-oohVzLRZ3GQEk4Cjhfs9YkJA4TdIDTObdBEZGrd6F/T0GPSnuV6l22eMcxlvcvzVIPH3VTtxbseudM1zIE+rPQ==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.14.0.tgz", + "integrity": "sha512-+ufuXprtQ1D1iZTO/K9+EBRn+qPWMJjZSw/S0KlFrxCw4tkrzv9grgpDHkY9MeQTjTY8i2sp7Jep8DfU6tN9Mg==", "dev": true, "dependencies": { "@babel/template": "^7.12.13", - "@babel/traverse": "^7.12.13", - "@babel/types": "^7.12.13" + "@babel/traverse": "^7.14.0", + "@babel/types": "^7.14.0" } }, "node_modules/@babel/highlight": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.12.13.tgz", - "integrity": "sha512-kocDQvIbgMKlWxXe9fof3TQ+gkIPOUSEYhJjqUjvKMez3krV7vbzYCDq39Oj11UAVK7JqPVGQPlgE85dPNlQww==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.0.tgz", + "integrity": "sha512-YSCOwxvTYEIMSGaBQb5kDDsCopDdiUGsqpatp3fOlI4+2HQSkTmEVWnVuySdAC5EWCqSWWTv0ib63RjR7dTBdg==", "dev": true, "dependencies": { - "@babel/helper-validator-identifier": "^7.12.11", + "@babel/helper-validator-identifier": "^7.14.0", "chalk": "^2.0.0", "js-tokens": "^4.0.0" } @@ -635,9 +438,9 @@ } }, "node_modules/@babel/parser": { - "version": "7.12.16", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.12.16.tgz", - "integrity": "sha512-c/+u9cqV6F0+4Hpq01jnJO+GLp2DdT63ppz9Xa+6cHaajM9VFzK/iDXiKK65YtpeVwu+ctfS6iqlMqRgQRzeCw==", + "version": "7.14.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.14.4.tgz", + "integrity": "sha512-ArliyUsWDUqEGfWcmzpGUzNfLxTdTp6WU4IuP6QFSp9gGfWS6boxFCkJSJ/L4+RG8z/FnIU3WxCk6hPL9SSWeA==", "dev": true, "bin": { "parser": "bin/babel-parser.js" @@ -685,12 +488,12 @@ } }, "node_modules/@babel/plugin-transform-parameters": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.12.13.tgz", - "integrity": "sha512-e7QqwZalNiBRHCpJg/P8s/VJeSRYgmtWySs1JwvfwPqhBbiWfOcHDKdeAi6oAyIimoKWBlwc8oTgbZHdhCoVZA==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.14.2.tgz", + 
"integrity": "sha512-NxoVmA3APNCC1JdMXkdYXuQS+EMdqy0vIwyDHeKHiJKRxmp1qGSdb0JLEIoPRhkx6H/8Qi3RJ3uqOCYw8giy9A==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.12.13" + "@babel/helper-plugin-utils": "^7.13.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" @@ -708,62 +511,50 @@ } }, "node_modules/@babel/traverse": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.12.13.tgz", - "integrity": "sha512-3Zb4w7eE/OslI0fTp8c7b286/cQps3+vdLW3UcwC8VSJC6GbKn55aeVVu2QJNuCDoeKyptLOFrPq8WqZZBodyA==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.14.2.tgz", + "integrity": "sha512-TsdRgvBFHMyHOOzcP9S6QU0QQtjxlRpEYOy3mcCO5RgmC305ki42aSAmfZEMSSYBla2oZ9BMqYlncBaKmD/7iA==", "dev": true, "dependencies": { "@babel/code-frame": "^7.12.13", - "@babel/generator": "^7.12.13", - "@babel/helper-function-name": "^7.12.13", + "@babel/generator": "^7.14.2", + "@babel/helper-function-name": "^7.14.2", "@babel/helper-split-export-declaration": "^7.12.13", - "@babel/parser": "^7.12.13", - "@babel/types": "^7.12.13", + "@babel/parser": "^7.14.2", + "@babel/types": "^7.14.2", "debug": "^4.1.0", - "globals": "^11.1.0", - "lodash": "^4.17.19" - } - }, - "node_modules/@babel/traverse/node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true, - "engines": { - "node": ">=4" + "globals": "^11.1.0" } }, "node_modules/@babel/types": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.12.13.tgz", - "integrity": "sha512-oKrdZTld2im1z8bDwTOQvUbxKwE+854zc16qWZQlcTqMN00pWxHQ4ZeOq0yDMnisOpRykH2/5Qqcrk/OlbAjiQ==", + "version": "7.14.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.14.4.tgz", + "integrity": "sha512-lCj4aIs0xUefJFQnwwQv2Bxg7Omd6bgquZ6LGC+gGMh6/s5qDVfjuCMlDmYQ15SLsWHd9n+X3E75lKIhl5Lkiw==", "dev": true, "dependencies": { - "@babel/helper-validator-identifier": "^7.12.11", - "lodash": "^4.17.19", + "@babel/helper-validator-identifier": "^7.14.0", "to-fast-properties": "^2.0.0" } }, "node_modules/@blueoak/list": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@blueoak/list/-/list-1.0.2.tgz", - "integrity": "sha512-KyqT0kkdxgbGys9mvo/1Mgdt/LGvUFPCZIK9pWPIfOM2mYzMDd/eVYy4sMP1YqvVI129k0alxRyM53H2MAs/Nw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@blueoak/list/-/list-2.0.0.tgz", + "integrity": "sha512-yQ6/CTy6DYvmJOAIw/BJjKeNG2ZyF8uxgTN8Yvcv4L9YavoVp9xUgmoVUKN5l24NGPDQpswavNanHOqB00ZNXg==", "dev": true }, "node_modules/@eslint/eslintrc": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.3.0.tgz", - "integrity": "sha512-1JTKgrOKAHVivSvOYw+sJOunkBjUOvjqWk1DPja7ZFhIS2mX/4EgTT8M7eTK9jrKhL/FvXXEbQwIs3pg1xp3dg==", + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz", + "integrity": "sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==", "dev": true, "dependencies": { "ajv": "^6.12.4", "debug": "^4.1.1", "espree": "^7.3.0", - "globals": "^12.1.0", + "globals": "^13.9.0", "ignore": "^4.0.6", "import-fresh": "^3.2.1", "js-yaml": "^3.13.1", - "lodash": "^4.17.20", "minimatch": "^3.0.4", "strip-json-comments": "^3.1.1" }, @@ -771,6 +562,157 @@ "node": "^10.12.0 || >=12.0.0" } }, + 
"node_modules/@eslint/eslintrc/node_modules/globals": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.10.0.tgz", + "integrity": "sha512-piHC3blgLGFjvOuMmWZX60f+na1lXFDhQXBf1UYp2fXPXqvEUbOhNwi6BsQ0bQishwedgnjkwv1d9zKf+MWw3g==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@eslint/eslintrc/node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", + "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.0.tgz", + "integrity": "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==", + "dev": true + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": 
"sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/@mdx-js/mdx": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-1.6.22.tgz", @@ -813,9 +755,9 @@ } }, "node_modules/@npmcli/arborist": { - "version": "2.2.5", - "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-2.2.5.tgz", - "integrity": "sha512-nLnhRZsUa1kPryyI0N6hLGX6lsQTFDqBJRTNHmZNmjgzP7ZBKiqz8y6ItsouT2CpWhvmoIpnstLyoglIQyo0YQ==", + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-2.8.0.tgz", + "integrity": "sha512-R9rTyak1rGdmVTyiU14dgBb+qMllY3B6I8hp7FB4xXsU9dJDrYZJR8I+191CMo5Y1941jTDCtNcXXW9TldPEFQ==", "inBundle": true, "dependencies": { "@npmcli/installed-package-contents": "^1.0.7", @@ -824,30 +766,38 @@ "@npmcli/move-file": "^1.1.0", "@npmcli/name-from-folder": "^1.0.1", "@npmcli/node-gyp": "^1.0.1", + "@npmcli/package-json": "^1.0.1", "@npmcli/run-script": "^1.8.2", "bin-links": "^2.2.1", "cacache": "^15.0.3", "common-ancestor-path": "^1.0.1", "json-parse-even-better-errors": "^2.3.1", - "json-stringify-nice": "^1.1.1", + "json-stringify-nice": "^1.1.4", + "mkdirp": "^1.0.4", "mkdirp-infer-owner": "^2.0.0", "npm-install-checks": "^4.0.0", - "npm-package-arg": "^8.1.0", + "npm-package-arg": "^8.1.5", "npm-pick-manifest": "^6.1.0", - "npm-registry-fetch": "^9.0.0", - "pacote": "^11.2.6", + "npm-registry-fetch": "^11.0.0", + "pacote": "^11.3.5", "parse-conflict-json": "^1.1.1", + "proc-log": "^1.0.0", "promise-all-reject-late": "^1.0.0", "promise-call-limit": "^1.0.1", "read-package-json-fast": "^2.0.2", "readdir-scoped-modules": "^1.1.0", - "semver": "^7.3.4", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "ssri": "^8.0.1", "tar": "^6.1.0", "treeverse": "^1.0.4", "walk-up-path": "^1.0.0" }, "bin": { "arborist": "bin/index.js" + }, + "engines": { + "node": ">= 10" } }, "node_modules/@npmcli/ci-detect": { @@ -857,9 +807,9 @@ "inBundle": true }, "node_modules/@npmcli/config": { - "version": "1.2.9", - "resolved": "https://registry.npmjs.org/@npmcli/config/-/config-1.2.9.tgz", - 
"integrity": "sha512-d7mx35ju9HFg0gNHiwMU0HXCJk1esAeRdMktLeD+K2K2awkZyEm1FyX+g8iuZbmWGAaFP/aGiXo7a0lKlmp6Xg==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@npmcli/config/-/config-2.2.0.tgz", + "integrity": "sha512-y0V3F7RCWXy8kBOvKvKSRUNKRobLB6vL/UNchy/6+IUNIqu+UyrY3Z7jvj1ZA/AkYc/0WkCUtppCo+bPhMU8Aw==", "inBundle": true, "dependencies": { "ini": "^2.0.0", @@ -876,7 +826,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/@npmcli/disparity-colors/-/disparity-colors-1.0.1.tgz", "integrity": "sha512-kQ1aCTTU45mPXN+pdAaRxlxr3OunkyztjbbxDY/aIcPS5CnCUrx+1+NvA6pTcYR7wmLZe37+Mi5v3nfbwPxq3A==", - "inBundle": true, "dependencies": { "ansi-styles": "^4.3.0" }, @@ -885,19 +834,18 @@ } }, "node_modules/@npmcli/git": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-2.0.6.tgz", - "integrity": "sha512-a1MnTfeRPBaKbFY07fd+6HugY1WAkKJzdiJvlRub/9o5xz2F/JtPacZZapx5zRJUQFIzSL677vmTSxEcDMrDbg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-2.1.0.tgz", + "integrity": "sha512-/hBFX/QG1b+N7PZBFs0bi+evgRZcK9nWBxQKZkGoXUT5hJSwl5c4d7y8/hm+NQZRPhQ67RzFaj5UM9YeyKoryw==", "inBundle": true, "dependencies": { - "@npmcli/promise-spawn": "^1.1.0", + "@npmcli/promise-spawn": "^1.3.2", "lru-cache": "^6.0.0", - "mkdirp": "^1.0.3", - "npm-pick-manifest": "^6.0.0", + "mkdirp": "^1.0.4", + "npm-pick-manifest": "^6.1.1", "promise-inflight": "^1.0.1", "promise-retry": "^2.0.1", - "semver": "^7.3.2", - "unique-filename": "^1.1.1", + "semver": "^7.3.5", "which": "^2.0.2" } }, @@ -933,9 +881,9 @@ } }, "node_modules/@npmcli/metavuln-calculator": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-1.1.0.tgz", - "integrity": "sha512-fb51NyiWHjeqqFez9FXhvr+E2Dv4ZjPGVgnj8QC1xjHRSw4gMRIO8pNCzU11WYQ2wZxoHBhPMgovZGxP5lP74g==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-1.1.1.tgz", + "integrity": "sha512-9xe+ZZ1iGVaUovBVFI9h3qW+UuECUzhvZPxK9RaEA2mjU26o5D0JloGYWwLYvQELJNmBdQB6rrpuN8jni6LwzQ==", "inBundle": true, "dependencies": { "cacache": "^15.0.5", @@ -968,6 +916,15 @@ "integrity": "sha512-yrJUe6reVMpktcvagumoqD9r08fH1iRo01gn1u0zoCApa9lnZGEigVKUd2hzsCId4gdtkZZIVscLhNxMECKgRg==", "inBundle": true }, + "node_modules/@npmcli/package-json": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-1.0.1.tgz", + "integrity": "sha512-y6jnu76E9C23osz8gEMBayZmaZ69vFOIk8vR1FJL/wbEJ54+9aVG9rLTjQKSXfgYZEr50nw1txBBFfBZZe+bYg==", + "inBundle": true, + "dependencies": { + "json-parse-even-better-errors": "^2.3.1" + } + }, "node_modules/@npmcli/promise-spawn": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-1.3.2.tgz", @@ -978,16 +935,15 @@ } }, "node_modules/@npmcli/run-script": { - "version": "1.8.3", - "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-1.8.3.tgz", - "integrity": "sha512-ELPGWAVU/xyU+A+H3pEPj0QOvYwLTX71RArXcClFzeiyJ/b/McsZ+d0QxpznvfFtZzxGN/gz/1cvlqICR4/suQ==", + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-1.8.5.tgz", + "integrity": "sha512-NQspusBCpTjNwNRFMtz2C5MxoxyzlbuJ4YEhxAKrIonTiirKDtatsZictx9RgamQIx6+QuHMNmPl0wQdoESs9A==", "inBundle": true, "dependencies": { "@npmcli/node-gyp": "^1.0.2", "@npmcli/promise-spawn": "^1.3.2", "infer-owner": "^1.0.4", "node-gyp": "^7.1.0", - "puka": "^1.0.1", "read-package-json-fast": "^2.0.1" } }, @@ 
-1071,9 +1027,9 @@ } }, "node_modules/acorn-jsx": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.1.tgz", - "integrity": "sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng==", + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "dev": true, "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" @@ -1189,9 +1145,9 @@ "inBundle": true }, "node_modules/anymatch": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", - "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", "dev": true, "dependencies": { "normalize-path": "^3.0.0", @@ -1202,15 +1158,15 @@ } }, "node_modules/append-transform": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-1.0.0.tgz", - "integrity": "sha512-P009oYkeHyU742iSZJzZZywj4QRJdnTWffaKuJQLablCZ1uz6/cW4yaRgcDaoQ+uwOxxnt0gRUcwfsNP2ri0gw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz", + "integrity": "sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==", "dev": true, "dependencies": { - "default-require-extensions": "^2.0.0" + "default-require-extensions": "^3.0.0" }, "engines": { - "node": ">=4" + "node": ">=8" } }, "node_modules/aproba": { @@ -1235,12 +1191,6 @@ "readable-stream": "^2.0.6" } }, - "node_modules/arg": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", - "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", - "dev": true - }, "node_modules/argparse": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", @@ -1260,15 +1210,15 @@ } }, "node_modules/array-includes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.2.tgz", - "integrity": "sha512-w2GspexNQpx+PutG3QpT437/BenZBj0M/MZGn5mzv/MofYqo0xmRHzn4lFsoDlWJ+THYsGJmFlW68WlDFx7VRw==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.3.tgz", + "integrity": "sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A==", "dev": true, "dependencies": { - "call-bind": "^1.0.0", + "call-bind": "^1.0.2", "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.1", - "get-intrinsic": "^1.0.1", + "es-abstract": "^1.18.0-next.2", + "get-intrinsic": "^1.1.1", "is-string": "^1.0.5" }, "engines": { @@ -1329,12 +1279,12 @@ } }, "node_modules/async-hook-domain": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/async-hook-domain/-/async-hook-domain-1.1.3.tgz", - "integrity": "sha512-ZovMxSbADV3+biB7oR1GL5lGyptI24alp0LWHlmz1OFc5oL47pz3EiIF6nXOkDW7yLqih4NtsiYduzdDW0i+Wg==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/async-hook-domain/-/async-hook-domain-2.0.3.tgz", + "integrity": "sha512-MadiLLDEZRZzZwcm0dgS+K99qXZ4H2saAUwUgwzFulbAkXrKi3AX5FvWS3FFTQtLMwrqcGqAJe6o12KrObejQA==", "dev": true, - 
"dependencies": { - "source-map-support": "^0.5.11" + "engines": { + "node": ">=10" } }, "node_modules/asynckit": { @@ -1411,9 +1361,9 @@ } }, "node_modules/balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "inBundle": true }, "node_modules/base64-js": { @@ -1466,16 +1416,18 @@ "version": "2.2.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", - "inBundle": true, "engines": { "node": ">=8" } }, "node_modules/bind-obj-methods": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/bind-obj-methods/-/bind-obj-methods-2.0.0.tgz", - "integrity": "sha512-3/qRXczDi2Cdbz6jE+W3IflJOutRVica8frpBn14de1mBOkzDo+6tY33kNhvkw54Kn3PzRRD2VnGbGPcTAk4sw==", - "dev": true + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bind-obj-methods/-/bind-obj-methods-3.0.0.tgz", + "integrity": "sha512-nLEaaz3/sEzNSyPWRsN9HNsqwk1AUyECtGj+XwGdIi3xABnEqecvXtIJ0wehQXuuER5uZ/5fTs2usONgYjG+iw==", + "dev": true, + "engines": { + "node": ">=10" + } }, "node_modules/bindings": { "version": "1.5.0", @@ -1576,18 +1528,18 @@ "inBundle": true }, "node_modules/byte-size": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/byte-size/-/byte-size-7.0.0.tgz", - "integrity": "sha512-NNiBxKgxybMBtWdmvx7ZITJi4ZG+CYUgwOSZTfqB1qogkRHrhbQE/R2r5Fh94X+InN5MCYz6SvB/ejHMj/HbsQ==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/byte-size/-/byte-size-7.0.1.tgz", + "integrity": "sha512-crQdqyCwhokxwV1UyDzLZanhkugAgft7vt0qbbdt60C6Zf3CAiGmtUCylbtYwrU6loOUw3euGrNtW1J651ot1A==", "inBundle": true, "engines": { "node": ">=10" } }, "node_modules/cacache": { - "version": "15.0.5", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.0.5.tgz", - "integrity": "sha512-lloiL22n7sOjEEXdL8NAjTgv9a1u43xICE9/203qonkZUCj5X1UEWIdf2/Y0d6QcCtMzbKQyhrcDbdvlZTs/+A==", + "version": "15.2.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.2.0.tgz", + "integrity": "sha512-uKoJSHmnrqXgthDFx/IU6ED/5xd+NNGe+Bb+kLZy7Ku4P+BaiWEUflAKPZ7eAzsYGcsAGASJZsybXp+quEcHTw==", "inBundle": true, "dependencies": { "@npmcli/move-file": "^1.0.1", @@ -1604,7 +1556,7 @@ "p-map": "^4.0.0", "promise-inflight": "^1.0.1", "rimraf": "^3.0.2", - "ssri": "^8.0.0", + "ssri": "^8.0.1", "tar": "^6.0.2", "unique-filename": "^1.1.1" }, @@ -1613,29 +1565,18 @@ } }, "node_modules/caching-transform": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-3.0.2.tgz", - "integrity": "sha512-Mtgcv3lh3U0zRii/6qVgQODdPA4G3zhG+jtbCWj39RXuUFTMzH0vcdMtaJS1jPowd+It2Pqr6y3NJMQqOqCE2w==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz", + "integrity": "sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==", "dev": true, "dependencies": { - "hasha": "^3.0.0", - "make-dir": "^2.0.0", - "package-hash": "^3.0.0", - "write-file-atomic": "^2.4.2" + "hasha": "^5.0.0", + "make-dir": "^3.0.0", + "package-hash": "^4.0.0", + "write-file-atomic": "^3.0.0" }, "engines": { - "node": ">=6" - } - }, - 
"node_modules/caching-transform/node_modules/write-file-atomic": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", - "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.11", - "imurmurhash": "^0.1.4", - "signal-exit": "^3.0.2" + "node": ">=8" } }, "node_modules/call-bind": { @@ -1651,12 +1592,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/caller": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/caller/-/caller-1.0.1.tgz", - "integrity": "sha1-uFGGD3Dhlds9J3OVqhp+I+ow7PU=", - "dev": true - }, "node_modules/callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -1701,9 +1636,9 @@ } }, "node_modules/chalk": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz", + "integrity": "sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg==", "inBundle": true, "dependencies": { "ansi-styles": "^4.1.0", @@ -1845,9 +1780,9 @@ } }, "node_modules/cli-table3/node_modules/string-width": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", - "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", + "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", "inBundle": true, "dependencies": { "emoji-regex": "^8.0.0", @@ -1979,7 +1914,7 @@ "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", - "dev": true, + "inBundle": true, "bin": { "color-support": "bin.js" } @@ -2050,15 +1985,6 @@ "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=", "inBundle": true }, - "node_modules/contains-path": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", - "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/convert-source-map": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz", @@ -2102,31 +2028,6 @@ "node": ">=6" } }, - "node_modules/cp-file": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/cp-file/-/cp-file-6.2.0.tgz", - "integrity": "sha512-fmvV4caBnofhPe8kOcitBwSn2f39QLjnAnGq3gO9dfd75mUytzKNZB1hde6QHunW2Rt+OwuBOMc3i1tNElbszA==", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.2", - "make-dir": "^2.0.0", - "nested-error-stacks": "^2.0.0", - "pify": "^4.0.1", - "safe-buffer": "^5.0.1" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/cp-file/node_modules/pify": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", - "dev": true, - "engines": { - "node": ">=6" - } - }, 
"node_modules/cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", @@ -2192,9 +2093,9 @@ } }, "node_modules/debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", + "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", "inBundle": true, "dependencies": { "ms": "2.1.2" @@ -2266,15 +2167,24 @@ "dev": true }, "node_modules/default-require-extensions": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-2.0.0.tgz", - "integrity": "sha1-9fj7sYp9bVCyH2QfZJ67Uiz+JPc=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.0.tgz", + "integrity": "sha512-ek6DpXq/SCpvjhpFsLFRVtIxJCRw6fUR42lYMVZuUMK7n8eMz4Uh5clckdBjEpLhn/gEBZo7hDJnJcwdKLKQjg==", "dev": true, "dependencies": { - "strip-bom": "^3.0.0" + "strip-bom": "^4.0.0" }, "engines": { - "node": ">=4" + "node": ">=8" + } + }, + "node_modules/default-require-extensions/node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "engines": { + "node": ">=8" } }, "node_modules/defaults": { @@ -2361,20 +2271,10 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", - "inBundle": true, "engines": { "node": ">=0.3.1" } }, - "node_modules/diff-frag": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/diff-frag/-/diff-frag-1.0.1.tgz", - "integrity": "sha512-6/v2PC/6UTGcWPPetb9acL8foberUg/CtPdALeJUdD1B/weHNvzftoo00gYznqHGRhHEbykUGzqfG9RWOSr5yw==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/docopt": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/docopt/-/docopt-0.6.2.tgz", @@ -2384,6 +2284,10 @@ "node": ">=0.10.0" } }, + "node_modules/docs": { + "resolved": "docs", + "link": true + }, "node_modules/doctrine": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", @@ -2465,9 +2369,9 @@ } }, "node_modules/env-paths": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.0.tgz", - "integrity": "sha512-6u0VYSCo/OW6IoD5WCLLy9JUGARbamfSavcNXry/eu8aHVFei6CD3Sw+VGX5alea1i9pgPHW0mbu6Xj0uBh7gA==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", "inBundle": true, "engines": { "node": ">=6" @@ -2489,25 +2393,27 @@ } }, "node_modules/es-abstract": { - "version": "1.18.0-next.2", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.2.tgz", - "integrity": "sha512-Ih4ZMFHEtZupnUh6497zEL4y2+w8+1ljnCyaTa+adcoafI1GOvMwFlDjBLfWR7y9VLfrjRJe9ocuHY1PSR9jjw==", + "version": "1.18.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.3.tgz", + "integrity": 
"sha512-nQIr12dxV7SSxE6r6f1l3DtAeEYdsGpps13dR0TwJg1S8gyp4ZPgy3FZcHBgbiQqnoqSTb+oC+kO4UQ0C/J8vw==", "dev": true, "dependencies": { "call-bind": "^1.0.2", "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2", + "get-intrinsic": "^1.1.1", "has": "^1.0.3", - "has-symbols": "^1.0.1", - "is-callable": "^1.2.2", + "has-symbols": "^1.0.2", + "is-callable": "^1.2.3", "is-negative-zero": "^2.0.1", - "is-regex": "^1.1.1", - "object-inspect": "^1.9.0", + "is-regex": "^1.1.3", + "is-string": "^1.0.6", + "object-inspect": "^1.10.3", "object-keys": "^1.1.1", "object.assign": "^4.1.2", - "string.prototype.trimend": "^1.0.3", - "string.prototype.trimstart": "^1.0.3" + "string.prototype.trimend": "^1.0.4", + "string.prototype.trimstart": "^1.0.4", + "unbox-primitive": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -2540,22 +2446,25 @@ "dev": true }, "node_modules/escape-string-regexp": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", - "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "dev": true, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/escodegen": { - "version": "1.14.3", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz", - "integrity": "sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.0.0.tgz", + "integrity": "sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==", "dev": true, "dependencies": { "esprima": "^4.0.1", - "estraverse": "^4.2.0", + "estraverse": "^5.2.0", "esutils": "^2.0.2", "optionator": "^0.8.1" }, @@ -2564,12 +2473,21 @@ "esgenerate": "bin/esgenerate.js" }, "engines": { - "node": ">=4.0" + "node": ">=6.0" }, "optionalDependencies": { "source-map": "~0.6.1" } }, + "node_modules/escodegen/node_modules/estraverse": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", + "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, "node_modules/escodegen/node_modules/levn": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", @@ -2609,6 +2527,16 @@ "node": ">= 0.8.0" } }, + "node_modules/escodegen/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/escodegen/node_modules/type-check": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", @@ -2622,29 +2550,32 @@ } }, "node_modules/eslint": { - "version": "7.20.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.20.0.tgz", - "integrity": "sha512-qGi0CTcOGP2OtCQBgWZlQjcTuP0XkIpYFj25XtRTQSHC+umNnp7UMshr2G8SLsRFYDdAPFeHOsiteadmMH02Yw==", + 
"version": "7.31.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.31.0.tgz", + "integrity": "sha512-vafgJpSh2ia8tnTkNUkwxGmnumgckLh5aAbLa1xRmIn9+owi8qBNGKL+B881kNKNTy7FFqTEkpNkUvmw0n6PkA==", "dev": true, "dependencies": { "@babel/code-frame": "7.12.11", - "@eslint/eslintrc": "^0.3.0", + "@eslint/eslintrc": "^0.4.3", + "@humanwhocodes/config-array": "^0.5.0", "ajv": "^6.10.0", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", "debug": "^4.0.1", "doctrine": "^3.0.0", "enquirer": "^2.3.5", + "escape-string-regexp": "^4.0.0", "eslint-scope": "^5.1.1", "eslint-utils": "^2.1.0", "eslint-visitor-keys": "^2.0.0", "espree": "^7.3.1", "esquery": "^1.4.0", "esutils": "^2.0.2", - "file-entry-cache": "^6.0.0", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", "functional-red-black-tree": "^1.0.1", - "glob-parent": "^5.0.0", - "globals": "^12.1.0", + "glob-parent": "^5.1.2", + "globals": "^13.6.0", "ignore": "^4.0.6", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", @@ -2652,7 +2583,7 @@ "js-yaml": "^3.13.1", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", - "lodash": "^4.17.20", + "lodash.merge": "^4.6.2", "minimatch": "^3.0.4", "natural-compare": "^1.4.0", "optionator": "^0.9.1", @@ -2661,7 +2592,7 @@ "semver": "^7.2.1", "strip-ansi": "^6.0.0", "strip-json-comments": "^3.1.0", - "table": "^6.0.4", + "table": "^6.0.9", "text-table": "^0.2.0", "v8-compile-cache": "^2.0.3" }, @@ -2701,12 +2632,12 @@ "dev": true }, "node_modules/eslint-module-utils": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.0.tgz", - "integrity": "sha512-6j9xxegbqe8/kZY8cYpcp0xhbK0EgJlg3g9mib3/miLaExuuwc3n5UEfSnU6hWMbT0FAYVvDbL9RrRgpUeQIvA==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.1.tgz", + "integrity": "sha512-ZXI9B8cxAJIH4nfkhTwcRTEAnrVfobYqwjWy/QMCZ8rHkZHFjf9yO4BzpiF9kCSfNlMG54eKigISHpX0+AaT4A==", "dev": true, "dependencies": { - "debug": "^2.6.9", + "debug": "^3.2.7", "pkg-dir": "^2.0.0" }, "engines": { @@ -2714,20 +2645,14 @@ } }, "node_modules/eslint-module-utils/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, "dependencies": { - "ms": "2.0.0" + "ms": "^2.1.1" } }, - "node_modules/eslint-module-utils/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - }, "node_modules/eslint-plugin-es": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz", @@ -2748,23 +2673,25 @@ } }, "node_modules/eslint-plugin-import": { - "version": "2.22.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.22.1.tgz", - "integrity": "sha512-8K7JjINHOpH64ozkAhpT3sd+FswIZTfMZTjdx052pnWrgRCVfp8op9tbjpAk3DdUeI/Ba4C8OjdC0r90erHEOw==", + "version": "2.23.4", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.23.4.tgz", + "integrity": "sha512-6/wP8zZRsnQFiR3iaPFgh5ImVRM1WN5NUWfTIRqwOdeiGJlBcSk82o1FEVq8yXmy4lkIzTo7YhHCIxlU/2HyEQ==", "dev": true, 
"dependencies": { - "array-includes": "^3.1.1", - "array.prototype.flat": "^1.2.3", - "contains-path": "^0.1.0", + "array-includes": "^3.1.3", + "array.prototype.flat": "^1.2.4", "debug": "^2.6.9", - "doctrine": "1.5.0", + "doctrine": "^2.1.0", "eslint-import-resolver-node": "^0.3.4", - "eslint-module-utils": "^2.6.0", + "eslint-module-utils": "^2.6.1", + "find-up": "^2.0.0", "has": "^1.0.3", + "is-core-module": "^2.4.0", "minimatch": "^3.0.4", - "object.values": "^1.1.1", - "read-pkg-up": "^2.0.0", - "resolve": "^1.17.0", + "object.values": "^1.1.3", + "pkg-up": "^2.0.0", + "read-pkg-up": "^3.0.0", + "resolve": "^1.20.0", "tsconfig-paths": "^3.9.0" }, "engines": { @@ -2784,13 +2711,12 @@ } }, "node_modules/eslint-plugin-import/node_modules/doctrine": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", - "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", "dev": true, "dependencies": { - "esutils": "^2.0.2", - "isarray": "^1.0.0" + "esutils": "^2.0.2" }, "engines": { "node": ">=0.10.0" @@ -2841,12 +2767,15 @@ } }, "node_modules/eslint-plugin-promise": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-4.3.1.tgz", - "integrity": "sha512-bY2sGqyptzFBDLh/GMbAxfdJC+b0f23ME63FOE4+Jao0oZ3E1LEwFtWJX/1pGMJLiTtrSSern2CRM/g+dfc0eQ==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-5.1.0.tgz", + "integrity": "sha512-NGmI6BH5L12pl7ScQHbg7tvtk4wPxxj8yPHH47NvSmMtFneC077PSeY3huFj06ZWZvtbfxSPt3RuOQD5XcR4ng==", "dev": true, "engines": { - "node": ">=6" + "node": "^10.12.0 || >=12.0.0" + }, + "peerDependencies": { + "eslint": "^7.0.0" } }, "node_modules/eslint-plugin-standard": { @@ -2911,9 +2840,9 @@ } }, "node_modules/eslint-visitor-keys": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.0.0.tgz", - "integrity": "sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", "dev": true, "engines": { "node": ">=10" @@ -2937,6 +2866,21 @@ "node": ">=8" } }, + "node_modules/eslint/node_modules/globals": { + "version": "13.9.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.9.0.tgz", + "integrity": "sha512-74/FduwI/JaIrr1H8e71UbDE+5x7pIPs1C2rrwC52SszOo043CsWOZEMW7o2Y58xwm9b+0RBKDxY5n2sUpEFxA==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/eslint/node_modules/strip-ansi": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", @@ -2949,13 +2893,16 @@ "node": ">=8" } }, - "node_modules/esm": { - "version": "3.2.25", - "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", - "integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==", + "node_modules/eslint/node_modules/type-fest": { + "version": "0.20.2", + "resolved": 
"https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", "dev": true, "engines": { - "node": ">=6" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/espree": { @@ -3103,9 +3050,9 @@ "dev": true }, "node_modules/file-entry-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.0.tgz", - "integrity": "sha512-fqoO76jZ3ZnYrXLDRxBR1YvOvc0k844kcOg40bgsPrE25LAb/PDqTY+ho64Xh2c8ZXgIKldchCFHczG2UVRcWA==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", "dev": true, "dependencies": { "flat-cache": "^3.0.4" @@ -3133,42 +3080,45 @@ } }, "node_modules/find-cache-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", - "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.1.tgz", + "integrity": "sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ==", "dev": true, "dependencies": { "commondir": "^1.0.1", - "make-dir": "^2.0.0", - "pkg-dir": "^3.0.0" + "make-dir": "^3.0.2", + "pkg-dir": "^4.1.0" }, "engines": { - "node": ">=6" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/avajs/find-cache-dir?sponsor=1" } }, "node_modules/find-cache-dir/node_modules/find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, "dependencies": { - "locate-path": "^3.0.0" + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/find-cache-dir/node_modules/locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, "dependencies": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" + "p-locate": "^4.1.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/find-cache-dir/node_modules/p-limit": { @@ -3187,15 +3137,15 @@ } }, "node_modules/find-cache-dir/node_modules/p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dev": true, 
"dependencies": { - "p-limit": "^2.0.0" + "p-limit": "^2.2.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/find-cache-dir/node_modules/p-try": { @@ -3207,16 +3157,25 @@ "node": ">=6" } }, + "node_modules/find-cache-dir/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/find-cache-dir/node_modules/pkg-dir": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", - "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", "dev": true, "dependencies": { - "find-up": "^3.0.0" + "find-up": "^4.0.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/find-up": { @@ -3256,81 +3215,19 @@ "integrity": "sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA==", "dev": true }, - "node_modules/flow-parser": { - "version": "0.144.0", - "resolved": "https://registry.npmjs.org/flow-parser/-/flow-parser-0.144.0.tgz", - "integrity": "sha512-si2lCamPs0N1QcTiQY8p9RxvVsVGbx4rpkX6dcfUQ2OOvEg6Cya7LItlPqcx54Gtakdx1St6TseQlV8nafYW5g==", - "dev": true, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/flow-remove-types": { - "version": "2.144.0", - "resolved": "https://registry.npmjs.org/flow-remove-types/-/flow-remove-types-2.144.0.tgz", - "integrity": "sha512-1ctvKynmoMtqN9Xcyx3NKLGtcpBy9YqW50F9ENX+ivMnRBxks+UBI8iFC+AObTqAfk9C5eUTTt6vMZkmTrskdg==", - "dev": true, - "dependencies": { - "flow-parser": "^0.144.0", - "pirates": "^3.0.2", - "vlq": "^0.2.1" - }, - "bin": { - "flow-node": "flow-node", - "flow-remove-types": "flow-remove-types" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/foreground-child": { - "version": "1.5.6", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-1.5.6.tgz", - "integrity": "sha1-T9ca0t/elnibmApcCilZN8svXOk=", - "dev": true, - "dependencies": { - "cross-spawn": "^4", - "signal-exit": "^3.0.0" - } - }, - "node_modules/foreground-child/node_modules/cross-spawn": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-4.0.2.tgz", - "integrity": "sha1-e5JHYhwjrf3ThWAEqCPL45dCTUE=", - "dev": true, - "dependencies": { - "lru-cache": "^4.0.1", - "which": "^1.2.9" - } - }, - "node_modules/foreground-child/node_modules/lru-cache": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", - "dev": true, - "dependencies": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" - } - }, - "node_modules/foreground-child/node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", + "integrity": 
"sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", "dev": true, "dependencies": { - "isexe": "^2.0.0" + "cross-spawn": "^7.0.0", + "signal-exit": "^3.0.2" }, - "bin": { - "which": "bin/which" + "engines": { + "node": ">=8.0.0" } }, - "node_modules/foreground-child/node_modules/yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=", - "dev": true - }, "node_modules/forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", @@ -3341,19 +3238,39 @@ } }, "node_modules/form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "inBundle": true, + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "dev": true, "dependencies": { "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", + "combined-stream": "^1.0.8", "mime-types": "^2.1.12" }, "engines": { - "node": ">= 0.12" + "node": ">= 6" } }, + "node_modules/fromentries": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz", + "integrity": "sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, "node_modules/fs-access": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/fs-access/-/fs-access-2.0.0.tgz", @@ -3418,9 +3335,9 @@ "inBundle": true }, "node_modules/function-loop": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/function-loop/-/function-loop-1.0.2.tgz", - "integrity": "sha512-Iw4MzMfS3udk/rqxTiDDCllhGwlOrsr50zViTOO/W6lS/9y6B1J0BD2VZzrnWUYBJsl3aeqjgR5v7bWWhZSYbA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/function-loop/-/function-loop-2.0.1.tgz", + "integrity": "sha512-ktIR+O6i/4h+j/ZhZJNdzeI4i9lEPeEK6UPR2EVyTVBqOwcU3Za9xYKLH64ZR9HmcROyRrOkizNyjjtWJzDDkQ==", "dev": true }, "node_modules/functional-red-black-tree": { @@ -3430,51 +3347,23 @@ "dev": true }, "node_modules/gauge": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", - "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.1.tgz", + "integrity": "sha512-6STz6KdQgxO4S/ko+AbjlFGGdGcknluoqU+79GOFCDqqyYj5OanQf9AjxwN0jCidtT+ziPMmPSt9E4hfQ0CwIQ==", "inBundle": true, "dependencies": { - "aproba": "^1.0.3", + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.2", "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", + "has-unicode": "^2.0.1", + "object-assign": "^4.1.1", "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" - } - }, - "node_modules/gauge/node_modules/aproba": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", - "integrity": 
"sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", - "inBundle": true - }, - "node_modules/gauge/node_modules/is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "inBundle": true, - "dependencies": { - "number-is-nan": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/gauge/node_modules/string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", - "inBundle": true, - "dependencies": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" + "string-width": "^1.0.1 || ^2.0.0", + "strip-ansi": "^3.0.1 || ^4.0.0", + "wide-align": "^1.1.2" }, "engines": { - "node": ">=0.10.0" + "node": ">=10" } }, "node_modules/gensync": { @@ -3509,6 +3398,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", @@ -3525,9 +3423,9 @@ "dev": true }, "node_modules/glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", "inBundle": true, "dependencies": { "fs.realpath": "^1.0.0", @@ -3545,9 +3443,9 @@ } }, "node_modules/glob-parent": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz", - "integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, "dependencies": { "is-glob": "^4.0.1" @@ -3557,18 +3455,12 @@ } }, "node_modules/globals": { - "version": "12.4.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", - "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", "dev": true, - "dependencies": { - "type-fest": "^0.8.1" - }, "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=4" } }, "node_modules/graceful-fs": { @@ -3612,6 +3504,15 @@ "node": ">= 0.4.0" } }, + "node_modules/has-bigints": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz", + "integrity": 
"sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -3622,9 +3523,9 @@ } }, "node_modules/has-symbols": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", - "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", "dev": true, "engines": { "node": ">= 0.4" @@ -3640,15 +3541,19 @@ "inBundle": true }, "node_modules/hasha": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/hasha/-/hasha-3.0.0.tgz", - "integrity": "sha1-UqMvq4Vp1BymmmH/GiFPjrfIvTk=", + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/hasha/-/hasha-5.2.2.tgz", + "integrity": "sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==", "dev": true, "dependencies": { - "is-stream": "^1.0.1" + "is-stream": "^2.0.0", + "type-fest": "^0.8.0" }, "engines": { - "node": ">=4" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/hast-to-hyperscript": { @@ -3720,12 +3625,6 @@ "url": "https://opencollective.com/unified" } }, - "node_modules/hast-util-raw/node_modules/parse5": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", - "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", - "dev": true - }, "node_modules/hast-util-to-parse5": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz", @@ -3761,9 +3660,9 @@ } }, "node_modules/hosted-git-info": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-3.0.8.tgz", - "integrity": "sha512-aXpmwoOhRBrw6X3j0h5RloK4x1OzsxMPyxqIHyNfSe2pypkVTZFpEiRoSipPEPlMrh0HW/XsjkJ5WgnCirpNUw==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.0.2.tgz", + "integrity": "sha512-c9OGXbZ3guC/xOlCg1Ci/VgWlwsqDv1yMQL1CWqXDL0hDjXuNcq0zuR4xqPSuasI3kqFDhqSyTjREz5gzq0fXg==", "inBundle": true, "dependencies": { "lru-cache": "^6.0.0" @@ -3858,9 +3757,9 @@ } }, "node_modules/iconv-lite": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.2.tgz", - "integrity": "sha512-2y91h5OpQlolefMPmUlivelittSWy0rP+oYVpn6A7GwVHNE8AWzoYOBNmlwks3LobaJxgHCYZAnyNo2GgpNRNQ==", + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", "inBundle": true, "optional": true, "dependencies": { @@ -3900,9 +3799,9 @@ } }, "node_modules/ignore-walk": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.3.tgz", - "integrity": "sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.4.tgz", + "integrity": 
"sha512-PY6Ii8o1jMRA1z4F2hRkH/xN59ox43DavKvD3oDpfurRlOJyAHpifIwpbdv1n4jt4ov0jSpw3kQ4GhJnpBL6WQ==", "inBundle": true, "dependencies": { "minimatch": "^3.0.4" @@ -3974,17 +3873,17 @@ } }, "node_modules/init-package-json": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-2.0.2.tgz", - "integrity": "sha512-PO64kVeArePvhX7Ff0jVWkpnE1DfGRvaWcStYrPugcJz9twQGYibagKJuIMHCX7ENcp0M6LJlcjLBuLD5KeJMg==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-2.0.3.tgz", + "integrity": "sha512-tk/gAgbMMxR6fn1MgMaM1HpU1ryAmBWWitnxG5OhuNXeX0cbpbgV5jA4AIpQJVNoyOfOevTtO6WX+rPs+EFqaQ==", "inBundle": true, "dependencies": { "glob": "^7.1.1", - "npm-package-arg": "^8.1.0", + "npm-package-arg": "^8.1.2", "promzard": "^0.3.0", "read": "~1.0.1", - "read-package-json": "^3.0.0", - "semver": "^7.3.2", + "read-package-json": "^3.0.1", + "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "^3.0.0" }, @@ -4043,6 +3942,15 @@ "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", "dev": true }, + "node_modules/is-bigint": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.2.tgz", + "integrity": "sha512-0JV5+SOCQkIdzjBK9buARcV804Ddu7A0Qet6sHi3FimE9ne6m4BGQZfRn+NZiXbBk4F4XmHfDZIipLj9pX8dSA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-binary-path": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", @@ -4055,6 +3963,21 @@ "node": ">=8" } }, + "node_modules/is-boolean-object": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.1.tgz", + "integrity": "sha512-bXdQWkECBUIAcCkeH1unwJLIpZYaa5VvuygSyS/c2lf719mTKZDU5UdDRlpd01UjADgmW8RfqaP+mRaVPdr/Ng==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-buffer": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", @@ -4103,9 +4026,9 @@ } }, "node_modules/is-core-module": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz", - "integrity": "sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.5.0.tgz", + "integrity": "sha512-TXCMSDsEHMEEZ6eCA8rwRDbLu55MRGmrctljsBX/2v1d9/GzqHOxW5c5oPSgrUt2vBFXebu9rGqckXGPWOlYpg==", "inBundle": true, "dependencies": { "has": "^1.0.3" @@ -4115,9 +4038,9 @@ } }, "node_modules/is-date-object": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", - "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.4.tgz", + "integrity": "sha512-/b4ZVsG7Z5XVtIxs/h9W8nvfLgSAyKYdtGWQLbqy6jA1icmgjf8WCoTKgeS4wy5tYaPePouzFMANbnj94c2Z+A==", "dev": true, "engines": { "node": ">= 0.4" @@ -4203,6 +4126,18 @@ "node": ">=0.12.0" } }, + "node_modules/is-number-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.5.tgz", + "integrity": 
"sha512-RU0lI/n95pMoUKu9v1BZP5MBcZuNSVJkMkAG2dJqC4z2GlkGUNeH68SuHuBKBD/XFe+LHZ+f9BKkLET60Niedw==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-plain-obj": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", @@ -4213,19 +4148,19 @@ } }, "node_modules/is-potential-custom-element-name": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.0.tgz", - "integrity": "sha1-DFLlS8yjkbssSUsh6GJtczbG45c=", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", + "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", "dev": true }, "node_modules/is-regex": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.2.tgz", - "integrity": "sha512-axvdhb5pdhEVThqJzYXwMlVuZwC+FF2DpcOhTS+y/8jVq4trxyPgfcwIxIKiyeuLlSQYKkmUaPQJ8ZE4yNKXDg==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.3.tgz", + "integrity": "sha512-qSVXFz28HM7y+IWX6vLCsexdlvzT1PJNFSBuaQLQ5o0IEw8UDYW6/2+eCMVyIsbM8CNLX2a/QWmSpyxYEHY7CQ==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "has-symbols": "^1.0.1" + "has-symbols": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -4235,18 +4170,18 @@ } }, "node_modules/is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", + "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==", "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/is-string": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", - "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.6.tgz", + "integrity": "sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w==", "dev": true, "engines": { "node": ">= 0.4" @@ -4256,12 +4191,12 @@ } }, "node_modules/is-symbol": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", - "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", "dev": true, "dependencies": { - "has-symbols": "^1.0.1" + "has-symbols": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -4286,6 +4221,15 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/is-windows": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", + "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/is-word-character": { "version": "1.0.4", "resolved": 
"https://registry.npmjs.org/is-word-character/-/is-word-character-1.0.4.tgz", @@ -4315,42 +4259,39 @@ "inBundle": true }, "node_modules/istanbul-lib-coverage": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.5.tgz", - "integrity": "sha512-8aXznuEPCJvGnMSRft4udDRDtb1V3pkQkMMI5LI+6HuQz5oQ4J2UFn1H82raA3qJtyOLkkwVqICBQkjnGtn5mA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz", + "integrity": "sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg==", "dev": true, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/istanbul-lib-hook": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-2.0.7.tgz", - "integrity": "sha512-vrRztU9VRRFDyC+aklfLoeXyNdTfga2EI3udDGn4cZ6fpSXpHLV9X6CHvfoMCPtggg8zvDDmC4b9xfu0z6/llA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz", + "integrity": "sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==", "dev": true, "dependencies": { - "append-transform": "^1.0.0" + "append-transform": "^2.0.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/istanbul-lib-instrument": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-3.3.0.tgz", - "integrity": "sha512-5nnIN4vo5xQZHdXno/YDXJ0G+I3dAm4XgzfSVTPLQpj/zAV2dV6Juy0yaf10/zrJOJeHoN3fraFe+XRq2bFVZA==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz", + "integrity": "sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==", "dev": true, "dependencies": { - "@babel/generator": "^7.4.0", - "@babel/parser": "^7.4.3", - "@babel/template": "^7.4.0", - "@babel/traverse": "^7.4.3", - "@babel/types": "^7.4.0", - "istanbul-lib-coverage": "^2.0.5", - "semver": "^6.0.0" + "@babel/core": "^7.7.5", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.0.0", + "semver": "^6.3.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/istanbul-lib-instrument/node_modules/semver": { @@ -4363,170 +4304,83 @@ } }, "node_modules/istanbul-lib-processinfo": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-1.0.0.tgz", - "integrity": "sha512-FY0cPmWa4WoQNlvB8VOcafiRoB5nB+l2Pz2xGuXHRSy1KM8QFOYfz/rN+bGMCAeejrY3mrpF5oJHcN0s/garCg==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.2.tgz", + "integrity": "sha512-kOwpa7z9hme+IBPZMzQ5vdQj8srYgAtaRqeI48NGmAQ+/5yKiHLV0QbYqQpxsdEF0+w14SoB8YbnHKcXE2KnYw==", "dev": true, "dependencies": { "archy": "^1.0.0", - "cross-spawn": "^6.0.5", - "istanbul-lib-coverage": "^2.0.3", - "rimraf": "^2.6.3", - "uuid": "^3.3.2" - } - }, - "node_modules/istanbul-lib-processinfo/node_modules/cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "dependencies": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" + "cross-spawn": "^7.0.0", + "istanbul-lib-coverage": 
"^3.0.0-alpha.1", + "make-dir": "^3.0.0", + "p-map": "^3.0.0", + "rimraf": "^3.0.0", + "uuid": "^3.3.3" }, "engines": { - "node": ">=4.8" - } - }, - "node_modules/istanbul-lib-processinfo/node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/istanbul-lib-processinfo/node_modules/rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", - "dev": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - } - }, - "node_modules/istanbul-lib-processinfo/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true, - "bin": { - "semver": "bin/semver" + "node": ">=8" } }, - "node_modules/istanbul-lib-processinfo/node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "node_modules/istanbul-lib-processinfo/node_modules/p-map": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", "dev": true, "dependencies": { - "shebang-regex": "^1.0.0" + "aggregate-error": "^3.0.0" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/istanbul-lib-processinfo/node_modules/shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/istanbul-lib-processinfo/node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "which": "bin/which" + "node": ">=8" } }, "node_modules/istanbul-lib-report": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-2.0.8.tgz", - "integrity": "sha512-fHBeG573EIihhAblwgxrSenp0Dby6tJMFR/HvlerBsrCTD5bkUuoNtn3gVh29ZCS824cGGBPn7Sg7cNk+2xUsQ==", - "dev": true, - "dependencies": { - "istanbul-lib-coverage": "^2.0.5", - "make-dir": "^2.1.0", - "supports-color": "^6.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/istanbul-lib-report/node_modules/has-flag": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/istanbul-lib-report/node_modules/supports-color": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", - "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": 
"sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==", "dev": true, "dependencies": { - "has-flag": "^3.0.0" + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^3.0.0", + "supports-color": "^7.1.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/istanbul-lib-source-maps": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-3.0.6.tgz", - "integrity": "sha512-R47KzMtDJH6X4/YW9XTx+jrLnZnscW4VpNN+1PViSYTejLVPWv7oov+Duf8YQSPyVRUvueQqz1TcsC6mooZTXw==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.0.tgz", + "integrity": "sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg==", "dev": true, "dependencies": { "debug": "^4.1.1", - "istanbul-lib-coverage": "^2.0.5", - "make-dir": "^2.1.0", - "rimraf": "^2.6.3", + "istanbul-lib-coverage": "^3.0.0", "source-map": "^0.6.1" }, "engines": { - "node": ">=6" + "node": ">=8" } }, - "node_modules/istanbul-lib-source-maps/node_modules/rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "node_modules/istanbul-lib-source-maps/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "dev": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" + "engines": { + "node": ">=0.10.0" } }, "node_modules/istanbul-reports": { - "version": "2.2.7", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-2.2.7.tgz", - "integrity": "sha512-uu1F/L1o5Y6LzPVSVZXNOoD/KXpJue9aeLRd0sM9uMXfZvzomB0WxVamWb5ue8kA2vVWEmW7EG+A5n3f1kqHKg==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.0.2.tgz", + "integrity": "sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw==", "dev": true, "dependencies": { - "html-escaper": "^2.0.0" + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/jackspeak": { @@ -4567,36 +4421,37 @@ "inBundle": true }, "node_modules/jsdom": { - "version": "16.4.0", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.4.0.tgz", - "integrity": "sha512-lYMm3wYdgPhrl7pDcRmvzPhhrGVBeVhPIqeHjzeiHN3DFmD1RBpbExbi8vU7BJdH8VAZYovR8DMt0PNNDM7k8w==", + "version": "16.6.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.6.0.tgz", + "integrity": "sha512-Ty1vmF4NHJkolaEmdjtxTfSfkdb8Ywarwf63f+F8/mDD1uLSSWDxDuMiZxiPhwunLrn9LOSVItWj4bLYsLN3Dg==", "dev": true, "dependencies": { - "abab": "^2.0.3", - "acorn": "^7.1.1", + "abab": "^2.0.5", + "acorn": "^8.2.4", "acorn-globals": "^6.0.0", "cssom": "^0.4.4", - "cssstyle": "^2.2.0", + "cssstyle": "^2.3.0", "data-urls": "^2.0.0", - "decimal.js": "^10.2.0", + "decimal.js": "^10.2.1", "domexception": "^2.0.1", - "escodegen": "^1.14.1", + "escodegen": "^2.0.0", + "form-data": "^3.0.0", "html-encoding-sniffer": "^2.0.1", - "is-potential-custom-element-name": "^1.0.0", + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "is-potential-custom-element-name": "^1.0.1", "nwsapi": "^2.2.0", - "parse5": "5.1.1", - 
"request": "^2.88.2", - "request-promise-native": "^1.0.8", - "saxes": "^5.0.0", + "parse5": "6.0.1", + "saxes": "^5.0.1", "symbol-tree": "^3.2.4", - "tough-cookie": "^3.0.1", + "tough-cookie": "^4.0.0", "w3c-hr-time": "^1.0.2", "w3c-xmlserializer": "^2.0.0", "webidl-conversions": "^6.1.0", "whatwg-encoding": "^1.0.5", "whatwg-mimetype": "^2.3.0", - "whatwg-url": "^8.0.0", - "ws": "^7.2.3", + "whatwg-url": "^8.5.0", + "ws": "^7.4.5", "xml-name-validator": "^3.0.0" }, "engines": { @@ -4611,6 +4466,18 @@ } } }, + "node_modules/jsdom/node_modules/acorn": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.3.0.tgz", + "integrity": "sha512-tqPKHZ5CaBJw0Xmy0ZZvLs1qTV+BNFSyvn77ASXkpBNfIRk8ev26fKrD9iLGwGA9zedPao52GSHzq8lyZG0NUw==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/jsesc": { "version": "2.5.2", "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", @@ -4660,9 +4527,9 @@ "dev": true }, "node_modules/json-stringify-nice": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/json-stringify-nice/-/json-stringify-nice-1.1.1.tgz", - "integrity": "sha512-aHOgcSoOLvmFZQMvZ27rFw68r4e9OlQtH7YEcF2u5amVYbF/D3cKBXKCvl5EGhQz2NwJZ6RPfgRX6yNQ+UBKJw==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/json-stringify-nice/-/json-stringify-nice-1.1.4.tgz", + "integrity": "sha512-5Z5RFW63yxReJ7vANgW6eZFGWaQvnPE3WNmZoOJrSkGju2etKA2L5rrOa1sm877TVTFt57A80BH1bArcmlLfPw==", "inBundle": true, "funding": { "url": "https://github.com/sponsors/isaacs" @@ -4675,15 +4542,18 @@ "inBundle": true }, "node_modules/json5": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", - "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", + "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", "dev": true, "dependencies": { - "minimist": "^1.2.0" + "minimist": "^1.2.5" }, "bin": { "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" } }, "node_modules/jsonparse": { @@ -4711,9 +4581,9 @@ } }, "node_modules/just-diff": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/just-diff/-/just-diff-3.0.2.tgz", - "integrity": "sha512-+EiNvacECnZbszZa5IMjzrJ3dy2HKMXyGaNYWBnXy+iWW+437jIvQUrWaM9M+XI/6gOH8EjqvhGUOSh7ETekyg==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/just-diff/-/just-diff-3.1.1.tgz", + "integrity": "sha512-sdMWKjRq8qWZEjDcVA6llnUT8RDEBIfOiGpYFPYa9u+2c39JCsejktSP7mj5eRid5EIvTzIpQ2kDOCw1Nq9BjQ==", "inBundle": true }, "node_modules/just-diff-apply": { @@ -4754,67 +4624,76 @@ } }, "node_modules/libnpmaccess": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/libnpmaccess/-/libnpmaccess-4.0.1.tgz", - "integrity": "sha512-ZiAgvfUbvmkHoMTzdwmNWCrQRsDkOC+aM5BDfO0C9aOSwF3R1LdFDBD+Rer1KWtsoQYO35nXgmMR7OUHpDRxyA==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/libnpmaccess/-/libnpmaccess-4.0.3.tgz", + "integrity": "sha512-sPeTSNImksm8O2b6/pf3ikv4N567ERYEpeKRPSmqlNt1dTZbvgpJIzg5vAhXHpw2ISBsELFRelk0jEahj1c6nQ==", "inBundle": true, "dependencies": { "aproba": "^2.0.0", "minipass": "^3.1.1", - "npm-package-arg": "^8.0.0", - "npm-registry-fetch": "^9.0.0" + "npm-package-arg": "^8.1.2", + "npm-registry-fetch": "^11.0.0" }, "engines": { "node": ">=10" } }, "node_modules/libnpmdiff": { - 
"version": "2.0.3", - "resolved": "https://registry.npmjs.org/libnpmdiff/-/libnpmdiff-2.0.3.tgz", - "integrity": "sha512-BgVvJCjd+EGY3Ifb3+gWkZwMjn6kYMtruT88XXOrJCWyjnG5aRdFv3lKuJx5JdU5ku08G5LlY8tOZdfRn72m7w==", + "resolved": "packages/libnpmdiff", + "link": true + }, + "node_modules/libnpmexec": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/libnpmexec/-/libnpmexec-2.0.0.tgz", + "integrity": "sha512-9zHswx//Lp2ao+huWF2aL+6v4haMncyxNusk6Us2fbLNnPh3+rgSkv38LJ2v8gmKS2kAnkUmQf8pHjcZ+7Z3NA==", "inBundle": true, "dependencies": { - "@npmcli/disparity-colors": "^1.0.1", - "binary-extensions": "^2.2.0", - "diff": "^5.0.0", - "minimatch": "^3.0.4", - "pacote": "^11.2.3", - "tar": "^6.1.0" + "@npmcli/arborist": "^2.3.0", + "@npmcli/ci-detect": "^1.3.0", + "@npmcli/run-script": "^1.8.4", + "chalk": "^4.1.0", + "mkdirp-infer-owner": "^2.0.0", + "npm-package-arg": "^8.1.2", + "pacote": "^11.3.1", + "proc-log": "^1.0.0", + "read": "^1.0.7", + "read-package-json-fast": "^2.0.2", + "walk-up-path": "^1.0.0" }, "engines": { "node": ">=10" } }, "node_modules/libnpmfund": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/libnpmfund/-/libnpmfund-1.0.2.tgz", - "integrity": "sha512-Scw2JiLxfT7wqW/VbxIXV8u3FaFT/ZlR8YLFgTdCPsL1Hhli0554ZXyP8JTu1sLeDpHsoqtgLb4mgYVQnqigjA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/libnpmfund/-/libnpmfund-1.1.0.tgz", + "integrity": "sha512-Kfmh3pLS5/RGKG5WXEig8mjahPVOxkik6lsbH4iX0si1xxNi6eeUh/+nF1MD+2cgalsQif3O5qyr6mNz2ryJrQ==", "inBundle": true, "dependencies": { - "@npmcli/arborist": "^2.0.0" + "@npmcli/arborist": "^2.5.0" } }, "node_modules/libnpmhook": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/libnpmhook/-/libnpmhook-6.0.1.tgz", - "integrity": "sha512-rwiWIWAQ6R5sPFRi9gsSC/+1/BxFlxk5nNQysVTXEHbqM9ds8g/duW79wRbZKnRyK1xyOmafxbj69nt9tcUkyw==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/libnpmhook/-/libnpmhook-6.0.3.tgz", + "integrity": "sha512-3fmkZJibIybzmAvxJ65PeV3NzRc0m4xmYt6scui5msocThbEp4sKFT80FhgrCERYDjlUuFahU6zFNbJDHbQ++g==", "inBundle": true, "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" }, "engines": { "node": ">=10" } }, "node_modules/libnpmorg": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/libnpmorg/-/libnpmorg-2.0.1.tgz", - "integrity": "sha512-Wj0aApN6TfZWHqtJNjkY7IeQpX24jrQD58IHrEz234quKVRYlegUiMsZl2g4OEFeZNSSc9QN28EdI1SBkUlW7g==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/libnpmorg/-/libnpmorg-2.0.3.tgz", + "integrity": "sha512-JSGl3HFeiRFUZOUlGdiNcUZOsUqkSYrg6KMzvPZ1WVZ478i47OnKSS0vkPmX45Pai5mTKuwIqBMcGWG7O8HfdA==", "inBundle": true, "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" }, "engines": { "node": ">=10" @@ -4835,71 +4714,108 @@ } }, "node_modules/libnpmpublish": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/libnpmpublish/-/libnpmpublish-4.0.0.tgz", - "integrity": "sha512-2RwYXRfZAB1x/9udKpZmqEzSqNd7ouBRU52jyG14/xG8EF+O9A62d7/XVR3iABEQHf1iYhkm0Oq9iXjrL3tsXA==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/libnpmpublish/-/libnpmpublish-4.0.2.tgz", + "integrity": "sha512-+AD7A2zbVeGRCFI2aO//oUmapCwy7GHqPXFJh3qpToSRNU+tXKJ2YFUgjt04LPPAf2dlEH95s6EhIHM1J7bmOw==", "inBundle": true, "dependencies": { - "normalize-package-data": "^3.0.0", - "npm-package-arg": "^8.1.0", - "npm-registry-fetch": "^9.0.0", + "normalize-package-data": "^3.0.2", + "npm-package-arg": "^8.1.2", 
+ "npm-registry-fetch": "^11.0.0", "semver": "^7.1.3", - "ssri": "^8.0.0" + "ssri": "^8.0.1" }, "engines": { "node": ">=10" } }, "node_modules/libnpmsearch": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/libnpmsearch/-/libnpmsearch-3.1.0.tgz", - "integrity": "sha512-UQyzQjtAv99kZDuijqTB2Do63qtt+2SKNOVSTnehWTQbxzXF7Jvc8UD3YNPljm8+Y5T31K2AqptbY5BD6XHlIg==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/libnpmsearch/-/libnpmsearch-3.1.2.tgz", + "integrity": "sha512-BaQHBjMNnsPYk3Bl6AiOeVuFgp72jviShNBw5aHaHNKWqZxNi38iVNoXbo6bG/Ccc/m1To8s0GtMdtn6xZ1HAw==", "inBundle": true, "dependencies": { - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" }, "engines": { "node": ">=10" } }, "node_modules/libnpmteam": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/libnpmteam/-/libnpmteam-2.0.2.tgz", - "integrity": "sha512-QGvtbMPdQzK+XybBPK0UjfLEI9fiDPQSFMbZW+2lmm0BgPoqxHle0Wl90bsIyBVY7pYzp45MgMqQNo7KWCLpDA==", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/libnpmteam/-/libnpmteam-2.0.4.tgz", + "integrity": "sha512-FPrVJWv820FZFXaflAEVTLRWZrerCvfe7ZHSMzJ/62EBlho2KFlYKjyNEsPW3JiV7TLSXi3vo8u0gMwIkXSMTw==", "inBundle": true, "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" }, "engines": { "node": ">=10" } }, "node_modules/libnpmversion": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/libnpmversion/-/libnpmversion-1.0.11.tgz", - "integrity": "sha512-HKbfJ0wwx+W9Br4bvbHUMN/YIe7B8qmFtdaLZnXEUozaaTD6gGpIEf1aH1xRlGfNPocT6YBz3O6+RAgSndAgbA==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/libnpmversion/-/libnpmversion-1.2.1.tgz", + "integrity": "sha512-AA7x5CFgBFN+L4/JWobnY5t4OAHjQuPbAwUYJ7/NtHuyLut5meb+ne/aj0n7PWNiTGCJcRw/W6Zd2LoLT7EZuQ==", "inBundle": true, "dependencies": { - "@npmcli/git": "^2.0.6", - "@npmcli/run-script": "^1.8.3", - "read-package-json-fast": "^2.0.1", - "semver": "^7.3.4", + "@npmcli/git": "^2.0.7", + "@npmcli/run-script": "^1.8.4", + "json-parse-even-better-errors": "^2.3.1", + "semver": "^7.3.5", "stringify-package": "^1.0.1" } }, - "node_modules/licensee": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/licensee/-/licensee-8.1.0.tgz", - "integrity": "sha512-rnXUmUuLzZrGfm3bfWNl71Emw/OJqwUyIrIRq5D06Ct9EbiFnZtiydA5ryf4FDPikdneJ0l1Q+g6TuMjpWGfrA==", + "node_modules/libtap": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/libtap/-/libtap-1.1.1.tgz", + "integrity": "sha512-Fye8fh1+G7E8qqmjQaY+pXGxy7HM0S6bqCCJFLa16+g2jODBByxbJFDpjbDNF69wfRVyvJ+foLZc1WTIv7dx+g==", "dev": true, "dependencies": { - "@blueoak/list": "^1.0.2", - "correct-license-metadata": "^1.0.1", - "docopt": "^0.6.2", - "fs-access": "^2.0.0", - "has": "^1.0.3", - "json-parse-errback": "^2.0.1", + "async-hook-domain": "^2.0.1", + "bind-obj-methods": "^3.0.0", + "diff": "^4.0.2", + "function-loop": "^2.0.1", + "minipass": "^3.1.1", + "own-or": "^1.0.0", + "own-or-env": "^1.0.1", + "signal-exit": "^3.0.2", + "stack-utils": "^2.0.1", + "tap-parser": "^10.0.1", + "tap-yaml": "^1.0.0", + "tcompare": "^5.0.1", + "trivial-deferred": "^1.0.1", + "yapool": "^1.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/libtap/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": 
true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/licensee": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/licensee/-/licensee-8.2.0.tgz", + "integrity": "sha512-Z5fQ+qP85N38klGijH0bXiWVlKqLKDMqsckKL+VcA+ZQ/DJK5cpIpvryGHtREaQ3Ah5jrgtXN8mHfII7UtlsJg==", + "dev": true, + "dependencies": { + "@blueoak/list": "^2.0.0", + "correct-license-metadata": "^1.0.1", + "docopt": "^0.6.2", + "fs-access": "^2.0.0", + "has": "^1.0.3", + "json-parse-errback": "^2.0.1", "npm-license-corrections": "^1.0.0", "read-package-tree": "^5.3.1", "run-parallel": "^1.1.9", @@ -4924,14 +4840,14 @@ } }, "node_modules/load-json-file": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", - "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", "dev": true, "dependencies": { "graceful-fs": "^4.1.2", - "parse-json": "^2.2.0", - "pify": "^2.0.0", + "parse-json": "^4.0.0", + "pify": "^3.0.0", "strip-bom": "^3.0.0" }, "engines": { @@ -4952,9 +4868,15 @@ } }, "node_modules/lodash": { - "version": "4.17.20", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz", - "integrity": "sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==", + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "node_modules/lodash.clonedeep": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", + "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=", "dev": true }, "node_modules/lodash.flattendeep": { @@ -4963,10 +4885,16 @@ "integrity": "sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI=", "dev": true }, - "node_modules/lodash.sortby": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", - "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=", + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "node_modules/lodash.truncate": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", + "integrity": "sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=", "dev": true }, "node_modules/lodash.uniq": { @@ -5009,50 +4937,37 @@ } }, "node_modules/make-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", - "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "dev": true, "dependencies": { - "pify": "^4.0.1", - "semver": "^5.6.0" + "semver": "^6.0.0" }, "engines": { - "node": ">=6" - } - }, - "node_modules/make-dir/node_modules/pify": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", - 
"dev": true, - "engines": { - "node": ">=6" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/make-dir/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "dev": true, "bin": { - "semver": "bin/semver" + "semver": "bin/semver.js" } }, - "node_modules/make-error": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", - "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", - "dev": true - }, "node_modules/make-fetch-happen": { - "version": "8.0.14", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-8.0.14.tgz", - "integrity": "sha512-EsS89h6l4vbfJEtBZnENTOFk8mCRpY5ru36Xe5bcX1KYIli2mkSHqoFsp5O1wMDvTJJzxe/4THpCTtygjeeGWQ==", + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.0.4.tgz", + "integrity": "sha512-sQWNKMYqSmbAGXqJg2jZ+PmHh5JAybvwu0xM8mZR/bsTjGiTASj3ldXJV7KFHy1k/IJIBkjxQFoWIVsv9+PQMg==", "inBundle": true, "dependencies": { "agentkeepalive": "^4.1.3", - "cacache": "^15.0.5", + "cacache": "^15.2.0", "http-cache-semantics": "^4.1.0", "http-proxy-agent": "^4.0.1", "https-proxy-agent": "^5.0.0", @@ -5063,6 +4978,7 @@ "minipass-fetch": "^1.3.2", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.2", "promise-retry": "^2.0.1", "socks-proxy-agent": "^5.0.0", "ssri": "^8.0.0" @@ -5158,31 +5074,22 @@ "integrity": "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=", "dev": true }, - "node_modules/merge-source-map": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/merge-source-map/-/merge-source-map-1.1.0.tgz", - "integrity": "sha512-Qkcp7P2ygktpMPh2mCQZaf3jhN6D3Z/qVZHSdWvQ+2Ef5HgRAPBO57A77+ENm0CPx2+1Ce/MYKi3ymqdfuqibw==", - "dev": true, - "dependencies": { - "source-map": "^0.6.1" - } - }, "node_modules/mime-db": { - "version": "1.45.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.45.0.tgz", - "integrity": "sha512-CkqLUxUk15hofLoLyljJSrukZi8mAtgd+yE5uO4tqRZsdsAJKv0O+rFMhVDRJgozy+yG6md5KwuXhD4ocIoP+w==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.49.0.tgz", + "integrity": "sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA==", "inBundle": true, "engines": { "node": ">= 0.6" } }, "node_modules/mime-types": { - "version": "2.1.28", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.28.tgz", - "integrity": "sha512-0TO2yJ5YHYr7M2zzT7gDU1tbwHxEUWBCLt0lscSNpcdAfFyJOVEpRYNS7EXVcTLNj/25QO8gulHC5JtTzSE2UQ==", + "version": "2.1.32", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.32.tgz", + "integrity": "sha512-hJGaVS4G4c9TSMYh2n6SQAGrC4RnfU+daP8G7cSCmaqNjiOoUY0VHCMS42pxnQmVF1GWwFhbHWn3RIxCqTmZ9A==", "inBundle": true, "dependencies": { - "mime-db": "1.45.0" + "mime-db": "1.49.0" }, "engines": { "node": ">= 0.6" @@ -5243,9 +5150,9 @@ } }, "node_modules/minipass-fetch": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.3.3.tgz", - "integrity": 
"sha512-akCrLDWfbdAWkMLBxJEeWTdNsjML+dt5YgOI4gJ53vuO0vrmYQkUPxa6j6V65s9CcePIr2SSWqjT2EcrNseryQ==", + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.3.4.tgz", + "integrity": "sha512-TielGogIzbUEtd1LsjZFs47RWuHHfhl6TiCx1InVxApBAmQ8bL0dL5ilkLGcRvuyW/A9nE+Lvn855Ewz8S0PnQ==", "inBundle": true, "dependencies": { "minipass": "^3.1.0", @@ -5374,22 +5281,19 @@ "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", "dev": true }, - "node_modules/nested-error-stacks": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/nested-error-stacks/-/nested-error-stacks-2.1.0.tgz", - "integrity": "sha512-AO81vsIO1k1sM4Zrd6Hu7regmJN1NSiAja10gc4bX3F0wd+9rQmcuHQaHVQCYIEC8iFXnE+mavh23GOt7wBgug==", - "dev": true - }, - "node_modules/nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true + "node_modules/negotiator": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", + "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==", + "inBundle": true, + "engines": { + "node": ">= 0.6" + } }, "node_modules/node-abi": { - "version": "2.19.3", - "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-2.19.3.tgz", - "integrity": "sha512-9xZrlyfvKhWme2EXFKQhZRp1yNWT/uI1luYPr3sFl+H4keYY4xR+1jO7mvTTijIsHf1M+QDe9uWuKeEpLInIlg==", + "version": "2.30.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-2.30.0.tgz", + "integrity": "sha512-g6bZh3YCKQRdwuO/tSZZYJAw622SjsRfJ2X0Iy4sSOHZ34/sPPdVBn8fev2tj7njzLwuqPw9uMtGsGkO5kIQvg==", "dev": true, "dependencies": { "semver": "^5.4.1" @@ -5405,9 +5309,9 @@ } }, "node_modules/node-addon-api": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.1.0.tgz", - "integrity": "sha512-flmrDNB06LIl5lywUz7YlNGZH/5p0M7W28k8hzd9Lshtdh1wshD2Y+U4h9LD6KObOy1f+fEVdgprPrEymjM5uw==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.2.1.tgz", + "integrity": "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==", "dev": true }, "node_modules/node-gyp": { @@ -5434,15 +5338,78 @@ "node": ">= 10.12.0" } }, - "node_modules/node-modules-regexp": { + "node_modules/node-gyp/node_modules/aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", + "inBundle": true + }, + "node_modules/node-gyp/node_modules/gauge": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", + "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", + "inBundle": true, + "dependencies": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + } + }, + "node_modules/node-gyp/node_modules/is-fullwidth-code-point": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz", - "integrity": "sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA=", - "dev": true, + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + 
"integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "inBundle": true, + "dependencies": { + "number-is-nan": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/node-gyp/node_modules/npmlog": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", + "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "inBundle": true, + "dependencies": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + } + }, + "node_modules/node-gyp/node_modules/string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "inBundle": true, + "dependencies": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + }, "engines": { "node": ">=0.10.0" } }, + "node_modules/node-preload": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz", + "integrity": "sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==", + "dev": true, + "dependencies": { + "process-on-spawn": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/noop-logger": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/noop-logger/-/noop-logger-0.1.1.tgz", @@ -5465,14 +5432,14 @@ } }, "node_modules/normalize-package-data": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.0.tgz", - "integrity": "sha512-6lUjEI0d3v6kFrtgA/lOx4zHCWULXsFNIjHolnZCKCTLA6m/G625cdn3O7eNmT0iD3jfo6HZ9cdImGZwf21prw==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.2.tgz", + "integrity": "sha512-6CdZocmfGaKnIHPVFhJJZ3GuR8SsLKvDANFp47Jmy51aKIr8akjAWTSxtpI+MBgBFdSMRyo4hMpDlT6dTffgZg==", "inBundle": true, "dependencies": { - "hosted-git-info": "^3.0.6", - "resolve": "^1.17.0", - "semver": "^7.3.2", + "hosted-git-info": "^4.0.1", + "resolve": "^1.20.0", + "semver": "^7.3.4", "validate-npm-package-license": "^3.0.1" }, "engines": { @@ -5489,9 +5456,9 @@ } }, "node_modules/npm-audit-report": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/npm-audit-report/-/npm-audit-report-2.1.4.tgz", - "integrity": "sha512-Tz7rnfskSdZ0msTzt2mENC/B+H2QI8u0jN0ck7o3zDsQYIQrek/l3MjEc+CARer+64LsVTU6ZIqNuh0X55QPhw==", + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/npm-audit-report/-/npm-audit-report-2.1.5.tgz", + "integrity": "sha512-YB8qOoEmBhUH1UJgh1xFAv7Jg1d+xoNhsDYiFQlEFThEBui0W1vIz2ZK6FVg4WZjwEdl7uBQlm1jy3MUfyHeEw==", "inBundle": true, "dependencies": { "chalk": "^4.0.0" @@ -5501,9 +5468,9 @@ } }, "node_modules/npm-bundled": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.1.tgz", - "integrity": "sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.2.tgz", + "integrity": "sha512-x5DHup0SuyQcmL3s7Rx/YQ8sbw/Hzg0rj48eN0dV7hf5cmQq5PXIeioroH3raV1QC1yh3uTYuMThvEQF3iKgGQ==", "inBundle": true, "dependencies": { "npm-normalize-package-bin": "^1.0.1" @@ -5534,13 +5501,13 @@ "inBundle": true }, "node_modules/npm-package-arg": { - "version": "8.1.1", - "resolved": 
"https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-8.1.1.tgz", - "integrity": "sha512-CsP95FhWQDwNqiYS+Q0mZ7FAEDytDZAkNxQqea6IaAFJTAY9Lhhqyl0irU/6PMc7BGfUmnsbHcqxJD7XuVM/rg==", + "version": "8.1.5", + "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-8.1.5.tgz", + "integrity": "sha512-LhgZrg0n0VgvzVdSm1oiZworPbTxYHUJCgtsJW8mGvlDpxTM1vSJc3m5QZeUkhAHIzbz3VCHd/R4osi1L1Tg/Q==", "inBundle": true, "dependencies": { - "hosted-git-info": "^3.0.6", - "semver": "^7.0.0", + "hosted-git-info": "^4.0.1", + "semver": "^7.3.4", "validate-npm-package-name": "^3.0.0" }, "engines": { @@ -5548,9 +5515,9 @@ } }, "node_modules/npm-packlist": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-2.1.4.tgz", - "integrity": "sha512-Qzg2pvXC9U4I4fLnUrBmcIT4x0woLtUgxUi9eC+Zrcv1Xx5eamytGAfbDWQ67j7xOcQ2VW1I3su9smVTIdu7Hw==", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-2.2.2.tgz", + "integrity": "sha512-Jt01acDvJRhJGthnUJVF/w6gumWOZxO7IkpY/lsX9//zqQgnF7OJaxgQXcerd4uQOLu7W5bkb4mChL9mdfm+Zg==", "inBundle": true, "dependencies": { "glob": "^7.1.6", @@ -5566,37 +5533,36 @@ } }, "node_modules/npm-pick-manifest": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-6.1.0.tgz", - "integrity": "sha512-ygs4k6f54ZxJXrzT0x34NybRlLeZ4+6nECAIbr2i0foTnijtS1TJiyzpqtuUAJOps/hO0tNDr8fRV5g+BtRlTw==", + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-6.1.1.tgz", + "integrity": "sha512-dBsdBtORT84S8V8UTad1WlUyKIY9iMsAmqxHbLdeEeBNMLQDlDWWra3wYUx9EBEIiG/YwAy0XyNHDd2goAsfuA==", "inBundle": true, "dependencies": { "npm-install-checks": "^4.0.0", - "npm-package-arg": "^8.0.0", - "semver": "^7.0.0" + "npm-normalize-package-bin": "^1.0.1", + "npm-package-arg": "^8.1.2", + "semver": "^7.3.4" } }, "node_modules/npm-profile": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-5.0.2.tgz", - "integrity": "sha512-hOhpH23PeWUFParJ6T1nquiHJLmFZ5VReTjBf1TJpl1YGuqfUS+ZYujVYPfuMbixosO82kWzvnxg4ZmP4VkTeg==", + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-5.0.4.tgz", + "integrity": "sha512-OKtU7yoAEBOnc8zJ+/uo5E4ugPp09sopo+6y1njPp+W99P8DvQon3BJYmpvyK2Bf1+3YV5LN1bvgXRoZ1LUJBA==", "inBundle": true, "dependencies": { - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" }, "engines": { "node": ">=10" } }, "node_modules/npm-registry-fetch": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-9.0.0.tgz", - "integrity": "sha512-PuFYYtnQ8IyVl6ib9d3PepeehcUeHN9IO5N/iCRhyg9tStQcqGQBRVHmfmMWPDERU3KwZoHFvbJ4FPXPspvzbA==", + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-11.0.0.tgz", + "integrity": "sha512-jmlgSxoDNuhAtxUIG6pVwwtz840i994dL14FoNVZisrmZW5kWd63IUTNv1m/hyRSGSqWjCUp/YZlS1BJyNp9XA==", "inBundle": true, "dependencies": { - "@npmcli/ci-detect": "^1.0.0", - "lru-cache": "^6.0.0", - "make-fetch-happen": "^8.0.9", + "make-fetch-happen": "^9.0.1", "minipass": "^3.1.3", "minipass-fetch": "^1.3.0", "minipass-json-stream": "^1.0.1", @@ -5614,15 +5580,15 @@ "inBundle": true }, "node_modules/npmlog": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", - "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/npmlog/-/npmlog-5.0.0.tgz", + "integrity": "sha512-ftpIiLjerL2tUg3dCqN8pOSoB90gqZlzv/gaZoxHaKjeLClrfJIEQ1Pdxi6qSzflz916Bljdy8dTWQ4J7hAFSQ==", "inBundle": true, "dependencies": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" + "are-we-there-yet": "^1.1.5", + "console-control-strings": "^1.1.0", + "gauge": "^3.0.0", + "set-blocking": "^2.0.0" } }, "node_modules/null-check": { @@ -5650,67 +5616,69 @@ "dev": true }, "node_modules/nyc": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/nyc/-/nyc-14.1.1.tgz", - "integrity": "sha512-OI0vm6ZGUnoGZv/tLdZ2esSVzDwUC88SNs+6JoSOMVxA+gKMB8Tk7jBwgemLx4O40lhhvZCVw1C+OYLOBOPXWw==", + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz", + "integrity": "sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==", "dev": true, "dependencies": { - "archy": "^1.0.0", - "caching-transform": "^3.0.2", - "convert-source-map": "^1.6.0", - "cp-file": "^6.2.0", - "find-cache-dir": "^2.1.0", - "find-up": "^3.0.0", - "foreground-child": "^1.5.6", - "glob": "^7.1.3", - "istanbul-lib-coverage": "^2.0.5", - "istanbul-lib-hook": "^2.0.7", - "istanbul-lib-instrument": "^3.3.0", - "istanbul-lib-report": "^2.0.8", - "istanbul-lib-source-maps": "^3.0.6", - "istanbul-reports": "^2.2.4", - "js-yaml": "^3.13.1", - "make-dir": "^2.1.0", - "merge-source-map": "^1.1.0", - "resolve-from": "^4.0.0", - "rimraf": "^2.6.3", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "caching-transform": "^4.0.0", + "convert-source-map": "^1.7.0", + "decamelize": "^1.2.0", + "find-cache-dir": "^3.2.0", + "find-up": "^4.1.0", + "foreground-child": "^2.0.0", + "get-package-type": "^0.1.0", + "glob": "^7.1.6", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-hook": "^3.0.0", + "istanbul-lib-instrument": "^4.0.0", + "istanbul-lib-processinfo": "^2.0.2", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.0.2", + "make-dir": "^3.0.0", + "node-preload": "^0.2.1", + "p-map": "^3.0.0", + "process-on-spawn": "^1.0.0", + "resolve-from": "^5.0.0", + "rimraf": "^3.0.0", "signal-exit": "^3.0.2", - "spawn-wrap": "^1.4.2", - "test-exclude": "^5.2.3", - "uuid": "^3.3.2", - "yargs": "^13.2.2", - "yargs-parser": "^13.0.0" + "spawn-wrap": "^2.0.0", + "test-exclude": "^6.0.0", + "yargs": "^15.0.2" }, "bin": { "nyc": "bin/nyc.js" }, "engines": { - "node": ">=6" + "node": ">=8.9" } }, "node_modules/nyc/node_modules/find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, "dependencies": { - "locate-path": "^3.0.0" + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/nyc/node_modules/locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + 
"integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, "dependencies": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" + "p-locate": "^4.1.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/nyc/node_modules/p-limit": { @@ -5729,15 +5697,27 @@ } }, "node_modules/nyc/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/p-map": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", "dev": true, "dependencies": { - "p-limit": "^2.0.0" + "aggregate-error": "^3.0.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/nyc/node_modules/p-try": { @@ -5749,16 +5729,22 @@ "node": ">=6" } }, - "node_modules/nyc/node_modules/rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "node_modules/nyc/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "dev": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" } }, "node_modules/oauth-sign": { @@ -5780,9 +5766,9 @@ } }, "node_modules/object-inspect": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.9.0.tgz", - "integrity": "sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw==", + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.3.tgz", + "integrity": "sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==", "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -5816,14 +5802,14 @@ } }, "node_modules/object.getownpropertydescriptors": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.1.tgz", - "integrity": "sha512-6DtXgZ/lIZ9hqx4GtZETobXLR/ZLaa0aqV0kzbn80Rf8Z2e/XFnhA0I7p07N2wH8bBBltr2xQPi6sbKWAY2Eng==", + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.2.tgz", + "integrity": "sha512-WtxeKSzfBjlzL+F9b7M7hewDzMwy+C8NRssHd1YrNlzHzIDrXcXiNOMrezdAEM4UXixgV+vvnyBeN7Rygl2ttQ==", "dev": true, "dependencies": { - 
"call-bind": "^1.0.0", + "call-bind": "^1.0.2", "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.1" + "es-abstract": "^1.18.0-next.2" }, "engines": { "node": ">= 0.8" @@ -5833,15 +5819,14 @@ } }, "node_modules/object.values": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.2.tgz", - "integrity": "sha512-MYC0jvJopr8EK6dPBiO8Nb9mvjdypOachO5REGk6MXzujbBrAisKo3HmdEI6kZDL6fC31Mwee/5YbtMebixeag==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.4.tgz", + "integrity": "sha512-TnGo7j4XSnKQoK3MfvkzqKCi0nVe/D9I9IjwTNYdb/fxYHpjrluHVOgw0AF6jrRFGMPHdfuidR09tIDiIvnaSg==", "dev": true, "dependencies": { - "call-bind": "^1.0.0", + "call-bind": "^1.0.2", "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.1", - "has": "^1.0.3" + "es-abstract": "^1.18.2" }, "engines": { "node": ">= 0.4" @@ -5885,15 +5870,6 @@ "node": ">= 0.8.0" } }, - "node_modules/os-homedir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", - "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/own-or": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/own-or/-/own-or-1.0.0.tgz", @@ -5958,27 +5934,27 @@ } }, "node_modules/package-hash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-3.0.0.tgz", - "integrity": "sha512-lOtmukMDVvtkL84rJHI7dpTYq+0rli8N2wlnqUcBuDWCfVhRUfOmnR9SsoHFMLpACvEV60dX7rd0rFaYDZI+FA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz", + "integrity": "sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==", "dev": true, "dependencies": { "graceful-fs": "^4.1.15", - "hasha": "^3.0.0", + "hasha": "^5.0.0", "lodash.flattendeep": "^4.4.0", "release-zalgo": "^1.0.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/pacote": { - "version": "11.2.7", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-11.2.7.tgz", - "integrity": "sha512-ogxPor11v/rnU9ukwLlI2dPx22q9iob1+yZyqSwerKsOvBMhU9e+SJHtxY4y2N0MRH4/5jGsGiRLsZeJWyM4dQ==", + "version": "11.3.5", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-11.3.5.tgz", + "integrity": "sha512-fT375Yczn4zi+6Hkk2TBe1x1sP8FgFsEIZ2/iWaXY2r/NkhDJfxbcn5paz1+RTFCyNf+dPnaoBDJoAxXSU8Bkg==", "inBundle": true, "dependencies": { - "@npmcli/git": "^2.0.1", + "@npmcli/git": "^2.1.0", "@npmcli/installed-package-contents": "^1.0.6", "@npmcli/promise-spawn": "^1.2.0", "@npmcli/run-script": "^1.8.2", @@ -5991,7 +5967,7 @@ "npm-package-arg": "^8.0.1", "npm-packlist": "^2.1.4", "npm-pick-manifest": "^6.0.0", - "npm-registry-fetch": "^9.0.0", + "npm-registry-fetch": "^11.0.0", "promise-retry": "^2.0.1", "read-package-json-fast": "^2.0.1", "rimraf": "^3.0.2", @@ -6047,21 +6023,22 @@ } }, "node_modules/parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", "dev": true, "dependencies": { - "error-ex": "^1.2.0" + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" }, "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, "node_modules/parse5": { - "version": "5.1.1", - "resolved": 
"https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz", - "integrity": "sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", "dev": true }, "node_modules/path-exists": { @@ -6092,18 +6069,18 @@ } }, "node_modules/path-parse": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "inBundle": true }, "node_modules/path-type": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", "dev": true, "dependencies": { - "pify": "^2.0.0" + "pify": "^3.0.0" }, "engines": { "node": ">=4" @@ -6116,9 +6093,9 @@ "inBundle": true }, "node_modules/picomatch": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", - "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", + "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", "dev": true, "engines": { "node": ">=8.6" @@ -6128,30 +6105,30 @@ } }, "node_modules/pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, - "node_modules/pirates": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/pirates/-/pirates-3.0.2.tgz", - "integrity": "sha512-c5CgUJq6H2k6MJz72Ak1F5sN9n9wlSlJyEnwvpm9/y3WB4E3pHBDT2c6PEiS1vyJvq2bUxUAIu0EGf8Cx4Ic7Q==", + "node_modules/pkg-dir": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", + "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", "dev": true, "dependencies": { - "node-modules-regexp": "^1.0.0" + "find-up": "^2.1.0" }, "engines": { - "node": ">= 4" + "node": ">=4" } }, - "node_modules/pkg-dir": { + "node_modules/pkg-up": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", - "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-2.0.0.tgz", + "integrity": "sha1-yBmscoBZpGHKscOImivjxJoATX8=", "dev": true, "dependencies": { "find-up": "^2.1.0" @@ -6161,9 +6138,9 @@ } }, "node_modules/prebuild-install": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-6.0.0.tgz", - "integrity": 
"sha512-h2ZJ1PXHKWZpp1caLw0oX9sagVpL2YTk+ZwInQbQ3QqNd4J03O6MpFNmMTJlkfgPENWqe5kP0WjQLqz5OjLfsw==", + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-6.1.2.tgz", + "integrity": "sha512-PzYWIKZeP+967WuKYXlTOhYBgGOvTRSfaKI89XnfJ0ansRAH7hDU45X+K+FZeI1Wb/7p/NnuctPH3g0IqKUuSQ==", "dev": true, "dependencies": { "detect-libc": "^1.0.3", @@ -6172,15 +6149,14 @@ "minimist": "^1.2.3", "mkdirp-classic": "^0.5.3", "napi-build-utils": "^1.0.1", - "node-abi": "^2.7.0", + "node-abi": "^2.21.0", "noop-logger": "^0.1.1", "npmlog": "^4.0.1", "pump": "^3.0.0", "rc": "^1.2.7", "simple-get": "^3.0.3", "tar-fs": "^2.0.0", - "tunnel-agent": "^0.6.0", - "which-pm-runs": "^1.0.0" + "tunnel-agent": "^0.6.0" }, "bin": { "prebuild-install": "bin.js" @@ -6189,6 +6165,66 @@ "node": ">=6" } }, + "node_modules/prebuild-install/node_modules/aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", + "dev": true + }, + "node_modules/prebuild-install/node_modules/gauge": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", + "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", + "dev": true, + "dependencies": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + } + }, + "node_modules/prebuild-install/node_modules/is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "dev": true, + "dependencies": { + "number-is-nan": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/prebuild-install/node_modules/npmlog": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", + "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "dev": true, + "dependencies": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + } + }, + "node_modules/prebuild-install/node_modules/string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "dev": true, + "dependencies": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -6198,12 +6234,30 @@ "node": ">= 0.8.0" } }, + "node_modules/proc-log": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-1.0.0.tgz", + "integrity": "sha512-aCk8AO51s+4JyuYGg3Q/a6gnrlDO09NpVWePtjp7xwphcoQ04x5WAfCyugcsbLooWcMJ87CLkD4+604IckEdhg==", + "inBundle": true + }, "node_modules/process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", "inBundle": true }, + "node_modules/process-on-spawn": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.0.0.tgz", + "integrity": "sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==", + "dev": true, + "dependencies": { + "fromentries": "^1.2.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/progress": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", @@ -6283,27 +6337,12 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/pseudomap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", - "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=", - "dev": true - }, "node_modules/psl": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==", "inBundle": true }, - "node_modules/puka": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/puka/-/puka-1.0.1.tgz", - "integrity": "sha512-ssjRZxBd7BT3dte1RR3VoeT2cT/ODH8x+h0rUF1rMqB0srHYf48stSDWfiYakTp5UBZMxroZhB2+ExLDHm7W3g==", - "inBundle": true, - "engines": { - "node": ">=4" - } - }, "node_modules/pump": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", @@ -6342,9 +6381,9 @@ } }, "node_modules/queue-microtask": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.2.tgz", - "integrity": "sha512-dB15eXv3p2jDlbOiNLyMabYg1/sXvppd8DP2J3EOCQ0AkuSXCW2tP7mnVouVLJKgUMY6yP0kcQDVpLCN13h4Xg==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", "dev": true, "funding": [ { @@ -6445,9 +6484,9 @@ } }, "node_modules/read-package-json-fast": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-2.0.2.tgz", - "integrity": "sha512-5fyFUyO9B799foVk4n6ylcoAktG/FbE3jwRKxvwaeSrIunaoMc0u81dzXxjeAFKOce7O5KncdfwpGvvs6r5PsQ==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-2.0.3.tgz", + "integrity": "sha512-W/BKtbL+dUjTuRL2vziuYhp76s5HZ9qQhd/dKfWIZveD0O40453QNyZhC0e63lqZrAQ4jiOapVoeJ7JrszenQQ==", "inBundle": true, "dependencies": { "json-parse-even-better-errors": "^2.3.0", @@ -6469,9 +6508,9 @@ } }, "node_modules/read-package-tree/node_modules/hosted-git-info": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", - "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", "dev": true }, "node_modules/read-package-tree/node_modules/normalize-package-data": { @@ -6508,36 +6547,36 @@ } }, "node_modules/read-pkg": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", - "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", "dev": true, "dependencies": { - "load-json-file": "^2.0.0", + "load-json-file": "^4.0.0", 
"normalize-package-data": "^2.3.2", - "path-type": "^2.0.0" + "path-type": "^3.0.0" }, "engines": { "node": ">=4" } }, "node_modules/read-pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", - "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz", + "integrity": "sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=", "dev": true, "dependencies": { "find-up": "^2.0.0", - "read-pkg": "^2.0.0" + "read-pkg": "^3.0.0" }, "engines": { "node": ">=4" } }, "node_modules/read-pkg/node_modules/hosted-git-info": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", - "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", "dev": true }, "node_modules/read-pkg/node_modules/normalize-package-data": { @@ -6742,50 +6781,18 @@ "node": ">= 6" } }, - "node_modules/request-promise-core": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/request-promise-core/-/request-promise-core-1.1.4.tgz", - "integrity": "sha512-TTbAfBBRdWD7aNNOoVOBH4pN/KigV6LyapYNNlAPA8JwbovRti1E88m3sYAwsLi5ryhPKsE9APwnjFTgdUjTpw==", - "dev": true, - "dependencies": { - "lodash": "^4.17.19" - }, - "engines": { - "node": ">=0.10.0" - }, - "peerDependencies": { - "request": "^2.34" - } - }, - "node_modules/request-promise-native": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/request-promise-native/-/request-promise-native-1.0.9.tgz", - "integrity": "sha512-wcW+sIUiWnKgNY0dqCpOZkUbF/I+YPi+f09JZIDa39Ec+q82CpSYniDp+ISgTTbKmnpJWASeJBPZmoxH84wt3g==", - "deprecated": "request-promise-native has been deprecated because it extends the now deprecated request package, see https://github.com/request/request/issues/3142", - "dev": true, - "dependencies": { - "request-promise-core": "1.1.4", - "stealthy-require": "^1.1.1", - "tough-cookie": "^2.3.3" - }, - "engines": { - "node": ">=0.12.0" - }, - "peerDependencies": { - "request": "^2.34" - } - }, - "node_modules/request-promise-native/node_modules/tough-cookie": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", - "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", - "dev": true, + "node_modules/request/node_modules/form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "inBundle": true, "dependencies": { - "psl": "^1.1.28", - "punycode": "^2.1.1" + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" }, "engines": { - "node": ">=0.8" + "node": ">= 0.12" } }, "node_modules/request/node_modules/tough-cookie": { @@ -6819,15 +6826,6 @@ "node": ">=0.10.0" } }, - "node_modules/require-inject": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/require-inject/-/require-inject-1.4.4.tgz", - "integrity": "sha512-5Y5ctRN84+I4iOZO61gm+48tgP/6Hcd3VZydkaEM3MCuOvnHRsTJYQBOc01faI/Z9at5nsCAJVHhlfPA6Pc0Og==", - "dev": true, - "dependencies": { - "caller": "^1.0.1" - } - }, 
"node_modules/require-main-filename": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", @@ -6928,9 +6926,9 @@ } }, "node_modules/semver": { - "version": "7.3.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.4.tgz", - "integrity": "sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==", + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", "inBundle": true, "dependencies": { "lru-cache": "^6.0.0" @@ -7043,9 +7041,9 @@ } }, "node_modules/socks": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.5.1.tgz", - "integrity": "sha512-oZCsJJxapULAYJaEYBSzMcz8m3jqgGrHaGhkmU/o/PQfFWYWxkAaA0UMGImb6s6tEXfKi959X6VJjMMQ3P6TTQ==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.6.1.tgz", + "integrity": "sha512-kLQ9N5ucj8uIcxrDwjm0Jsqk06xdpBjGNQtpXy4Q8/QY2k+fY7nZH8CARy+hkbG+SGAovmzzuauCpBlb8FrnBA==", "inBundle": true, "dependencies": { "ip": "^1.1.5", @@ -7071,9 +7069,9 @@ } }, "node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", "dev": true, "engines": { "node": ">=0.10.0" @@ -7089,6 +7087,15 @@ "source-map": "^0.6.0" } }, + "node_modules/source-map-support/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/space-separated-tokens": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz", @@ -7100,53 +7107,20 @@ } }, "node_modules/spawn-wrap": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-1.4.3.tgz", - "integrity": "sha512-IgB8md0QW/+tWqcavuFgKYR/qIRvJkRLPJDFaoXtLLUaVcCDK0+HeFTkmQHj3eprcYhc+gOl0aEA1w7qZlYezw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-2.0.0.tgz", + "integrity": "sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==", "dev": true, "dependencies": { - "foreground-child": "^1.5.6", - "mkdirp": "^0.5.0", - "os-homedir": "^1.0.1", - "rimraf": "^2.6.2", + "foreground-child": "^2.0.0", + "is-windows": "^1.0.2", + "make-dir": "^3.0.0", + "rimraf": "^3.0.0", "signal-exit": "^3.0.2", - "which": "^1.3.0" - } - }, - "node_modules/spawn-wrap/node_modules/mkdirp": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", - "dev": true, - "dependencies": { - "minimist": "^1.2.5" - }, - "bin": { - "mkdirp": "bin/cmd.js" - } - }, - "node_modules/spawn-wrap/node_modules/rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": 
"sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", - "dev": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - } - }, - "node_modules/spawn-wrap/node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" + "which": "^2.0.1" }, - "bin": { - "which": "bin/which" + "engines": { + "node": ">=8" } }, "node_modules/spdx-compare": { @@ -7196,9 +7170,9 @@ } }, "node_modules/spdx-license-ids": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.7.tgz", - "integrity": "sha512-U+MTEOO0AiDzxwFvoa4JVnMV6mZlJKk2sBLt90s7G0Gd0Mlknc7kxEn3nuDPNZRta7O2uy8oLcZLVT+4sqNZHQ==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.9.tgz", + "integrity": "sha512-Ki212dKK4ogX+xDo4CtOZBVIwhsKBEfsEEcwmJfLQzirgc2jIWdzg40Unxz/HzEUqM1WFzVlQSMF9kZZ2HboLQ==", "inBundle": true }, "node_modules/spdx-osi": { @@ -7245,11 +7219,6 @@ "safer-buffer": "^2.0.2", "tweetnacl": "~0.14.0" }, - "bin": { - "sshpk-conv": "bin/sshpk-conv", - "sshpk-sign": "bin/sshpk-sign", - "sshpk-verify": "bin/sshpk-verify" - }, "engines": { "node": ">=0.10.0" } @@ -7267,13 +7236,22 @@ } }, "node_modules/stack-utils": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.4.tgz", - "integrity": "sha512-IPDJfugEGbfizBwBZRZ3xpccMdRyP5lqsBWXGQWimVjua/ccLCeMOAVjlc1R7LxFjo5sEDhyNIXd8mo/AiDS9w==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": "sha512-gL//fkxfWUsIlFL2Tl42Cl6+HFALEaB1FU76I/Fy+oZjRreP7OPMXFlGbxM7NQsI0ZpUfw76sHnv0WNYuTb7Iw==", "dev": true, "dependencies": { "escape-string-regexp": "^2.0.0" }, + "engines": { + "node": ">=10" + } + }, + "node_modules/stack-utils/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, "engines": { "node": ">=8" } @@ -7288,15 +7266,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/stealthy-require": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", - "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", @@ -7341,12 +7310,12 @@ } }, "node_modules/string.prototype.trimend": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz", - "integrity": "sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz", + "integrity": "sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==", "dev": true, "dependencies": { - "call-bind": "^1.0.0", + "call-bind": "^1.0.2", "define-properties": "^1.1.3" }, "funding": { @@ -7354,12 +7323,12 @@ } }, 
"node_modules/string.prototype.trimstart": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz", - "integrity": "sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz", + "integrity": "sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==", "dev": true, "dependencies": { - "call-bind": "^1.0.0", + "call-bind": "^1.0.2", "define-properties": "^1.1.3" }, "funding": { @@ -7433,24 +7402,26 @@ "dev": true }, "node_modules/table": { - "version": "6.0.7", - "resolved": "https://registry.npmjs.org/table/-/table-6.0.7.tgz", - "integrity": "sha512-rxZevLGTUzWna/qBLObOe16kB2RTnnbhciwgPbMMlazz1yZGVEgnZK762xyVdVznhqxrfCeBMmMkgOOaPwjH7g==", + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/table/-/table-6.7.1.tgz", + "integrity": "sha512-ZGum47Yi6KOOFDE8m223td53ath2enHcYLgOCjGr5ngu8bdIARQk6mN/wRMv4yMRcHnCSnHbCEha4sobQx5yWg==", "dev": true, "dependencies": { - "ajv": "^7.0.2", - "lodash": "^4.17.20", + "ajv": "^8.0.1", + "lodash.clonedeep": "^4.5.0", + "lodash.truncate": "^4.4.2", "slice-ansi": "^4.0.0", - "string-width": "^4.2.0" + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0" }, "engines": { "node": ">=10.0.0" } }, "node_modules/table/node_modules/ajv": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-7.1.0.tgz", - "integrity": "sha512-svS9uILze/cXbH0z2myCK2Brqprx/+JJYK5pHicT/GQiBfzzhUVAIT6MwqJg8y4xV/zoGsUeuPuwtoiKSGE15g==", + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.5.0.tgz", + "integrity": "sha512-Y2l399Tt1AguU3BPRP9Fn4eN+Or+StUGWCUpbnFyXSo8NZ9S4uj+AG2pjs5apK+ZMOwYOz1+a+VKvKH7CudXgQ==", "dev": true, "dependencies": { "fast-deep-equal": "^3.1.1", @@ -7488,9 +7459,9 @@ "dev": true }, "node_modules/table/node_modules/string-width": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", - "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", + "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", "dev": true, "dependencies": { "emoji-regex": "^8.0.0", @@ -7514,169 +7485,68 @@ } }, "node_modules/tap": { - "version": "14.11.0", - "resolved": "https://registry.npmjs.org/tap/-/tap-14.11.0.tgz", - "integrity": "sha512-z8qnNFVyIjLh/bNoTLFRkEk09XZUDAZbCkz/BjvHHly3ao5H+y60gPnedALfheEjA6dA4tpp/mrKq2NWlMuq0A==", + "version": "15.0.9", + "resolved": "https://registry.npmjs.org/tap/-/tap-15.0.9.tgz", + "integrity": "sha512-bqY5SxEqYKRd37PIUfKBf9HMs/hklyl/fGXkuStr9rYTIGa0/icpSLsm6IVOmx2qT0/TliPNJ6OvS5kddJYHdg==", "bundleDependencies": [ "ink", "treport", - "@types/react", - "import-jsx", - "minipass", - "signal-exit", - "tap-parser", - "tap-yaml", - "yaml", - "@babel/code-frame", - "@babel/core", - "@babel/generator", - "@babel/helper-annotate-as-pure", - "@babel/helper-builder-react-jsx", - "@babel/helper-builder-react-jsx-experimental", - "@babel/helper-function-name", - "@babel/helper-get-function-arity", - "@babel/helper-member-expression-to-functions", - "@babel/helper-module-imports", - "@babel/helper-module-transforms", - "@babel/helper-optimise-call-expression", - 
"@babel/helper-plugin-utils", - "@babel/helper-replace-supers", - "@babel/helper-simple-access", - "@babel/helper-split-export-declaration", - "@babel/helper-validator-identifier", - "@babel/helpers", - "@babel/highlight", - "@babel/parser", - "@babel/plugin-proposal-object-rest-spread", - "@babel/plugin-syntax-jsx", - "@babel/plugin-syntax-object-rest-spread", - "@babel/plugin-transform-destructuring", - "@babel/plugin-transform-parameters", - "@babel/plugin-transform-react-jsx", - "@babel/template", - "@babel/traverse", - "@babel/types", - "@types/color-name", - "@types/prop-types", - "@types/yoga-layout", - "ansi-escapes", - "ansi-regex", - "ansi-styles", - "ansicolors", - "arrify", - "astral-regex", - "auto-bind", - "caller-callsite", - "caller-path", - "callsites", - "cardinal", - "chalk", - "ci-info", - "cli-cursor", - "cli-truncate", - "color-convert", - "color-name", - "convert-source-map", - "csstype", - "debug", - "emoji-regex", - "escape-string-regexp", - "esprima", - "events-to-array", - "gensync", - "globals", - "has-flag", - "is-ci", - "is-fullwidth-code-point", - "js-tokens", - "jsesc", - "json5", - "lodash", - "lodash.throttle", - "log-update", - "loose-envify", - "mimic-fn", - "minimist", - "ms", - "object-assign", - "onetime", - "path-parse", - "prop-types", - "punycode", - "react-is", - "react-reconciler", - "redeyed", - "resolve", - "resolve-from", - "restore-cursor", - "scheduler", - "semver", - "slice-ansi", - "string-length", - "string-width", - "strip-ansi", - "supports-color", - "to-fast-properties", - "type-fest", - "unicode-length", - "widest-line", - "wrap-ansi", - "yoga-layout-prebuilt" + "@types/react" ], "dev": true, "dependencies": { - "@types/react": "^16.9.16", - "async-hook-domain": "^1.1.3", - "bind-obj-methods": "^2.0.0", - "browser-process-hrtime": "^1.0.0", + "@types/react": "^16.9.23", "chokidar": "^3.3.0", - "color-support": "^1.1.0", "coveralls": "^3.0.11", - "diff": "^4.0.1", - "esm": "^3.2.25", "findit": "^2.0.0", - "flow-remove-types": "^2.112.0", - "foreground-child": "^1.3.3", + "foreground-child": "^2.0.0", "fs-exists-cached": "^1.0.0", - "function-loop": "^1.0.2", "glob": "^7.1.6", - "import-jsx": "^3.1.0", - "ink": "^2.6.0", + "import-jsx": "^4.0.0", + "ink": "^2.7.1", "isexe": "^2.0.0", - "istanbul-lib-processinfo": "^1.0.0", + "istanbul-lib-processinfo": "^2.0.2", "jackspeak": "^1.4.0", + "libtap": "^1.1.1", "minipass": "^3.1.1", - "mkdirp": "^0.5.4", - "nyc": "^14.1.1", + "mkdirp": "^1.0.4", + "nyc": "^15.1.0", "opener": "^1.5.1", - "own-or": "^1.0.0", - "own-or-env": "^1.0.1", "react": "^16.12.0", - "rimraf": "^2.7.1", + "rimraf": "^3.0.0", "signal-exit": "^3.0.0", "source-map-support": "^0.5.16", - "stack-utils": "^1.0.3", "tap-mocha-reporter": "^5.0.0", "tap-parser": "^10.0.1", "tap-yaml": "^1.0.0", - "tcompare": "^3.0.0", - "treport": "^1.0.2", - "trivial-deferred": "^1.0.1", - "ts-node": "^8.5.2", - "typescript": "^3.7.2", - "which": "^2.0.2", - "write-file-atomic": "^3.0.1", - "yaml": "^1.7.2", - "yapool": "^1.0.0" + "tcompare": "^5.0.6", + "treport": "^2.0.2", + "which": "^2.0.2" }, "bin": { "tap": "bin/run.js" }, "engines": { - "node": ">=8" + "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/isaacs" + }, + "peerDependencies": { + "flow-remove-types": ">=2.112.0", + "ts-node": ">=8.5.2", + "typescript": ">=3.7.2" + }, + "peerDependenciesMeta": { + "flow-remove-types": { + "optional": true + }, + "ts-node": { + "optional": true + }, + "typescript": { + "optional": true + } } }, "node_modules/tap-mocha-reporter": { 
@@ -7710,6 +7580,15 @@ "node": ">=0.3.1" } }, + "node_modules/tap-mocha-reporter/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/tap-parser": { "version": "10.1.0", "resolved": "https://registry.npmjs.org/tap-parser/-/tap-parser-10.1.0.tgz", @@ -7737,35 +7616,40 @@ } }, "node_modules/tap/node_modules/@babel/code-frame": { - "version": "7.10.4", + "version": "7.12.13", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/highlight": "^7.10.4" + "@babel/highlight": "^7.12.13" } }, + "node_modules/tap/node_modules/@babel/compat-data": { + "version": "7.14.0", + "dev": true, + "inBundle": true, + "license": "MIT" + }, "node_modules/tap/node_modules/@babel/core": { - "version": "7.10.5", + "version": "7.14.0", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.10.4", - "@babel/generator": "^7.10.5", - "@babel/helper-module-transforms": "^7.10.5", - "@babel/helpers": "^7.10.4", - "@babel/parser": "^7.10.5", - "@babel/template": "^7.10.4", - "@babel/traverse": "^7.10.5", - "@babel/types": "^7.10.5", + "@babel/code-frame": "^7.12.13", + "@babel/generator": "^7.14.0", + "@babel/helper-compilation-targets": "^7.13.16", + "@babel/helper-module-transforms": "^7.14.0", + "@babel/helpers": "^7.14.0", + "@babel/parser": "^7.14.0", + "@babel/template": "^7.12.13", + "@babel/traverse": "^7.14.0", + "@babel/types": "^7.14.0", "convert-source-map": "^1.7.0", "debug": "^4.1.0", - "gensync": "^1.0.0-beta.1", + "gensync": "^1.0.0-beta.2", "json5": "^2.1.2", - "lodash": "^4.17.19", - "resolve": "^1.3.2", - "semver": "^5.4.1", + "semver": "^6.3.0", "source-map": "^0.5.0" }, "engines": { @@ -7776,194 +7660,176 @@ "url": "https://opencollective.com/babel" } }, - "node_modules/tap/node_modules/@babel/core/node_modules/source-map": { - "version": "0.5.7", - "dev": true, - "inBundle": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/tap/node_modules/@babel/generator": { - "version": "7.10.5", + "version": "7.14.1", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.10.5", + "@babel/types": "^7.14.1", "jsesc": "^2.5.1", "source-map": "^0.5.0" } }, - "node_modules/tap/node_modules/@babel/generator/node_modules/source-map": { - "version": "0.5.7", - "dev": true, - "inBundle": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/tap/node_modules/@babel/helper-annotate-as-pure": { - "version": "7.10.4", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.10.4" - } - }, - "node_modules/tap/node_modules/@babel/helper-builder-react-jsx": { - "version": "7.10.4", + "version": "7.12.13", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.10.4", - "@babel/types": "^7.10.4" + "@babel/types": "^7.12.13" } }, - "node_modules/tap/node_modules/@babel/helper-builder-react-jsx-experimental": { - "version": "7.10.5", + "node_modules/tap/node_modules/@babel/helper-compilation-targets": { + "version": "7.13.16", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.10.4", - "@babel/helper-module-imports": 
"^7.10.4", - "@babel/types": "^7.10.5" + "@babel/compat-data": "^7.13.15", + "@babel/helper-validator-option": "^7.12.17", + "browserslist": "^4.14.5", + "semver": "^6.3.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" } }, "node_modules/tap/node_modules/@babel/helper-function-name": { - "version": "7.10.4", + "version": "7.12.13", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/helper-get-function-arity": "^7.10.4", - "@babel/template": "^7.10.4", - "@babel/types": "^7.10.4" + "@babel/helper-get-function-arity": "^7.12.13", + "@babel/template": "^7.12.13", + "@babel/types": "^7.12.13" } }, "node_modules/tap/node_modules/@babel/helper-get-function-arity": { - "version": "7.10.4", + "version": "7.12.13", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.10.4" + "@babel/types": "^7.12.13" } }, "node_modules/tap/node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.10.5", + "version": "7.13.12", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.10.5" + "@babel/types": "^7.13.12" } }, "node_modules/tap/node_modules/@babel/helper-module-imports": { - "version": "7.10.4", + "version": "7.13.12", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.10.4" + "@babel/types": "^7.13.12" } }, "node_modules/tap/node_modules/@babel/helper-module-transforms": { - "version": "7.10.5", + "version": "7.14.0", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/helper-module-imports": "^7.10.4", - "@babel/helper-replace-supers": "^7.10.4", - "@babel/helper-simple-access": "^7.10.4", - "@babel/helper-split-export-declaration": "^7.10.4", - "@babel/template": "^7.10.4", - "@babel/types": "^7.10.5", - "lodash": "^4.17.19" + "@babel/helper-module-imports": "^7.13.12", + "@babel/helper-replace-supers": "^7.13.12", + "@babel/helper-simple-access": "^7.13.12", + "@babel/helper-split-export-declaration": "^7.12.13", + "@babel/helper-validator-identifier": "^7.14.0", + "@babel/template": "^7.12.13", + "@babel/traverse": "^7.14.0", + "@babel/types": "^7.14.0" } }, "node_modules/tap/node_modules/@babel/helper-optimise-call-expression": { - "version": "7.10.4", + "version": "7.12.13", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.10.4" + "@babel/types": "^7.12.13" } }, "node_modules/tap/node_modules/@babel/helper-plugin-utils": { - "version": "7.10.4", + "version": "7.13.0", "dev": true, "inBundle": true, "license": "MIT" }, "node_modules/tap/node_modules/@babel/helper-replace-supers": { - "version": "7.10.4", + "version": "7.13.12", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/helper-member-expression-to-functions": "^7.10.4", - "@babel/helper-optimise-call-expression": "^7.10.4", - "@babel/traverse": "^7.10.4", - "@babel/types": "^7.10.4" + "@babel/helper-member-expression-to-functions": "^7.13.12", + "@babel/helper-optimise-call-expression": "^7.12.13", + "@babel/traverse": "^7.13.0", + "@babel/types": "^7.13.12" } }, "node_modules/tap/node_modules/@babel/helper-simple-access": { - "version": "7.10.4", + "version": "7.13.12", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/template": "^7.10.4", - "@babel/types": "^7.10.4" + "@babel/types": "^7.13.12" } }, "node_modules/tap/node_modules/@babel/helper-split-export-declaration": { - "version": "7.10.4", + "version": "7.12.13", "dev": true, "inBundle": true, "license": 
"MIT", "dependencies": { - "@babel/types": "^7.10.4" + "@babel/types": "^7.12.13" } }, "node_modules/tap/node_modules/@babel/helper-validator-identifier": { - "version": "7.10.4", + "version": "7.14.0", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/tap/node_modules/@babel/helper-validator-option": { + "version": "7.12.17", "dev": true, "inBundle": true, "license": "MIT" }, "node_modules/tap/node_modules/@babel/helpers": { - "version": "7.10.4", + "version": "7.14.0", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/template": "^7.10.4", - "@babel/traverse": "^7.10.4", - "@babel/types": "^7.10.4" + "@babel/template": "^7.12.13", + "@babel/traverse": "^7.14.0", + "@babel/types": "^7.14.0" } }, "node_modules/tap/node_modules/@babel/highlight": { - "version": "7.10.4", + "version": "7.14.0", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.10.4", + "@babel/helper-validator-identifier": "^7.14.0", "chalk": "^2.0.0", "js-tokens": "^4.0.0" } }, "node_modules/tap/node_modules/@babel/parser": { - "version": "7.10.5", + "version": "7.14.1", "dev": true, "inBundle": true, "license": "MIT", @@ -7975,26 +7841,28 @@ } }, "node_modules/tap/node_modules/@babel/plugin-proposal-object-rest-spread": { - "version": "7.10.4", + "version": "7.13.8", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.10.4", - "@babel/plugin-syntax-object-rest-spread": "^7.8.0", - "@babel/plugin-transform-parameters": "^7.10.4" + "@babel/compat-data": "^7.13.8", + "@babel/helper-compilation-targets": "^7.13.8", + "@babel/helper-plugin-utils": "^7.13.0", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-transform-parameters": "^7.13.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/tap/node_modules/@babel/plugin-syntax-jsx": { - "version": "7.10.4", + "version": "7.12.13", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.10.4" + "@babel/helper-plugin-utils": "^7.12.13" }, "peerDependencies": { "@babel/core": "^7.0.0-0" @@ -8013,90 +7881,82 @@ } }, "node_modules/tap/node_modules/@babel/plugin-transform-destructuring": { - "version": "7.10.4", + "version": "7.13.17", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.10.4" + "@babel/helper-plugin-utils": "^7.13.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/tap/node_modules/@babel/plugin-transform-parameters": { - "version": "7.10.5", + "version": "7.13.0", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/helper-get-function-arity": "^7.10.4", - "@babel/helper-plugin-utils": "^7.10.4" + "@babel/helper-plugin-utils": "^7.13.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/tap/node_modules/@babel/plugin-transform-react-jsx": { - "version": "7.10.4", + "version": "7.13.12", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/helper-builder-react-jsx": "^7.10.4", - "@babel/helper-builder-react-jsx-experimental": "^7.10.4", - "@babel/helper-plugin-utils": "^7.10.4", - "@babel/plugin-syntax-jsx": "^7.10.4" + "@babel/helper-annotate-as-pure": "^7.12.13", + "@babel/helper-module-imports": "^7.13.12", + "@babel/helper-plugin-utils": "^7.13.0", + "@babel/plugin-syntax-jsx": "^7.12.13", + "@babel/types": "^7.13.12" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, 
"node_modules/tap/node_modules/@babel/template": { - "version": "7.10.4", + "version": "7.12.13", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.10.4", - "@babel/parser": "^7.10.4", - "@babel/types": "^7.10.4" + "@babel/code-frame": "^7.12.13", + "@babel/parser": "^7.12.13", + "@babel/types": "^7.12.13" } }, "node_modules/tap/node_modules/@babel/traverse": { - "version": "7.10.5", + "version": "7.14.0", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.10.4", - "@babel/generator": "^7.10.5", - "@babel/helper-function-name": "^7.10.4", - "@babel/helper-split-export-declaration": "^7.10.4", - "@babel/parser": "^7.10.5", - "@babel/types": "^7.10.5", + "@babel/code-frame": "^7.12.13", + "@babel/generator": "^7.14.0", + "@babel/helper-function-name": "^7.12.13", + "@babel/helper-split-export-declaration": "^7.12.13", + "@babel/parser": "^7.14.0", + "@babel/types": "^7.14.0", "debug": "^4.1.0", - "globals": "^11.1.0", - "lodash": "^4.17.19" + "globals": "^11.1.0" } }, "node_modules/tap/node_modules/@babel/types": { - "version": "7.10.5", + "version": "7.14.1", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.10.4", - "lodash": "^4.17.19", + "@babel/helper-validator-identifier": "^7.14.0", "to-fast-properties": "^2.0.0" } }, - "node_modules/tap/node_modules/@types/color-name": { - "version": "1.1.1", - "dev": true, - "inBundle": true, - "license": "MIT" - }, "node_modules/tap/node_modules/@types/prop-types": { "version": "15.7.3", "dev": true, @@ -8104,15 +7964,22 @@ "license": "MIT" }, "node_modules/tap/node_modules/@types/react": { - "version": "16.9.43", + "version": "16.14.6", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { "@types/prop-types": "*", - "csstype": "^2.2.0" + "@types/scheduler": "*", + "csstype": "^3.0.2" } }, + "node_modules/tap/node_modules/@types/scheduler": { + "version": "0.16.1", + "dev": true, + "inBundle": true, + "license": "MIT" + }, "node_modules/tap/node_modules/@types/yoga-layout": { "version": "1.9.2", "dev": true, @@ -8120,12 +7987,12 @@ "license": "MIT" }, "node_modules/tap/node_modules/ansi-escapes": { - "version": "4.3.1", + "version": "4.3.2", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "type-fest": "^0.11.0" + "type-fest": "^0.21.3" }, "engines": { "node": ">=8" @@ -8134,15 +8001,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/tap/node_modules/ansi-regex": { - "version": "5.0.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/tap/node_modules/ansi-styles": { "version": "3.2.1", "dev": true, @@ -8191,6 +8049,45 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/tap/node_modules/balanced-match": { + "version": "1.0.2", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/tap/node_modules/brace-expansion": { + "version": "1.1.11", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/tap/node_modules/browserslist": { + "version": "4.16.6", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "caniuse-lite": "^1.0.30001219", + "colorette": "^1.2.2", + "electron-to-chromium": "^1.3.723", + "escalade": "^3.1.1", + "node-releases": "^1.1.71" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 
|| ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + } + }, "node_modules/tap/node_modules/caller-callsite": { "version": "2.0.0", "dev": true, @@ -8224,6 +8121,12 @@ "node": ">=4" } }, + "node_modules/tap/node_modules/caniuse-lite": { + "version": "1.0.30001223", + "dev": true, + "inBundle": true, + "license": "CC-BY-4.0" + }, "node_modules/tap/node_modules/cardinal": { "version": "2.1.1", "dev": true, @@ -8300,6 +8203,24 @@ "inBundle": true, "license": "MIT" }, + "node_modules/tap/node_modules/colorette": { + "version": "1.2.2", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/tap/node_modules/commondir": { + "version": "1.0.1", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/tap/node_modules/concat-map": { + "version": "0.0.1", + "dev": true, + "inBundle": true, + "license": "MIT" + }, "node_modules/tap/node_modules/convert-source-map": { "version": "1.7.0", "dev": true, @@ -8309,35 +8230,34 @@ "safe-buffer": "~5.1.1" } }, - "node_modules/tap/node_modules/convert-source-map/node_modules/safe-buffer": { - "version": "5.1.2", - "dev": true, - "inBundle": true, - "license": "MIT" - }, "node_modules/tap/node_modules/csstype": { - "version": "2.6.11", + "version": "3.0.8", "dev": true, "inBundle": true, "license": "MIT" }, "node_modules/tap/node_modules/debug": { - "version": "4.1.1", + "version": "4.3.1", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "ms": "^2.1.1" + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/tap/node_modules/diff": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", - "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "node_modules/tap/node_modules/electron-to-chromium": { + "version": "1.3.727", "dev": true, - "engines": { - "node": ">=0.3.1" - } + "inBundle": true, + "license": "ISC" }, "node_modules/tap/node_modules/emoji-regex": { "version": "8.0.0", @@ -8345,6 +8265,15 @@ "inBundle": true, "license": "MIT" }, + "node_modules/tap/node_modules/escalade": { + "version": "3.1.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/tap/node_modules/escape-string-regexp": { "version": "1.0.5", "dev": true, @@ -8373,8 +8302,44 @@ "inBundle": true, "license": "ISC" }, + "node_modules/tap/node_modules/find-cache-dir": { + "version": "3.3.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "commondir": "^1.0.1", + "make-dir": "^3.0.2", + "pkg-dir": "^4.1.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/avajs/find-cache-dir?sponsor=1" + } + }, + "node_modules/tap/node_modules/find-up": { + "version": "4.1.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tap/node_modules/fs.realpath": { + "version": "1.0.0", + "dev": true, + "inBundle": true, + "license": "ISC" + }, "node_modules/tap/node_modules/gensync": { - "version": "1.0.0-beta.1", + "version": "1.0.0-beta.2", "dev": true, "inBundle": true, "license": "MIT", @@ -8382,6 +8347,26 @@ "node": ">=6.9.0" } }, + "node_modules/tap/node_modules/glob": { + "version": "7.1.7", + "dev": true, + 
"inBundle": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/tap/node_modules/globals": { "version": "11.12.0", "dev": true, @@ -8401,7 +8386,7 @@ } }, "node_modules/tap/node_modules/import-jsx": { - "version": "3.1.0", + "version": "4.0.0", "dev": true, "inBundle": true, "license": "MIT", @@ -8411,12 +8396,31 @@ "@babel/plugin-transform-destructuring": "^7.5.0", "@babel/plugin-transform-react-jsx": "^7.3.0", "caller-path": "^2.0.0", - "resolve-from": "^3.0.0" + "find-cache-dir": "^3.2.0", + "make-dir": "^3.0.2", + "resolve-from": "^3.0.0", + "rimraf": "^3.0.0" }, "engines": { - "node": ">= 4" + "node": ">=10" + } + }, + "node_modules/tap/node_modules/inflight": { + "version": "1.0.6", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" } }, + "node_modules/tap/node_modules/inherits": { + "version": "2.0.4", + "dev": true, + "inBundle": true, + "license": "ISC" + }, "node_modules/tap/node_modules/ink": { "version": "2.7.1", "dev": true, @@ -8456,12 +8460,11 @@ } }, "node_modules/tap/node_modules/ink/node_modules/ansi-styles": { - "version": "4.2.1", + "version": "4.3.0", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -8512,7 +8515,7 @@ } }, "node_modules/tap/node_modules/ink/node_modules/supports-color": { - "version": "7.1.0", + "version": "7.2.0", "dev": true, "inBundle": true, "license": "MIT", @@ -8563,7 +8566,7 @@ } }, "node_modules/tap/node_modules/json5": { - "version": "2.1.3", + "version": "2.2.0", "dev": true, "inBundle": true, "license": "MIT", @@ -8577,14 +8580,20 @@ "node": ">=6" } }, - "node_modules/tap/node_modules/lodash": { - "version": "4.17.19", - "dev": true, - "inBundle": true, - "license": "MIT" - }, - "node_modules/tap/node_modules/lodash.throttle": { - "version": "4.1.1", + "node_modules/tap/node_modules/locate-path": { + "version": "5.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tap/node_modules/lodash.throttle": { + "version": "4.1.1", "dev": true, "inBundle": true, "license": "MIT" @@ -8737,6 +8746,21 @@ "loose-envify": "cli.js" } }, + "node_modules/tap/node_modules/make-dir": { + "version": "3.1.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/tap/node_modules/mimic-fn": { "version": "2.1.0", "dev": true, @@ -8746,6 +8770,18 @@ "node": ">=6" } }, + "node_modules/tap/node_modules/minimatch": { + "version": "3.0.4", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/tap/node_modules/minimist": { "version": "1.2.5", "dev": true, @@ -8764,26 +8800,14 @@ "node": ">=8" } }, - "node_modules/tap/node_modules/minipass/node_modules/yallist": { - "version": "4.0.0", + "node_modules/tap/node_modules/ms": { + "version": "2.1.2", "dev": true, "inBundle": true, - "license": "ISC" + "license": "MIT" }, - "node_modules/tap/node_modules/mkdirp": { - "version": 
"0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", - "dev": true, - "dependencies": { - "minimist": "^1.2.5" - }, - "bin": { - "mkdirp": "bin/cmd.js" - } - }, - "node_modules/tap/node_modules/ms": { - "version": "2.1.2", + "node_modules/tap/node_modules/node-releases": { + "version": "1.1.71", "dev": true, "inBundle": true, "license": "MIT" @@ -8797,8 +8821,17 @@ "node": ">=0.10.0" } }, + "node_modules/tap/node_modules/once": { + "version": "1.4.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, "node_modules/tap/node_modules/onetime": { - "version": "5.1.0", + "version": "5.1.2", "dev": true, "inBundle": true, "license": "MIT", @@ -8807,13 +8840,76 @@ }, "engines": { "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/tap/node_modules/path-parse": { - "version": "1.0.6", + "node_modules/tap/node_modules/p-limit": { + "version": "2.3.0", "dev": true, "inBundle": true, - "license": "MIT" + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/tap/node_modules/p-locate": { + "version": "4.1.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tap/node_modules/p-try": { + "version": "2.2.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/tap/node_modules/path-exists": { + "version": "4.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/tap/node_modules/path-is-absolute": { + "version": "1.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tap/node_modules/pkg-dir": { + "version": "4.2.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } }, "node_modules/tap/node_modules/prop-types": { "version": "15.7.2", @@ -8868,18 +8964,6 @@ "esprima": "~4.0.0" } }, - "node_modules/tap/node_modules/resolve": { - "version": "1.17.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "path-parse": "^1.0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/tap/node_modules/resolve-from": { "version": "3.0.0", "dev": true, @@ -8903,17 +8987,26 @@ } }, "node_modules/tap/node_modules/rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "version": "3.0.2", "dev": true, + "inBundle": true, + "license": "ISC", "dependencies": { "glob": "^7.1.3" }, "bin": { "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/tap/node_modules/safe-buffer": { + "version": "5.1.2", + "dev": true, + "inBundle": true, + "license": "MIT" + }, "node_modules/tap/node_modules/scheduler": { "version": "0.18.0", "dev": true, @@ -8925,12 +9018,12 @@ } }, "node_modules/tap/node_modules/semver": { - "version": "5.7.1", + "version": "6.3.0", "dev": true, "inBundle": true, "license": "ISC", "bin": 
{ - "semver": "bin/semver" + "semver": "bin/semver.js" } }, "node_modules/tap/node_modules/signal-exit": { @@ -8954,12 +9047,11 @@ } }, "node_modules/tap/node_modules/slice-ansi/node_modules/ansi-styles": { - "version": "4.2.1", + "version": "4.3.0", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -8987,6 +9079,15 @@ "inBundle": true, "license": "MIT" }, + "node_modules/tap/node_modules/source-map": { + "version": "0.5.7", + "dev": true, + "inBundle": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/tap/node_modules/string-length": { "version": "3.1.0", "dev": true, @@ -9031,7 +9132,7 @@ } }, "node_modules/tap/node_modules/string-width": { - "version": "4.2.0", + "version": "4.2.2", "dev": true, "inBundle": true, "license": "MIT", @@ -9044,7 +9145,16 @@ "node": ">=8" } }, - "node_modules/tap/node_modules/strip-ansi": { + "node_modules/tap/node_modules/string-width/node_modules/ansi-regex": { + "version": "5.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/tap/node_modules/string-width/node_modules/strip-ansi": { "version": "6.0.0", "dev": true, "inBundle": true, @@ -9069,7 +9179,7 @@ } }, "node_modules/tap/node_modules/tap-parser": { - "version": "10.0.1", + "version": "10.1.0", "dev": true, "inBundle": true, "license": "MIT", @@ -9104,14 +9214,14 @@ } }, "node_modules/tap/node_modules/treport": { - "version": "1.0.2", + "version": "2.0.2", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { "cardinal": "^2.1.1", "chalk": "^3.0.0", - "import-jsx": "^3.1.0", + "import-jsx": "^4.0.0", "ink": "^2.6.0", "ms": "^2.1.2", "string-length": "^3.1.0", @@ -9123,12 +9233,11 @@ } }, "node_modules/tap/node_modules/treport/node_modules/ansi-styles": { - "version": "4.2.1", + "version": "4.3.0", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -9179,7 +9288,7 @@ } }, "node_modules/tap/node_modules/treport/node_modules/supports-color": { - "version": "7.1.0", + "version": "7.2.0", "dev": true, "inBundle": true, "license": "MIT", @@ -9191,12 +9300,12 @@ } }, "node_modules/tap/node_modules/type-fest": { - "version": "0.11.0", + "version": "0.21.3", "dev": true, "inBundle": true, "license": "(MIT OR CC0-1.0)", "engines": { - "node": ">=8" + "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -9259,13 +9368,21 @@ "node": ">=8" } }, + "node_modules/tap/node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "5.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/tap/node_modules/wrap-ansi/node_modules/ansi-styles": { - "version": "4.2.1", + "version": "4.3.0", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -9293,8 +9410,32 @@ "inBundle": true, "license": "MIT" }, + "node_modules/tap/node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "6.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tap/node_modules/wrappy": { + "version": "1.0.2", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/tap/node_modules/yallist": { + "version": "4.0.0", + "dev": true, 
+ "inBundle": true, + "license": "ISC" + }, "node_modules/tap/node_modules/yaml": { - "version": "1.10.0", + "version": "1.10.2", "dev": true, "inBundle": true, "license": "ISC", @@ -9303,7 +9444,7 @@ } }, "node_modules/tap/node_modules/yoga-layout-prebuilt": { - "version": "1.9.6", + "version": "1.10.0", "dev": true, "inBundle": true, "license": "MIT", @@ -9315,9 +9456,9 @@ } }, "node_modules/tar": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.0.tgz", - "integrity": "sha512-DUCttfhsnLCjwoDoFcI+B2iJgYa93vBnDUATYEeRx6sntCTdN01VnqsIuTlALXla/LWooNg0yEGeB+Y8WdFxGA==", + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.2.tgz", + "integrity": "sha512-EwKEgqJ7nJoS+s8QfLYVGMDmAsj+StbI2AM/RTHeUSsOw6Z8bwNBRv5z3CY0m7laC5qUAqruLX5AhMuc5deY3Q==", "inBundle": true, "dependencies": { "chownr": "^2.0.0", @@ -9380,191 +9521,38 @@ } }, "node_modules/tcompare": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/tcompare/-/tcompare-3.0.4.tgz", - "integrity": "sha512-Q3TitMVK59NyKgQyFh+857wTAUE329IzLDehuPgU4nF5e8g+EUQ+yUbjUy1/6ugiNnXztphT+NnqlCXolv9P3A==", - "dev": true, - "dependencies": { - "diff-frag": "^1.0.1" - } - }, - "node_modules/test-exclude": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-5.2.3.tgz", - "integrity": "sha512-M+oxtseCFO3EDtAaGH7iiej3CBkzXqFMbzqYAACdzKui4eZA+pq3tZEwChvOdNfa7xxy8BfbmgJSIr43cC/+2g==", - "dev": true, - "dependencies": { - "glob": "^7.1.3", - "minimatch": "^3.0.4", - "read-pkg-up": "^4.0.0", - "require-main-filename": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/test-exclude/node_modules/find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/tcompare/-/tcompare-5.0.6.tgz", + "integrity": "sha512-OvO7omN/wkdsKzmOqr3sQFfLbghs/2X5mwSkcfgRiXZshfPnTsAs3IRf1RixR/Pff26qG/r9ogcZMpV0YdeGXg==", "dev": true, "dependencies": { - "locate-path": "^3.0.0" + "diff": "^4.0.2" }, "engines": { - "node": ">=6" - } - }, - "node_modules/test-exclude/node_modules/hosted-git-info": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", - "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", - "dev": true - }, - "node_modules/test-exclude/node_modules/load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/test-exclude/node_modules/locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", - "dev": true, - "dependencies": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/test-exclude/node_modules/normalize-package-data": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": 
"sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "dev": true, - "dependencies": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "node_modules/test-exclude/node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "dev": true, - "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/test-exclude/node_modules/p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", - "dev": true, - "dependencies": { - "p-limit": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/test-exclude/node_modules/p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/test-exclude/node_modules/parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", - "dev": true, - "dependencies": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/test-exclude/node_modules/path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "dev": true, - "dependencies": { - "pify": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/test-exclude/node_modules/pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "dev": true, - "engines": { - "node": ">=4" + "node": ">=10" } }, - "node_modules/test-exclude/node_modules/read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", + "node_modules/tcompare/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", "dev": true, - "dependencies": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - }, "engines": { - "node": ">=4" + "node": ">=0.3.1" } }, - "node_modules/test-exclude/node_modules/read-pkg-up": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-4.0.0.tgz", - "integrity": "sha512-6etQSH7nJGsK0RbG/2TeDzZFa8shjQ1um+SwQQ5cwKy0dhSXdOncEhb1CPpvQG4h7FyOV6EB6YlV0yJvZQNAkA==", + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", "dev": true, "dependencies": { 
- "find-up": "^3.0.0", - "read-pkg": "^3.0.0" + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" }, "engines": { - "node": ">=6" - } - }, - "node_modules/test-exclude/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true, - "bin": { - "semver": "bin/semver" + "node": ">=8" } }, "node_modules/text-table": { @@ -9601,32 +9589,23 @@ } }, "node_modules/tough-cookie": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz", - "integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", + "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", "dev": true, "dependencies": { - "ip-regex": "^2.1.0", - "psl": "^1.1.28", - "punycode": "^2.1.1" + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.1.2" }, "engines": { "node": ">=6" } }, - "node_modules/tough-cookie/node_modules/ip-regex": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", - "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/tr46": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-2.0.2.tgz", - "integrity": "sha512-3n1qG+/5kg+jrbTzwAykB5yRYtQCTqOGKq5U5PE3b0a1/mzo6snDhjGS0zJVJunO0NrT3Dg1MLy5TjWP/UJppg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-2.1.0.tgz", + "integrity": "sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw==", "dev": true, "dependencies": { "punycode": "^2.1.1" @@ -9673,40 +9652,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/ts-node": { - "version": "8.10.2", - "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-8.10.2.tgz", - "integrity": "sha512-ISJJGgkIpDdBhWVu3jufsWpK3Rzo7bdiIXJjQc0ynKxVOVcg2oIrf2H2cejminGrptVc6q6/uynAHNCuWGbpVA==", - "dev": true, - "dependencies": { - "arg": "^4.1.0", - "diff": "^4.0.1", - "make-error": "^1.1.1", - "source-map-support": "^0.5.17", - "yn": "3.1.1" - }, - "bin": { - "ts-node": "dist/bin.js", - "ts-node-script": "dist/bin-script.js", - "ts-node-transpile-only": "dist/bin-transpile.js", - "ts-script": "dist/bin-script-deprecated.js" - }, - "engines": { - "node": ">=6.0.0" - }, - "peerDependencies": { - "typescript": ">=2.7" - } - }, - "node_modules/ts-node/node_modules/diff": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", - "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", - "dev": true, - "engines": { - "node": ">=0.3.1" - } - }, "node_modules/tsconfig-paths": { "version": "3.9.0", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz", @@ -9719,6 +9664,18 @@ "strip-bom": "^3.0.0" } }, + "node_modules/tsconfig-paths/node_modules/json5": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "dev": true, + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": 
"lib/cli.js" + } + }, "node_modules/tunnel-agent": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", @@ -9767,17 +9724,19 @@ "is-typedarray": "^1.0.0" } }, - "node_modules/typescript": { - "version": "3.9.9", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.9.tgz", - "integrity": "sha512-kdMjTiekY+z/ubJCATUPlRDl39vXYiMV9iyeMuEuXZh2we6zz80uovNN2WlAxmmdE/Z/YQe+EbOEXB5RHEED3w==", + "node_modules/unbox-primitive": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz", + "integrity": "sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==", "dev": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" + "dependencies": { + "function-bind": "^1.1.1", + "has-bigints": "^1.0.1", + "has-symbols": "^1.0.2", + "which-boxed-primitive": "^1.0.2" }, - "engines": { - "node": ">=4.2.0" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/unherit": { @@ -9861,9 +9820,9 @@ } }, "node_modules/unist-util-is": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.0.4.tgz", - "integrity": "sha512-3dF39j/u423v4BBQrk1AQ2Ve1FxY5W3JKwXxVFzBODQ6WEvccguhgp802qQLKSnxPODE6WuRZtV+ohlUg4meBA==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz", + "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==", "dev": true, "funding": { "type": "opencollective", @@ -9881,9 +9840,9 @@ } }, "node_modules/unist-util-remove": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/unist-util-remove/-/unist-util-remove-2.0.1.tgz", - "integrity": "sha512-YtuetK6o16CMfG+0u4nndsWpujgsHDHHLyE0yGpJLLn5xSjKeyGyzEBOI2XbmoUHCYabmNgX52uxlWoQhcvR7Q==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/unist-util-remove/-/unist-util-remove-2.1.0.tgz", + "integrity": "sha512-J8NYPyBm4baYLdCbjmf1bhPu45Cr1MWTm77qd9istEkzWpnN6O9tMsEbB2JhNnBCqGENRqEWomQ+He6au0B27Q==", "dev": true, "dependencies": { "unist-util-is": "^4.0.0" @@ -9948,6 +9907,15 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "dev": true, + "engines": { + "node": ">= 4.0.0" + } + }, "node_modules/uri-js": { "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", @@ -9976,15 +9944,16 @@ "version": "3.4.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", "inBundle": true, "bin": { "uuid": "bin/uuid" } }, "node_modules/v8-compile-cache": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.2.0.tgz", - "integrity": "sha512-gTpR5XQNKFwOd4clxfnhaqvfqMpqEwr4tOtCyz4MtYZX2JYhfr1JvBFKdS+7K/9rfpZR3VLX+YWBbKoxCgS43Q==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", + "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", "dev": true }, "node_modules/validate-npm-package-license": { @@ -10060,12 +10029,6 @@ "url": "https://opencollective.com/unified" } }, - "node_modules/vlq": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/vlq/-/vlq-0.2.3.tgz", - "integrity": "sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==", - "dev": true - }, "node_modules/w3c-hr-time": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz", @@ -10149,12 +10112,12 @@ "dev": true }, "node_modules/whatwg-url": { - "version": "8.4.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.4.0.tgz", - "integrity": "sha512-vwTUFf6V4zhcPkWp/4CQPr1TW9Ml6SF4lVyaIMBdJw5i6qUUJ1QWM4Z6YYVkfka0OUIzVo/0aNtGVGk256IKWw==", + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.5.0.tgz", + "integrity": "sha512-fy+R77xWv0AiqfLl4nuGUlQ3/6b5uNfQ4WAbGQVMYshCTCCPK9psC1nWh3XHuxGVCtlcDDQPQW1csmmIQo+fwg==", "dev": true, "dependencies": { - "lodash.sortby": "^4.7.0", + "lodash": "^4.7.0", "tr46": "^2.0.2", "webidl-conversions": "^6.1.0" }, @@ -10177,18 +10140,28 @@ "node": ">= 8" } }, + "node_modules/which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dev": true, + "dependencies": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/which-module": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", "dev": true }, - "node_modules/which-pm-runs": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/which-pm-runs/-/which-pm-runs-1.0.0.tgz", - "integrity": "sha1-Zws6+8VS4LVd9rd4DKdGFfI60cs=", - "dev": true - }, "node_modules/wide-align": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", @@ -10265,9 +10238,9 @@ } }, "node_modules/ws": { - "version": "7.4.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.3.tgz", - "integrity": "sha512-hr6vCR76GsossIRsr8OLR9acVVm1jyfEWvhbNjtgPOrfvAlKzvyeg/P6r8RuDjRyrcQoPQT7K0DGEPc7Ae6jzA==", + "version": "7.4.6", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.6.tgz", + "integrity": "sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==", "dev": true, "engines": { "node": ">=8.3.0" @@ -10307,9 +10280,9 @@ } }, "node_modules/y18n": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz", - "integrity": 
"sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", + "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", "dev": true }, "node_modules/yallist": { @@ -10319,9 +10292,9 @@ "inBundle": true }, "node_modules/yaml": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.0.tgz", - "integrity": "sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg==", + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", "dev": true, "engines": { "node": ">= 6" @@ -10334,109 +10307,92 @@ "dev": true }, "node_modules/yargs": { - "version": "13.3.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", - "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", + "version": "15.4.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", + "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", "dev": true, "dependencies": { - "cliui": "^5.0.0", - "find-up": "^3.0.0", + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", "get-caller-file": "^2.0.1", "require-directory": "^2.1.1", "require-main-filename": "^2.0.0", "set-blocking": "^2.0.0", - "string-width": "^3.0.0", + "string-width": "^4.2.0", "which-module": "^2.0.0", "y18n": "^4.0.0", - "yargs-parser": "^13.1.2" + "yargs-parser": "^18.1.2" + }, + "engines": { + "node": ">=8" } }, "node_modules/yargs-parser": { - "version": "13.1.2", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", - "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", "dev": true, "dependencies": { "camelcase": "^5.0.0", "decamelize": "^1.2.0" - } - }, - "node_modules/yargs/node_modules/ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true, + }, "engines": { "node": ">=6" } }, - "node_modules/yargs/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "node_modules/yargs/node_modules/ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", "dev": true, - "dependencies": { - "color-convert": "^1.9.0" - }, "engines": { - "node": ">=4" + "node": ">=8" } }, "node_modules/yargs/node_modules/cliui": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", - "integrity": 
"sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", "dev": true, "dependencies": { - "string-width": "^3.1.0", - "strip-ansi": "^5.2.0", - "wrap-ansi": "^5.1.0" + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" } }, - "node_modules/yargs/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "node_modules/yargs/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, "dependencies": { - "color-name": "1.1.3" + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" } }, - "node_modules/yargs/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", - "dev": true - }, - "node_modules/yargs/node_modules/emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", - "dev": true - }, - "node_modules/yargs/node_modules/find-up": { + "node_modules/yargs/node_modules/is-fullwidth-code-point": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true, - "dependencies": { - "locate-path": "^3.0.0" - }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/yargs/node_modules/locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, "dependencies": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" + "p-locate": "^4.1.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/yargs/node_modules/p-limit": { @@ -10455,15 +10411,15 @@ } }, "node_modules/yargs/node_modules/p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dev": true, "dependencies": { - "p-limit": 
"^2.0.0" + "p-limit": "^2.2.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/yargs/node_modules/p-try": { @@ -10475,53 +10431,53 @@ "node": ">=6" } }, + "node_modules/yargs/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/yargs/node_modules/string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", + "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", "dev": true, "dependencies": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/yargs/node_modules/strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", "dev": true, "dependencies": { - "ansi-regex": "^4.1.0" + "ansi-regex": "^5.0.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/yargs/node_modules/wrap-ansi": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", - "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", "dev": true, "dependencies": { - "ansi-styles": "^3.2.0", - "string-width": "^3.0.0", - "strip-ansi": "^5.0.0" + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" }, "engines": { - "node": ">=6" - } - }, - "node_modules/yn": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", - "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", - "dev": true, - "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/zwitch": { @@ -10533,6 +10489,31 @@ "type": "github", "url": "https://github.com/sponsors/wooorm" } + }, + "packages/libnpmdiff": { + "version": "2.0.4", + "license": "ISC", + "dependencies": { + "@npmcli/disparity-colors": "^1.0.1", + "@npmcli/installed-package-contents": "^1.0.7", + "binary-extensions": "^2.2.0", + "diff": "^5.0.0", + "minimatch": "^3.0.4", + "npm-package-arg": "^8.1.4", + "pacote": "^11.3.4", + "tar": "^6.1.0" + }, + "devDependencies": { + "eslint": "^7.28.0", + "eslint-plugin-import": "^2.23.4", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^5.1.0", + "eslint-plugin-standard": "^5.0.0", + "tap": "^15.0.9" + }, + "engines": { + "node": ">=10" + } } 
}, "dependencies": { @@ -10569,57 +10550,34 @@ "source-map": "^0.5.0" }, "dependencies": { - "json5": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", - "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", - "dev": true, - "requires": { - "minimist": "^1.2.5" - } - }, "semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", "dev": true - }, - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true } } }, "@babel/generator": { - "version": "7.12.15", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.12.15.tgz", - "integrity": "sha512-6F2xHxBiFXWNSGb7vyCUTBF8RCLY66rS0zEPcP8t/nQyXjha5EuK4z7H5o7fWG8B4M7y6mqVWq1J+1PuwRhecQ==", + "version": "7.14.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.14.3.tgz", + "integrity": "sha512-bn0S6flG/j0xtQdz3hsjJ624h3W0r3llttBMfyHX3YrZ/KtLYr15bjA0FXkgW7FpvrDuTuElXeVjiKlYRpnOFA==", "dev": true, "requires": { - "@babel/types": "^7.12.13", + "@babel/types": "^7.14.2", "jsesc": "^2.5.1", "source-map": "^0.5.0" - }, - "dependencies": { - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true - } } }, "@babel/helper-function-name": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.12.13.tgz", - "integrity": "sha512-TZvmPn0UOqmvi5G4vvw0qZTpVptGkB1GL61R6lKvrSdIxGm5Pky7Q3fpKiIkQCAtRCBUwB0PaThlx9vebCDSwA==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.14.2.tgz", + "integrity": "sha512-NYZlkZRydxw+YT56IlhIcS8PAhb+FEUiOzuhFTfqDyPmzAhRge6ua0dQYT/Uh0t/EDHq05/i+e5M2d4XvjgarQ==", "dev": true, "requires": { "@babel/helper-get-function-arity": "^7.12.13", "@babel/template": "^7.12.13", - "@babel/types": "^7.12.13" + "@babel/types": "^7.14.2" } }, "@babel/helper-get-function-arity": { @@ -10632,38 +10590,37 @@ } }, "@babel/helper-member-expression-to-functions": { - "version": "7.12.16", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.12.16.tgz", - "integrity": "sha512-zYoZC1uvebBFmj1wFAlXwt35JLEgecefATtKp20xalwEK8vHAixLBXTGxNrVGEmTT+gzOThUgr8UEdgtalc1BQ==", + "version": "7.13.12", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.13.12.tgz", + "integrity": "sha512-48ql1CLL59aKbU94Y88Xgb2VFy7a95ykGRbJJaaVv+LX5U8wFpLfiGXJJGUozsmA1oEh/o5Bp60Voq7ACyA/Sw==", "dev": true, "requires": { - "@babel/types": "^7.12.13" + "@babel/types": "^7.13.12" } }, "@babel/helper-module-imports": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.12.13.tgz", - "integrity": "sha512-NGmfvRp9Rqxy0uHSSVP+SRIW1q31a7Ji10cLBcqSDUngGentY4FRiHOFZFE1CLU5eiL0oE8reH7Tg1y99TDM/g==", + "version": "7.13.12", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.13.12.tgz", + "integrity": 
"sha512-4cVvR2/1B693IuOvSI20xqqa/+bl7lqAMR59R4iu39R9aOX8/JoYY1sFaNvUMyMBGnHdwvJgUrzNLoUZxXypxA==", "dev": true, "requires": { - "@babel/types": "^7.12.13" + "@babel/types": "^7.13.12" } }, "@babel/helper-module-transforms": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.12.13.tgz", - "integrity": "sha512-acKF7EjqOR67ASIlDTupwkKM1eUisNAjaSduo5Cz+793ikfnpe7p4Q7B7EWU2PCoSTPWsQkR7hRUWEIZPiVLGA==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.14.2.tgz", + "integrity": "sha512-OznJUda/soKXv0XhpvzGWDnml4Qnwp16GN+D/kZIdLsWoHj05kyu8Rm5kXmMef+rVJZ0+4pSGLkeixdqNUATDA==", "dev": true, "requires": { - "@babel/helper-module-imports": "^7.12.13", - "@babel/helper-replace-supers": "^7.12.13", - "@babel/helper-simple-access": "^7.12.13", + "@babel/helper-module-imports": "^7.13.12", + "@babel/helper-replace-supers": "^7.13.12", + "@babel/helper-simple-access": "^7.13.12", "@babel/helper-split-export-declaration": "^7.12.13", - "@babel/helper-validator-identifier": "^7.12.11", + "@babel/helper-validator-identifier": "^7.14.0", "@babel/template": "^7.12.13", - "@babel/traverse": "^7.12.13", - "@babel/types": "^7.12.13", - "lodash": "^4.17.19" + "@babel/traverse": "^7.14.2", + "@babel/types": "^7.14.2" } }, "@babel/helper-optimise-call-expression": { @@ -10676,30 +10633,30 @@ } }, "@babel/helper-plugin-utils": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.12.13.tgz", - "integrity": "sha512-C+10MXCXJLiR6IeG9+Wiejt9jmtFpxUc3MQqCmPY8hfCjyUGl9kT+B2okzEZrtykiwrc4dbCPdDoz0A/HQbDaA==", + "version": "7.13.0", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.13.0.tgz", + "integrity": "sha512-ZPafIPSwzUlAoWT8DKs1W2VyF2gOWthGd5NGFMsBcMMol+ZhK+EQY/e6V96poa6PA/Bh+C9plWN0hXO1uB8AfQ==", "dev": true }, "@babel/helper-replace-supers": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.12.13.tgz", - "integrity": "sha512-pctAOIAMVStI2TMLhozPKbf5yTEXc0OJa0eENheb4w09SrgOWEs+P4nTOZYJQCqs8JlErGLDPDJTiGIp3ygbLg==", + "version": "7.14.4", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.14.4.tgz", + "integrity": "sha512-zZ7uHCWlxfEAAOVDYQpEf/uyi1dmeC7fX4nCf2iz9drnCwi1zvwXL3HwWWNXUQEJ1k23yVn3VbddiI9iJEXaTQ==", "dev": true, "requires": { - "@babel/helper-member-expression-to-functions": "^7.12.13", + "@babel/helper-member-expression-to-functions": "^7.13.12", "@babel/helper-optimise-call-expression": "^7.12.13", - "@babel/traverse": "^7.12.13", - "@babel/types": "^7.12.13" + "@babel/traverse": "^7.14.2", + "@babel/types": "^7.14.4" } }, "@babel/helper-simple-access": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.12.13.tgz", - "integrity": "sha512-0ski5dyYIHEfwpWGx5GPWhH35j342JaflmCeQmsPWcrOQDtCN6C1zKAVRFVbK53lPW2c9TsuLLSUDf0tIGJ5hA==", + "version": "7.13.12", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.13.12.tgz", + "integrity": "sha512-7FEjbrx5SL9cWvXioDbnlYTppcZGuCY6ow3/D5vMggb2Ywgu4dMrpTJX0JdQAIcRRUElOIxF3yEooa9gUb9ZbA==", "dev": true, "requires": { - "@babel/types": "^7.12.13" + "@babel/types": "^7.13.12" } }, "@babel/helper-split-export-declaration": { @@ -10712,29 +10669,29 @@ } }, 
"@babel/helper-validator-identifier": { - "version": "7.12.11", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz", - "integrity": "sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.0.tgz", + "integrity": "sha512-V3ts7zMSu5lfiwWDVWzRDGIN+lnCEUdaXgtVHJgLb1rGaA6jMrtB9EmE7L18foXJIE8Un/A/h6NJfGQp/e1J4A==", "dev": true }, "@babel/helpers": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.12.13.tgz", - "integrity": "sha512-oohVzLRZ3GQEk4Cjhfs9YkJA4TdIDTObdBEZGrd6F/T0GPSnuV6l22eMcxlvcvzVIPH3VTtxbseudM1zIE+rPQ==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.14.0.tgz", + "integrity": "sha512-+ufuXprtQ1D1iZTO/K9+EBRn+qPWMJjZSw/S0KlFrxCw4tkrzv9grgpDHkY9MeQTjTY8i2sp7Jep8DfU6tN9Mg==", "dev": true, "requires": { "@babel/template": "^7.12.13", - "@babel/traverse": "^7.12.13", - "@babel/types": "^7.12.13" + "@babel/traverse": "^7.14.0", + "@babel/types": "^7.14.0" } }, "@babel/highlight": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.12.13.tgz", - "integrity": "sha512-kocDQvIbgMKlWxXe9fof3TQ+gkIPOUSEYhJjqUjvKMez3krV7vbzYCDq39Oj11UAVK7JqPVGQPlgE85dPNlQww==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.0.tgz", + "integrity": "sha512-YSCOwxvTYEIMSGaBQb5kDDsCopDdiUGsqpatp3fOlI4+2HQSkTmEVWnVuySdAC5EWCqSWWTv0ib63RjR7dTBdg==", "dev": true, "requires": { - "@babel/helper-validator-identifier": "^7.12.11", + "@babel/helper-validator-identifier": "^7.14.0", "chalk": "^2.0.0", "js-tokens": "^4.0.0" }, @@ -10798,9 +10755,9 @@ } }, "@babel/parser": { - "version": "7.12.16", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.12.16.tgz", - "integrity": "sha512-c/+u9cqV6F0+4Hpq01jnJO+GLp2DdT63ppz9Xa+6cHaajM9VFzK/iDXiKK65YtpeVwu+ctfS6iqlMqRgQRzeCw==", + "version": "7.14.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.14.4.tgz", + "integrity": "sha512-ArliyUsWDUqEGfWcmzpGUzNfLxTdTp6WU4IuP6QFSp9gGfWS6boxFCkJSJ/L4+RG8z/FnIU3WxCk6hPL9SSWeA==", "dev": true }, "@babel/plugin-proposal-object-rest-spread": { @@ -10833,12 +10790,12 @@ } }, "@babel/plugin-transform-parameters": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.12.13.tgz", - "integrity": "sha512-e7QqwZalNiBRHCpJg/P8s/VJeSRYgmtWySs1JwvfwPqhBbiWfOcHDKdeAi6oAyIimoKWBlwc8oTgbZHdhCoVZA==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.14.2.tgz", + "integrity": "sha512-NxoVmA3APNCC1JdMXkdYXuQS+EMdqy0vIwyDHeKHiJKRxmp1qGSdb0JLEIoPRhkx6H/8Qi3RJ3uqOCYw8giy9A==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.12.13" + "@babel/helper-plugin-utils": "^7.13.0" } }, "@babel/template": { @@ -10853,65 +10810,164 @@ } }, "@babel/traverse": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.12.13.tgz", - "integrity": "sha512-3Zb4w7eE/OslI0fTp8c7b286/cQps3+vdLW3UcwC8VSJC6GbKn55aeVVu2QJNuCDoeKyptLOFrPq8WqZZBodyA==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.14.2.tgz", + "integrity": 
"sha512-TsdRgvBFHMyHOOzcP9S6QU0QQtjxlRpEYOy3mcCO5RgmC305ki42aSAmfZEMSSYBla2oZ9BMqYlncBaKmD/7iA==", "dev": true, "requires": { "@babel/code-frame": "^7.12.13", - "@babel/generator": "^7.12.13", - "@babel/helper-function-name": "^7.12.13", + "@babel/generator": "^7.14.2", + "@babel/helper-function-name": "^7.14.2", "@babel/helper-split-export-declaration": "^7.12.13", - "@babel/parser": "^7.12.13", - "@babel/types": "^7.12.13", + "@babel/parser": "^7.14.2", + "@babel/types": "^7.14.2", "debug": "^4.1.0", - "globals": "^11.1.0", - "lodash": "^4.17.19" - }, - "dependencies": { - "globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true - } + "globals": "^11.1.0" } }, "@babel/types": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.12.13.tgz", - "integrity": "sha512-oKrdZTld2im1z8bDwTOQvUbxKwE+854zc16qWZQlcTqMN00pWxHQ4ZeOq0yDMnisOpRykH2/5Qqcrk/OlbAjiQ==", + "version": "7.14.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.14.4.tgz", + "integrity": "sha512-lCj4aIs0xUefJFQnwwQv2Bxg7Omd6bgquZ6LGC+gGMh6/s5qDVfjuCMlDmYQ15SLsWHd9n+X3E75lKIhl5Lkiw==", "dev": true, "requires": { - "@babel/helper-validator-identifier": "^7.12.11", - "lodash": "^4.17.19", + "@babel/helper-validator-identifier": "^7.14.0", "to-fast-properties": "^2.0.0" } }, "@blueoak/list": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@blueoak/list/-/list-1.0.2.tgz", - "integrity": "sha512-KyqT0kkdxgbGys9mvo/1Mgdt/LGvUFPCZIK9pWPIfOM2mYzMDd/eVYy4sMP1YqvVI129k0alxRyM53H2MAs/Nw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@blueoak/list/-/list-2.0.0.tgz", + "integrity": "sha512-yQ6/CTy6DYvmJOAIw/BJjKeNG2ZyF8uxgTN8Yvcv4L9YavoVp9xUgmoVUKN5l24NGPDQpswavNanHOqB00ZNXg==", "dev": true }, "@eslint/eslintrc": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.3.0.tgz", - "integrity": "sha512-1JTKgrOKAHVivSvOYw+sJOunkBjUOvjqWk1DPja7ZFhIS2mX/4EgTT8M7eTK9jrKhL/FvXXEbQwIs3pg1xp3dg==", + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz", + "integrity": "sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==", "dev": true, "requires": { "ajv": "^6.12.4", "debug": "^4.1.1", "espree": "^7.3.0", - "globals": "^12.1.0", + "globals": "^13.9.0", "ignore": "^4.0.6", "import-fresh": "^3.2.1", "js-yaml": "^3.13.1", - "lodash": "^4.17.20", "minimatch": "^3.0.4", "strip-json-comments": "^3.1.1" + }, + "dependencies": { + "globals": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.10.0.tgz", + "integrity": "sha512-piHC3blgLGFjvOuMmWZX60f+na1lXFDhQXBf1UYp2fXPXqvEUbOhNwi6BsQ0bQishwedgnjkwv1d9zKf+MWw3g==", + "dev": true, + "requires": { + "type-fest": "^0.20.2" + } + }, + "type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true + } + } + }, + "@humanwhocodes/config-array": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", + "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==", + "dev": true, + "requires": 
{ + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + } + }, + "@humanwhocodes/object-schema": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.0.tgz", + "integrity": "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==", + "dev": true + }, + "@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "requires": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "dependencies": { + "find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "requires": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + } + }, + "locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "requires": { + "p-locate": "^4.1.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true + } } }, + "@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true + }, "@mdx-js/mdx": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-1.6.22.tgz", @@ -10946,9 +11002,9 @@ "dev": true }, "@npmcli/arborist": { - "version": "2.2.5", - "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-2.2.5.tgz", - "integrity": "sha512-nLnhRZsUa1kPryyI0N6hLGX6lsQTFDqBJRTNHmZNmjgzP7ZBKiqz8y6ItsouT2CpWhvmoIpnstLyoglIQyo0YQ==", + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-2.8.0.tgz", + "integrity": 
"sha512-R9rTyak1rGdmVTyiU14dgBb+qMllY3B6I8hp7FB4xXsU9dJDrYZJR8I+191CMo5Y1941jTDCtNcXXW9TldPEFQ==", "requires": { "@npmcli/installed-package-contents": "^1.0.7", "@npmcli/map-workspaces": "^1.0.2", @@ -10956,24 +11012,29 @@ "@npmcli/move-file": "^1.1.0", "@npmcli/name-from-folder": "^1.0.1", "@npmcli/node-gyp": "^1.0.1", + "@npmcli/package-json": "^1.0.1", "@npmcli/run-script": "^1.8.2", "bin-links": "^2.2.1", "cacache": "^15.0.3", "common-ancestor-path": "^1.0.1", "json-parse-even-better-errors": "^2.3.1", - "json-stringify-nice": "^1.1.1", + "json-stringify-nice": "^1.1.4", + "mkdirp": "^1.0.4", "mkdirp-infer-owner": "^2.0.0", "npm-install-checks": "^4.0.0", - "npm-package-arg": "^8.1.0", + "npm-package-arg": "^8.1.5", "npm-pick-manifest": "^6.1.0", - "npm-registry-fetch": "^9.0.0", - "pacote": "^11.2.6", + "npm-registry-fetch": "^11.0.0", + "pacote": "^11.3.5", "parse-conflict-json": "^1.1.1", + "proc-log": "^1.0.0", "promise-all-reject-late": "^1.0.0", "promise-call-limit": "^1.0.1", "read-package-json-fast": "^2.0.2", "readdir-scoped-modules": "^1.1.0", - "semver": "^7.3.4", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "ssri": "^8.0.1", "tar": "^6.1.0", "treeverse": "^1.0.4", "walk-up-path": "^1.0.0" @@ -10985,9 +11046,9 @@ "integrity": "sha512-oN3y7FAROHhrAt7Rr7PnTSwrHrZVRTS2ZbyxeQwSSYD0ifwM3YNgQqbaRmjcWoPyq77MjchusjJDspbzMmip1Q==" }, "@npmcli/config": { - "version": "1.2.9", - "resolved": "https://registry.npmjs.org/@npmcli/config/-/config-1.2.9.tgz", - "integrity": "sha512-d7mx35ju9HFg0gNHiwMU0HXCJk1esAeRdMktLeD+K2K2awkZyEm1FyX+g8iuZbmWGAaFP/aGiXo7a0lKlmp6Xg==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@npmcli/config/-/config-2.2.0.tgz", + "integrity": "sha512-y0V3F7RCWXy8kBOvKvKSRUNKRobLB6vL/UNchy/6+IUNIqu+UyrY3Z7jvj1ZA/AkYc/0WkCUtppCo+bPhMU8Aw==", "requires": { "ini": "^2.0.0", "mkdirp-infer-owner": "^2.0.0", @@ -11005,18 +11066,17 @@ } }, "@npmcli/git": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-2.0.6.tgz", - "integrity": "sha512-a1MnTfeRPBaKbFY07fd+6HugY1WAkKJzdiJvlRub/9o5xz2F/JtPacZZapx5zRJUQFIzSL677vmTSxEcDMrDbg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-2.1.0.tgz", + "integrity": "sha512-/hBFX/QG1b+N7PZBFs0bi+evgRZcK9nWBxQKZkGoXUT5hJSwl5c4d7y8/hm+NQZRPhQ67RzFaj5UM9YeyKoryw==", "requires": { - "@npmcli/promise-spawn": "^1.1.0", + "@npmcli/promise-spawn": "^1.3.2", "lru-cache": "^6.0.0", - "mkdirp": "^1.0.3", - "npm-pick-manifest": "^6.0.0", + "mkdirp": "^1.0.4", + "npm-pick-manifest": "^6.1.1", "promise-inflight": "^1.0.1", "promise-retry": "^2.0.1", - "semver": "^7.3.2", - "unique-filename": "^1.1.1", + "semver": "^7.3.5", "which": "^2.0.2" } }, @@ -11041,9 +11101,9 @@ } }, "@npmcli/metavuln-calculator": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-1.1.0.tgz", - "integrity": "sha512-fb51NyiWHjeqqFez9FXhvr+E2Dv4ZjPGVgnj8QC1xjHRSw4gMRIO8pNCzU11WYQ2wZxoHBhPMgovZGxP5lP74g==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-1.1.1.tgz", + "integrity": "sha512-9xe+ZZ1iGVaUovBVFI9h3qW+UuECUzhvZPxK9RaEA2mjU26o5D0JloGYWwLYvQELJNmBdQB6rrpuN8jni6LwzQ==", "requires": { "cacache": "^15.0.5", "pacote": "^11.1.11", @@ -11069,6 +11129,14 @@ "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-1.0.2.tgz", "integrity": "sha512-yrJUe6reVMpktcvagumoqD9r08fH1iRo01gn1u0zoCApa9lnZGEigVKUd2hzsCId4gdtkZZIVscLhNxMECKgRg==" }, + 
"@npmcli/package-json": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-1.0.1.tgz", + "integrity": "sha512-y6jnu76E9C23osz8gEMBayZmaZ69vFOIk8vR1FJL/wbEJ54+9aVG9rLTjQKSXfgYZEr50nw1txBBFfBZZe+bYg==", + "requires": { + "json-parse-even-better-errors": "^2.3.1" + } + }, "@npmcli/promise-spawn": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-1.3.2.tgz", @@ -11078,15 +11146,14 @@ } }, "@npmcli/run-script": { - "version": "1.8.3", - "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-1.8.3.tgz", - "integrity": "sha512-ELPGWAVU/xyU+A+H3pEPj0QOvYwLTX71RArXcClFzeiyJ/b/McsZ+d0QxpznvfFtZzxGN/gz/1cvlqICR4/suQ==", + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-1.8.5.tgz", + "integrity": "sha512-NQspusBCpTjNwNRFMtz2C5MxoxyzlbuJ4YEhxAKrIonTiirKDtatsZictx9RgamQIx6+QuHMNmPl0wQdoESs9A==", "requires": { "@npmcli/node-gyp": "^1.0.2", "@npmcli/promise-spawn": "^1.3.2", "infer-owner": "^1.0.4", "node-gyp": "^7.1.0", - "puka": "^1.0.1", "read-package-json-fast": "^2.0.1" } }, @@ -11159,9 +11226,9 @@ } }, "acorn-jsx": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.1.tgz", - "integrity": "sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng==", + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "dev": true, "requires": {} }, @@ -11239,9 +11306,9 @@ "integrity": "sha1-XeYEFb2gcbs3EnhUyGT0GyMlRTk=" }, "anymatch": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", - "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", "dev": true, "requires": { "normalize-path": "^3.0.0", @@ -11249,12 +11316,12 @@ } }, "append-transform": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-1.0.0.tgz", - "integrity": "sha512-P009oYkeHyU742iSZJzZZywj4QRJdnTWffaKuJQLablCZ1uz6/cW4yaRgcDaoQ+uwOxxnt0gRUcwfsNP2ri0gw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz", + "integrity": "sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==", "dev": true, "requires": { - "default-require-extensions": "^2.0.0" + "default-require-extensions": "^3.0.0" } }, "aproba": { @@ -11276,12 +11343,6 @@ "readable-stream": "^2.0.6" } }, - "arg": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", - "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", - "dev": true - }, "argparse": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", @@ -11298,15 +11359,15 @@ "dev": true }, "array-includes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.2.tgz", - "integrity": "sha512-w2GspexNQpx+PutG3QpT437/BenZBj0M/MZGn5mzv/MofYqo0xmRHzn4lFsoDlWJ+THYsGJmFlW68WlDFx7VRw==", + "version": "3.1.3", + "resolved": 
"https://registry.npmjs.org/array-includes/-/array-includes-3.1.3.tgz", + "integrity": "sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A==", "dev": true, "requires": { - "call-bind": "^1.0.0", + "call-bind": "^1.0.2", "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.1", - "get-intrinsic": "^1.0.1", + "es-abstract": "^1.18.0-next.2", + "get-intrinsic": "^1.1.1", "is-string": "^1.0.5" } }, @@ -11346,13 +11407,10 @@ "dev": true }, "async-hook-domain": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/async-hook-domain/-/async-hook-domain-1.1.3.tgz", - "integrity": "sha512-ZovMxSbADV3+biB7oR1GL5lGyptI24alp0LWHlmz1OFc5oL47pz3EiIF6nXOkDW7yLqih4NtsiYduzdDW0i+Wg==", - "dev": true, - "requires": { - "source-map-support": "^0.5.11" - } + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/async-hook-domain/-/async-hook-domain-2.0.3.tgz", + "integrity": "sha512-MadiLLDEZRZzZwcm0dgS+K99qXZ4H2saAUwUgwzFulbAkXrKi3AX5FvWS3FFTQtLMwrqcGqAJe6o12KrObejQA==", + "dev": true }, "asynckit": { "version": "0.4.0", @@ -11411,9 +11469,9 @@ "dev": true }, "balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "base64-js": { "version": "1.5.1", @@ -11448,9 +11506,9 @@ "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==" }, "bind-obj-methods": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/bind-obj-methods/-/bind-obj-methods-2.0.0.tgz", - "integrity": "sha512-3/qRXczDi2Cdbz6jE+W3IflJOutRVica8frpBn14de1mBOkzDo+6tY33kNhvkw54Kn3PzRRD2VnGbGPcTAk4sw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bind-obj-methods/-/bind-obj-methods-3.0.0.tgz", + "integrity": "sha512-nLEaaz3/sEzNSyPWRsN9HNsqwk1AUyECtGj+XwGdIi3xABnEqecvXtIJ0wehQXuuER5uZ/5fTs2usONgYjG+iw==", "dev": true }, "bindings": { @@ -11532,14 +11590,14 @@ "integrity": "sha1-y5T662HIaWRR2zZTThQi+U8K7og=" }, "byte-size": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/byte-size/-/byte-size-7.0.0.tgz", - "integrity": "sha512-NNiBxKgxybMBtWdmvx7ZITJi4ZG+CYUgwOSZTfqB1qogkRHrhbQE/R2r5Fh94X+InN5MCYz6SvB/ejHMj/HbsQ==" + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/byte-size/-/byte-size-7.0.1.tgz", + "integrity": "sha512-crQdqyCwhokxwV1UyDzLZanhkugAgft7vt0qbbdt60C6Zf3CAiGmtUCylbtYwrU6loOUw3euGrNtW1J651ot1A==" }, "cacache": { - "version": "15.0.5", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.0.5.tgz", - "integrity": "sha512-lloiL22n7sOjEEXdL8NAjTgv9a1u43xICE9/203qonkZUCj5X1UEWIdf2/Y0d6QcCtMzbKQyhrcDbdvlZTs/+A==", + "version": "15.2.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.2.0.tgz", + "integrity": "sha512-uKoJSHmnrqXgthDFx/IU6ED/5xd+NNGe+Bb+kLZy7Ku4P+BaiWEUflAKPZ7eAzsYGcsAGASJZsybXp+quEcHTw==", "requires": { "@npmcli/move-file": "^1.0.1", "chownr": "^2.0.0", @@ -11555,34 +11613,21 @@ "p-map": "^4.0.0", "promise-inflight": "^1.0.1", "rimraf": "^3.0.2", - "ssri": "^8.0.0", + "ssri": "^8.0.1", "tar": "^6.0.2", "unique-filename": "^1.1.1" } }, "caching-transform": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-3.0.2.tgz", - 
"integrity": "sha512-Mtgcv3lh3U0zRii/6qVgQODdPA4G3zhG+jtbCWj39RXuUFTMzH0vcdMtaJS1jPowd+It2Pqr6y3NJMQqOqCE2w==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz", + "integrity": "sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==", "dev": true, "requires": { - "hasha": "^3.0.0", - "make-dir": "^2.0.0", - "package-hash": "^3.0.0", - "write-file-atomic": "^2.4.2" - }, - "dependencies": { - "write-file-atomic": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", - "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", - "dev": true, - "requires": { - "graceful-fs": "^4.1.11", - "imurmurhash": "^0.1.4", - "signal-exit": "^3.0.2" - } - } + "hasha": "^5.0.0", + "make-dir": "^3.0.0", + "package-hash": "^4.0.0", + "write-file-atomic": "^3.0.0" } }, "call-bind": { @@ -11595,12 +11640,6 @@ "get-intrinsic": "^1.0.2" } }, - "caller": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/caller/-/caller-1.0.1.tgz", - "integrity": "sha1-uFGGD3Dhlds9J3OVqhp+I+ow7PU=", - "dev": true - }, "callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -11631,9 +11670,9 @@ "dev": true }, "chalk": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz", + "integrity": "sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg==", "requires": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -11721,9 +11760,9 @@ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" }, "string-width": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", - "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", + "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -11819,8 +11858,7 @@ "color-support": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", - "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", - "dev": true + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==" }, "colors": { "version": "1.4.0", @@ -11872,12 +11910,6 @@ "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=" }, - "contains-path": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", - "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=", - "dev": true - }, "convert-source-map": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz", @@ -11914,27 +11946,6 @@ "request": "^2.88.2" } }, - "cp-file": { - "version": "6.2.0", - 
"resolved": "https://registry.npmjs.org/cp-file/-/cp-file-6.2.0.tgz", - "integrity": "sha512-fmvV4caBnofhPe8kOcitBwSn2f39QLjnAnGq3gO9dfd75mUytzKNZB1hde6QHunW2Rt+OwuBOMc3i1tNElbszA==", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "make-dir": "^2.0.0", - "nested-error-stacks": "^2.0.0", - "pify": "^4.0.1", - "safe-buffer": "^5.0.1" - }, - "dependencies": { - "pify": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", - "dev": true - } - } - }, "cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", @@ -11989,9 +12000,9 @@ } }, "debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", + "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", "requires": { "ms": "2.1.2" }, @@ -12042,12 +12053,20 @@ "dev": true }, "default-require-extensions": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-2.0.0.tgz", - "integrity": "sha1-9fj7sYp9bVCyH2QfZJ67Uiz+JPc=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.0.tgz", + "integrity": "sha512-ek6DpXq/SCpvjhpFsLFRVtIxJCRw6fUR42lYMVZuUMK7n8eMz4Uh5clckdBjEpLhn/gEBZo7hDJnJcwdKLKQjg==", "dev": true, "requires": { - "strip-bom": "^3.0.0" + "strip-bom": "^4.0.0" + }, + "dependencies": { + "strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true + } } }, "defaults": { @@ -12111,18 +12130,22 @@ "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==" }, - "diff-frag": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/diff-frag/-/diff-frag-1.0.1.tgz", - "integrity": "sha512-6/v2PC/6UTGcWPPetb9acL8foberUg/CtPdALeJUdD1B/weHNvzftoo00gYznqHGRhHEbykUGzqfG9RWOSr5yw==", - "dev": true - }, "docopt": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/docopt/-/docopt-0.6.2.tgz", "integrity": "sha1-so6eIiDaXsSffqW7JKR3h0Be6xE=", "dev": true }, + "docs": { + "version": "file:docs", + "requires": { + "@mdx-js/mdx": "^1.6.22", + "cmark-gfm": "^0.8.3", + "jsdom": "^16.4.0", + "marked-man": "^0.7.0", + "yaml": "^1.10.0" + } + }, "doctrine": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", @@ -12191,9 +12214,9 @@ } }, "env-paths": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.0.tgz", - "integrity": "sha512-6u0VYSCo/OW6IoD5WCLLy9JUGARbamfSavcNXry/eu8aHVFei6CD3Sw+VGX5alea1i9pgPHW0mbu6Xj0uBh7gA==" + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==" }, "err-code": { "version": "2.0.3", @@ -12210,25 +12233,27 @@ } }, "es-abstract": { - "version": 
"1.18.0-next.2", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.2.tgz", - "integrity": "sha512-Ih4ZMFHEtZupnUh6497zEL4y2+w8+1ljnCyaTa+adcoafI1GOvMwFlDjBLfWR7y9VLfrjRJe9ocuHY1PSR9jjw==", + "version": "1.18.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.3.tgz", + "integrity": "sha512-nQIr12dxV7SSxE6r6f1l3DtAeEYdsGpps13dR0TwJg1S8gyp4ZPgy3FZcHBgbiQqnoqSTb+oC+kO4UQ0C/J8vw==", "dev": true, "requires": { "call-bind": "^1.0.2", "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2", + "get-intrinsic": "^1.1.1", "has": "^1.0.3", - "has-symbols": "^1.0.1", - "is-callable": "^1.2.2", + "has-symbols": "^1.0.2", + "is-callable": "^1.2.3", "is-negative-zero": "^2.0.1", - "is-regex": "^1.1.1", - "object-inspect": "^1.9.0", + "is-regex": "^1.1.3", + "is-string": "^1.0.6", + "object-inspect": "^1.10.3", "object-keys": "^1.1.1", "object.assign": "^4.1.2", - "string.prototype.trimend": "^1.0.3", - "string.prototype.trimstart": "^1.0.3" + "string.prototype.trimend": "^1.0.4", + "string.prototype.trimstart": "^1.0.4", + "unbox-primitive": "^1.0.1" } }, "es-to-primitive": { @@ -12249,24 +12274,30 @@ "dev": true }, "escape-string-regexp": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", - "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "dev": true }, "escodegen": { - "version": "1.14.3", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz", - "integrity": "sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.0.0.tgz", + "integrity": "sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==", "dev": true, "requires": { "esprima": "^4.0.1", - "estraverse": "^4.2.0", + "estraverse": "^5.2.0", "esutils": "^2.0.2", "optionator": "^0.8.1", "source-map": "~0.6.1" }, "dependencies": { + "estraverse": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", + "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", + "dev": true + }, "levn": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", @@ -12297,6 +12328,13 @@ "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", "dev": true }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "optional": true + }, "type-check": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", @@ -12309,29 +12347,32 @@ } }, "eslint": { - "version": "7.20.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.20.0.tgz", - "integrity": "sha512-qGi0CTcOGP2OtCQBgWZlQjcTuP0XkIpYFj25XtRTQSHC+umNnp7UMshr2G8SLsRFYDdAPFeHOsiteadmMH02Yw==", + "version": "7.31.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.31.0.tgz", + "integrity": 
"sha512-vafgJpSh2ia8tnTkNUkwxGmnumgckLh5aAbLa1xRmIn9+owi8qBNGKL+B881kNKNTy7FFqTEkpNkUvmw0n6PkA==", "dev": true, "requires": { "@babel/code-frame": "7.12.11", - "@eslint/eslintrc": "^0.3.0", + "@eslint/eslintrc": "^0.4.3", + "@humanwhocodes/config-array": "^0.5.0", "ajv": "^6.10.0", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", "debug": "^4.0.1", "doctrine": "^3.0.0", "enquirer": "^2.3.5", + "escape-string-regexp": "^4.0.0", "eslint-scope": "^5.1.1", "eslint-utils": "^2.1.0", "eslint-visitor-keys": "^2.0.0", "espree": "^7.3.1", "esquery": "^1.4.0", "esutils": "^2.0.2", - "file-entry-cache": "^6.0.0", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", "functional-red-black-tree": "^1.0.1", - "glob-parent": "^5.0.0", - "globals": "^12.1.0", + "glob-parent": "^5.1.2", + "globals": "^13.6.0", "ignore": "^4.0.6", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", @@ -12339,7 +12380,7 @@ "js-yaml": "^3.13.1", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", - "lodash": "^4.17.20", + "lodash.merge": "^4.6.2", "minimatch": "^3.0.4", "natural-compare": "^1.4.0", "optionator": "^0.9.1", @@ -12348,7 +12389,7 @@ "semver": "^7.2.1", "strip-ansi": "^6.0.0", "strip-json-comments": "^3.1.0", - "table": "^6.0.4", + "table": "^6.0.9", "text-table": "^0.2.0", "v8-compile-cache": "^2.0.3" }, @@ -12368,6 +12409,15 @@ "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", "dev": true }, + "globals": { + "version": "13.9.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.9.0.tgz", + "integrity": "sha512-74/FduwI/JaIrr1H8e71UbDE+5x7pIPs1C2rrwC52SszOo043CsWOZEMW7o2Y58xwm9b+0RBKDxY5n2sUpEFxA==", + "dev": true, + "requires": { + "type-fest": "^0.20.2" + } + }, "strip-ansi": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", @@ -12376,6 +12426,12 @@ "requires": { "ansi-regex": "^5.0.0" } + }, + "type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true } } }, @@ -12407,29 +12463,23 @@ } }, "eslint-module-utils": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.0.tgz", - "integrity": "sha512-6j9xxegbqe8/kZY8cYpcp0xhbK0EgJlg3g9mib3/miLaExuuwc3n5UEfSnU6hWMbT0FAYVvDbL9RrRgpUeQIvA==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.1.tgz", + "integrity": "sha512-ZXI9B8cxAJIH4nfkhTwcRTEAnrVfobYqwjWy/QMCZ8rHkZHFjf9yO4BzpiF9kCSfNlMG54eKigISHpX0+AaT4A==", "dev": true, "requires": { - "debug": "^2.6.9", + "debug": "^3.2.7", "pkg-dir": "^2.0.0" }, "dependencies": { "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, "requires": { - "ms": "2.0.0" + "ms": "^2.1.1" } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true } } }, @@ -12444,23 +12494,25 @@ } }, "eslint-plugin-import": { - "version": 
"2.22.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.22.1.tgz", - "integrity": "sha512-8K7JjINHOpH64ozkAhpT3sd+FswIZTfMZTjdx052pnWrgRCVfp8op9tbjpAk3DdUeI/Ba4C8OjdC0r90erHEOw==", + "version": "2.23.4", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.23.4.tgz", + "integrity": "sha512-6/wP8zZRsnQFiR3iaPFgh5ImVRM1WN5NUWfTIRqwOdeiGJlBcSk82o1FEVq8yXmy4lkIzTo7YhHCIxlU/2HyEQ==", "dev": true, "requires": { - "array-includes": "^3.1.1", - "array.prototype.flat": "^1.2.3", - "contains-path": "^0.1.0", + "array-includes": "^3.1.3", + "array.prototype.flat": "^1.2.4", "debug": "^2.6.9", - "doctrine": "1.5.0", + "doctrine": "^2.1.0", "eslint-import-resolver-node": "^0.3.4", - "eslint-module-utils": "^2.6.0", + "eslint-module-utils": "^2.6.1", + "find-up": "^2.0.0", "has": "^1.0.3", + "is-core-module": "^2.4.0", "minimatch": "^3.0.4", - "object.values": "^1.1.1", - "read-pkg-up": "^2.0.0", - "resolve": "^1.17.0", + "object.values": "^1.1.3", + "pkg-up": "^2.0.0", + "read-pkg-up": "^3.0.0", + "resolve": "^1.20.0", "tsconfig-paths": "^3.9.0" }, "dependencies": { @@ -12474,13 +12526,12 @@ } }, "doctrine": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", - "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", "dev": true, "requires": { - "esutils": "^2.0.2", - "isarray": "^1.0.0" + "esutils": "^2.0.2" } }, "ms": { @@ -12520,10 +12571,11 @@ } }, "eslint-plugin-promise": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-4.3.1.tgz", - "integrity": "sha512-bY2sGqyptzFBDLh/GMbAxfdJC+b0f23ME63FOE4+Jao0oZ3E1LEwFtWJX/1pGMJLiTtrSSern2CRM/g+dfc0eQ==", - "dev": true + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-5.1.0.tgz", + "integrity": "sha512-NGmI6BH5L12pl7ScQHbg7tvtk4wPxxj8yPHH47NvSmMtFneC077PSeY3huFj06ZWZvtbfxSPt3RuOQD5XcR4ng==", + "dev": true, + "requires": {} }, "eslint-plugin-standard": { "version": "5.0.0", @@ -12560,15 +12612,9 @@ } }, "eslint-visitor-keys": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.0.0.tgz", - "integrity": "sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ==", - "dev": true - }, - "esm": { - "version": "3.2.25", - "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", - "integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", "dev": true }, "espree": { @@ -12681,9 +12727,9 @@ "dev": true }, "file-entry-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.0.tgz", - "integrity": "sha512-fqoO76jZ3ZnYrXLDRxBR1YvOvc0k844kcOg40bgsPrE25LAb/PDqTY+ho64Xh2c8ZXgIKldchCFHczG2UVRcWA==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": 
"sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", "dev": true, "requires": { "flat-cache": "^3.0.4" @@ -12705,33 +12751,33 @@ } }, "find-cache-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", - "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.1.tgz", + "integrity": "sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ==", "dev": true, "requires": { "commondir": "^1.0.1", - "make-dir": "^2.0.0", - "pkg-dir": "^3.0.0" + "make-dir": "^3.0.2", + "pkg-dir": "^4.1.0" }, "dependencies": { "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, "requires": { - "locate-path": "^3.0.0" + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" } }, "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, "requires": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" + "p-locate": "^4.1.0" } }, "p-limit": { @@ -12744,12 +12790,12 @@ } }, "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dev": true, "requires": { - "p-limit": "^2.0.0" + "p-limit": "^2.2.0" } }, "p-try": { @@ -12758,13 +12804,19 @@ "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "dev": true }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, "pkg-dir": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", - "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", "dev": true, "requires": { - "find-up": "^3.0.0" + "find-up": "^4.0.0" } } } @@ -12800,68 +12852,14 @@ "integrity": "sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA==", "dev": true }, - "flow-parser": { - 
"version": "0.144.0", - "resolved": "https://registry.npmjs.org/flow-parser/-/flow-parser-0.144.0.tgz", - "integrity": "sha512-si2lCamPs0N1QcTiQY8p9RxvVsVGbx4rpkX6dcfUQ2OOvEg6Cya7LItlPqcx54Gtakdx1St6TseQlV8nafYW5g==", - "dev": true - }, - "flow-remove-types": { - "version": "2.144.0", - "resolved": "https://registry.npmjs.org/flow-remove-types/-/flow-remove-types-2.144.0.tgz", - "integrity": "sha512-1ctvKynmoMtqN9Xcyx3NKLGtcpBy9YqW50F9ENX+ivMnRBxks+UBI8iFC+AObTqAfk9C5eUTTt6vMZkmTrskdg==", - "dev": true, - "requires": { - "flow-parser": "^0.144.0", - "pirates": "^3.0.2", - "vlq": "^0.2.1" - } - }, "foreground-child": { - "version": "1.5.6", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-1.5.6.tgz", - "integrity": "sha1-T9ca0t/elnibmApcCilZN8svXOk=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", + "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", "dev": true, "requires": { - "cross-spawn": "^4", - "signal-exit": "^3.0.0" - }, - "dependencies": { - "cross-spawn": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-4.0.2.tgz", - "integrity": "sha1-e5JHYhwjrf3ThWAEqCPL45dCTUE=", - "dev": true, - "requires": { - "lru-cache": "^4.0.1", - "which": "^1.2.9" - } - }, - "lru-cache": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", - "dev": true, - "requires": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" - } - }, - "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - }, - "yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=", - "dev": true - } + "cross-spawn": "^7.0.0", + "signal-exit": "^3.0.2" } }, "forever-agent": { @@ -12870,15 +12868,22 @@ "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" }, "form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "dev": true, "requires": { "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", + "combined-stream": "^1.0.8", "mime-types": "^2.1.12" } }, + "fromentries": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz", + "integrity": "sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==", + "dev": true + }, "fs-access": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/fs-access/-/fs-access-2.0.0.tgz", @@ -12922,59 +12927,35 @@ }, "function-bind": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, - "function-loop": { - "version": 
"1.0.2", - "resolved": "https://registry.npmjs.org/function-loop/-/function-loop-1.0.2.tgz", - "integrity": "sha512-Iw4MzMfS3udk/rqxTiDDCllhGwlOrsr50zViTOO/W6lS/9y6B1J0BD2VZzrnWUYBJsl3aeqjgR5v7bWWhZSYbA==", - "dev": true - }, - "functional-red-black-tree": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", - "dev": true - }, - "gauge": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", - "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", - "requires": { - "aproba": "^1.0.3", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", - "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" - }, - "dependencies": { - "aproba": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", - "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" - }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - } + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "function-loop": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/function-loop/-/function-loop-2.0.1.tgz", + "integrity": "sha512-ktIR+O6i/4h+j/ZhZJNdzeI4i9lEPeEK6UPR2EVyTVBqOwcU3Za9xYKLH64ZR9HmcROyRrOkizNyjjtWJzDDkQ==", + "dev": true + }, + "functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", + "dev": true + }, + "gauge": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.1.tgz", + "integrity": "sha512-6STz6KdQgxO4S/ko+AbjlFGGdGcknluoqU+79GOFCDqqyYj5OanQf9AjxwN0jCidtT+ziPMmPSt9E4hfQ0CwIQ==", + "requires": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.2", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.1", + "object-assign": "^4.1.1", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1 || ^2.0.0", + "strip-ansi": "^3.0.1 || ^4.0.0", + "wide-align": "^1.1.2" } }, "gensync": { @@ -13000,6 +12981,12 @@ "has-symbols": "^1.0.1" } }, + "get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true + }, "getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", @@ -13015,9 +13002,9 @@ "dev": true }, "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": 
"sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", "requires": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -13028,22 +13015,19 @@ } }, "glob-parent": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz", - "integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, "requires": { "is-glob": "^4.0.1" } }, "globals": { - "version": "12.4.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", - "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", - "dev": true, - "requires": { - "type-fest": "^0.8.1" - } + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true }, "graceful-fs": { "version": "4.2.6", @@ -13072,15 +13056,21 @@ "function-bind": "^1.1.1" } }, + "has-bigints": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz", + "integrity": "sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==", + "dev": true + }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, "has-symbols": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", - "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", "dev": true }, "has-unicode": { @@ -13089,12 +13079,13 @@ "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" }, "hasha": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/hasha/-/hasha-3.0.0.tgz", - "integrity": "sha1-UqMvq4Vp1BymmmH/GiFPjrfIvTk=", + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/hasha/-/hasha-5.2.2.tgz", + "integrity": "sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==", "dev": true, "requires": { - "is-stream": "^1.0.1" + "is-stream": "^2.0.0", + "type-fest": "^0.8.0" } }, "hast-to-hyperscript": { @@ -13148,14 +13139,6 @@ "web-namespaces": "^1.0.0", "xtend": "^4.0.0", "zwitch": "^1.0.0" - }, - "dependencies": { - "parse5": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", - "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", - "dev": true - } } }, "hast-util-to-parse5": { @@ -13185,9 +13168,9 @@ } }, "hosted-git-info": { - "version": "3.0.8", - "resolved": 
"https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-3.0.8.tgz", - "integrity": "sha512-aXpmwoOhRBrw6X3j0h5RloK4x1OzsxMPyxqIHyNfSe2pypkVTZFpEiRoSipPEPlMrh0HW/XsjkJ5WgnCirpNUw==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.0.2.tgz", + "integrity": "sha512-c9OGXbZ3guC/xOlCg1Ci/VgWlwsqDv1yMQL1CWqXDL0hDjXuNcq0zuR4xqPSuasI3kqFDhqSyTjREz5gzq0fXg==", "requires": { "lru-cache": "^6.0.0" } @@ -13256,9 +13239,9 @@ } }, "iconv-lite": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.2.tgz", - "integrity": "sha512-2y91h5OpQlolefMPmUlivelittSWy0rP+oYVpn6A7GwVHNE8AWzoYOBNmlwks3LobaJxgHCYZAnyNo2GgpNRNQ==", + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", "optional": true, "requires": { "safer-buffer": ">= 2.1.2 < 3.0.0" @@ -13277,9 +13260,9 @@ "dev": true }, "ignore-walk": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.3.tgz", - "integrity": "sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.4.tgz", + "integrity": "sha512-PY6Ii8o1jMRA1z4F2hRkH/xN59ox43DavKvD3oDpfurRlOJyAHpifIwpbdv1n4jt4ov0jSpw3kQ4GhJnpBL6WQ==", "requires": { "minimatch": "^3.0.4" } @@ -13329,16 +13312,16 @@ "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==" }, "init-package-json": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-2.0.2.tgz", - "integrity": "sha512-PO64kVeArePvhX7Ff0jVWkpnE1DfGRvaWcStYrPugcJz9twQGYibagKJuIMHCX7ENcp0M6LJlcjLBuLD5KeJMg==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-2.0.3.tgz", + "integrity": "sha512-tk/gAgbMMxR6fn1MgMaM1HpU1ryAmBWWitnxG5OhuNXeX0cbpbgV5jA4AIpQJVNoyOfOevTtO6WX+rPs+EFqaQ==", "requires": { "glob": "^7.1.1", - "npm-package-arg": "^8.1.0", + "npm-package-arg": "^8.1.2", "promzard": "^0.3.0", "read": "~1.0.1", - "read-package-json": "^3.0.0", - "semver": "^7.3.2", + "read-package-json": "^3.0.1", + "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "^3.0.0" } @@ -13381,6 +13364,12 @@ "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", "dev": true }, + "is-bigint": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.2.tgz", + "integrity": "sha512-0JV5+SOCQkIdzjBK9buARcV804Ddu7A0Qet6sHi3FimE9ne6m4BGQZfRn+NZiXbBk4F4XmHfDZIipLj9pX8dSA==", + "dev": true + }, "is-binary-path": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", @@ -13390,6 +13379,15 @@ "binary-extensions": "^2.0.0" } }, + "is-boolean-object": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.1.tgz", + "integrity": "sha512-bXdQWkECBUIAcCkeH1unwJLIpZYaa5VvuygSyS/c2lf719mTKZDU5UdDRlpd01UjADgmW8RfqaP+mRaVPdr/Ng==", + "dev": true, + "requires": { + "call-bind": "^1.0.2" + } + }, "is-buffer": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", @@ -13411,17 +13409,17 @@ } }, "is-core-module": { - "version": "2.2.0", - "resolved": 
"https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz", - "integrity": "sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.5.0.tgz", + "integrity": "sha512-TXCMSDsEHMEEZ6eCA8rwRDbLu55MRGmrctljsBX/2v1d9/GzqHOxW5c5oPSgrUt2vBFXebu9rGqckXGPWOlYpg==", "requires": { "has": "^1.0.3" } }, "is-date-object": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", - "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.4.tgz", + "integrity": "sha512-/b4ZVsG7Z5XVtIxs/h9W8nvfLgSAyKYdtGWQLbqy6jA1icmgjf8WCoTKgeS4wy5tYaPePouzFMANbnj94c2Z+A==", "dev": true }, "is-decimal": { @@ -13473,6 +13471,12 @@ "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "dev": true }, + "is-number-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.5.tgz", + "integrity": "sha512-RU0lI/n95pMoUKu9v1BZP5MBcZuNSVJkMkAG2dJqC4z2GlkGUNeH68SuHuBKBD/XFe+LHZ+f9BKkLET60Niedw==", + "dev": true + }, "is-plain-obj": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", @@ -13480,40 +13484,40 @@ "dev": true }, "is-potential-custom-element-name": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.0.tgz", - "integrity": "sha1-DFLlS8yjkbssSUsh6GJtczbG45c=", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", + "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", "dev": true }, "is-regex": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.2.tgz", - "integrity": "sha512-axvdhb5pdhEVThqJzYXwMlVuZwC+FF2DpcOhTS+y/8jVq4trxyPgfcwIxIKiyeuLlSQYKkmUaPQJ8ZE4yNKXDg==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.3.tgz", + "integrity": "sha512-qSVXFz28HM7y+IWX6vLCsexdlvzT1PJNFSBuaQLQ5o0IEw8UDYW6/2+eCMVyIsbM8CNLX2a/QWmSpyxYEHY7CQ==", "dev": true, "requires": { "call-bind": "^1.0.2", - "has-symbols": "^1.0.1" + "has-symbols": "^1.0.2" } }, "is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", + "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==", "dev": true }, "is-string": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", - "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.6.tgz", + "integrity": "sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w==", "dev": true }, "is-symbol": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", - "integrity": 
"sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", "dev": true, "requires": { - "has-symbols": "^1.0.1" + "has-symbols": "^1.0.2" } }, "is-typedarray": { @@ -13527,6 +13531,12 @@ "integrity": "sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w==", "dev": true }, + "is-windows": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", + "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", + "dev": true + }, "is-word-character": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-word-character/-/is-word-character-1.0.4.tgz", @@ -13549,33 +13559,30 @@ "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" }, "istanbul-lib-coverage": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.5.tgz", - "integrity": "sha512-8aXznuEPCJvGnMSRft4udDRDtb1V3pkQkMMI5LI+6HuQz5oQ4J2UFn1H82raA3qJtyOLkkwVqICBQkjnGtn5mA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz", + "integrity": "sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg==", "dev": true }, "istanbul-lib-hook": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-2.0.7.tgz", - "integrity": "sha512-vrRztU9VRRFDyC+aklfLoeXyNdTfga2EI3udDGn4cZ6fpSXpHLV9X6CHvfoMCPtggg8zvDDmC4b9xfu0z6/llA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz", + "integrity": "sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==", "dev": true, "requires": { - "append-transform": "^1.0.0" + "append-transform": "^2.0.0" } }, "istanbul-lib-instrument": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-3.3.0.tgz", - "integrity": "sha512-5nnIN4vo5xQZHdXno/YDXJ0G+I3dAm4XgzfSVTPLQpj/zAV2dV6Juy0yaf10/zrJOJeHoN3fraFe+XRq2bFVZA==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz", + "integrity": "sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==", "dev": true, "requires": { - "@babel/generator": "^7.4.0", - "@babel/parser": "^7.4.3", - "@babel/template": "^7.4.0", - "@babel/traverse": "^7.4.3", - "@babel/types": "^7.4.0", - "istanbul-lib-coverage": "^2.0.5", - "semver": "^6.0.0" + "@babel/core": "^7.7.5", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.0.0", + "semver": "^6.3.0" }, "dependencies": { "semver": { @@ -13587,137 +13594,69 @@ } }, "istanbul-lib-processinfo": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-1.0.0.tgz", - "integrity": "sha512-FY0cPmWa4WoQNlvB8VOcafiRoB5nB+l2Pz2xGuXHRSy1KM8QFOYfz/rN+bGMCAeejrY3mrpF5oJHcN0s/garCg==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.2.tgz", + "integrity": 
"sha512-kOwpa7z9hme+IBPZMzQ5vdQj8srYgAtaRqeI48NGmAQ+/5yKiHLV0QbYqQpxsdEF0+w14SoB8YbnHKcXE2KnYw==", "dev": true, "requires": { "archy": "^1.0.0", - "cross-spawn": "^6.0.5", - "istanbul-lib-coverage": "^2.0.3", - "rimraf": "^2.6.3", - "uuid": "^3.3.2" + "cross-spawn": "^7.0.0", + "istanbul-lib-coverage": "^3.0.0-alpha.1", + "make-dir": "^3.0.0", + "p-map": "^3.0.0", + "rimraf": "^3.0.0", + "uuid": "^3.3.3" }, "dependencies": { - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", - "dev": true - }, - "rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", - "dev": true, - "requires": { - "glob": "^7.1.3" - } - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - }, - "shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "dev": true, - "requires": { - "shebang-regex": "^1.0.0" - } - }, - "shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true - }, - "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "p-map": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", "dev": true, "requires": { - "isexe": "^2.0.0" + "aggregate-error": "^3.0.0" } } } }, "istanbul-lib-report": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-2.0.8.tgz", - "integrity": "sha512-fHBeG573EIihhAblwgxrSenp0Dby6tJMFR/HvlerBsrCTD5bkUuoNtn3gVh29ZCS824cGGBPn7Sg7cNk+2xUsQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==", "dev": true, "requires": { - "istanbul-lib-coverage": "^2.0.5", - "make-dir": "^2.1.0", - "supports-color": "^6.1.0" - }, - "dependencies": { - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "dev": true - }, - "supports-color": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", - "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", 
- "dev": true, - "requires": { - "has-flag": "^3.0.0" - } - } + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^3.0.0", + "supports-color": "^7.1.0" } }, "istanbul-lib-source-maps": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-3.0.6.tgz", - "integrity": "sha512-R47KzMtDJH6X4/YW9XTx+jrLnZnscW4VpNN+1PViSYTejLVPWv7oov+Duf8YQSPyVRUvueQqz1TcsC6mooZTXw==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.0.tgz", + "integrity": "sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg==", "dev": true, "requires": { "debug": "^4.1.1", - "istanbul-lib-coverage": "^2.0.5", - "make-dir": "^2.1.0", - "rimraf": "^2.6.3", + "istanbul-lib-coverage": "^3.0.0", "source-map": "^0.6.1" }, "dependencies": { - "rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", - "dev": true, - "requires": { - "glob": "^7.1.3" - } + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true } } }, "istanbul-reports": { - "version": "2.2.7", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-2.2.7.tgz", - "integrity": "sha512-uu1F/L1o5Y6LzPVSVZXNOoD/KXpJue9aeLRd0sM9uMXfZvzomB0WxVamWb5ue8kA2vVWEmW7EG+A5n3f1kqHKg==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.0.2.tgz", + "integrity": "sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw==", "dev": true, "requires": { - "html-escaper": "^2.0.0" + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" } }, "jackspeak": { @@ -13751,37 +13690,46 @@ "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" }, "jsdom": { - "version": "16.4.0", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.4.0.tgz", - "integrity": "sha512-lYMm3wYdgPhrl7pDcRmvzPhhrGVBeVhPIqeHjzeiHN3DFmD1RBpbExbi8vU7BJdH8VAZYovR8DMt0PNNDM7k8w==", + "version": "16.6.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.6.0.tgz", + "integrity": "sha512-Ty1vmF4NHJkolaEmdjtxTfSfkdb8Ywarwf63f+F8/mDD1uLSSWDxDuMiZxiPhwunLrn9LOSVItWj4bLYsLN3Dg==", "dev": true, "requires": { - "abab": "^2.0.3", - "acorn": "^7.1.1", + "abab": "^2.0.5", + "acorn": "^8.2.4", "acorn-globals": "^6.0.0", "cssom": "^0.4.4", - "cssstyle": "^2.2.0", + "cssstyle": "^2.3.0", "data-urls": "^2.0.0", - "decimal.js": "^10.2.0", + "decimal.js": "^10.2.1", "domexception": "^2.0.1", - "escodegen": "^1.14.1", + "escodegen": "^2.0.0", + "form-data": "^3.0.0", "html-encoding-sniffer": "^2.0.1", - "is-potential-custom-element-name": "^1.0.0", + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "is-potential-custom-element-name": "^1.0.1", "nwsapi": "^2.2.0", - "parse5": "5.1.1", - "request": "^2.88.2", - "request-promise-native": "^1.0.8", - "saxes": "^5.0.0", + "parse5": "6.0.1", + "saxes": "^5.0.1", "symbol-tree": "^3.2.4", - "tough-cookie": "^3.0.1", + "tough-cookie": "^4.0.0", "w3c-hr-time": "^1.0.2", "w3c-xmlserializer": "^2.0.0", "webidl-conversions": "^6.1.0", "whatwg-encoding": "^1.0.5", "whatwg-mimetype": "^2.3.0", - "whatwg-url": "^8.0.0", - "ws": "^7.2.3", + "whatwg-url": 
"^8.5.0", + "ws": "^7.4.5", "xml-name-validator": "^3.0.0" + }, + "dependencies": { + "acorn": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.3.0.tgz", + "integrity": "sha512-tqPKHZ5CaBJw0Xmy0ZZvLs1qTV+BNFSyvn77ASXkpBNfIRk8ev26fKrD9iLGwGA9zedPao52GSHzq8lyZG0NUw==", + "dev": true + } } }, "jsesc": { @@ -13824,9 +13772,9 @@ "dev": true }, "json-stringify-nice": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/json-stringify-nice/-/json-stringify-nice-1.1.1.tgz", - "integrity": "sha512-aHOgcSoOLvmFZQMvZ27rFw68r4e9OlQtH7YEcF2u5amVYbF/D3cKBXKCvl5EGhQz2NwJZ6RPfgRX6yNQ+UBKJw==" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/json-stringify-nice/-/json-stringify-nice-1.1.4.tgz", + "integrity": "sha512-5Z5RFW63yxReJ7vANgW6eZFGWaQvnPE3WNmZoOJrSkGju2etKA2L5rrOa1sm877TVTFt57A80BH1bArcmlLfPw==" }, "json-stringify-safe": { "version": "5.0.1", @@ -13834,12 +13782,12 @@ "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" }, "json5": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", - "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", + "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", "dev": true, "requires": { - "minimist": "^1.2.0" + "minimist": "^1.2.5" } }, "jsonparse": { @@ -13859,9 +13807,9 @@ } }, "just-diff": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/just-diff/-/just-diff-3.0.2.tgz", - "integrity": "sha512-+EiNvacECnZbszZa5IMjzrJ3dy2HKMXyGaNYWBnXy+iWW+437jIvQUrWaM9M+XI/6gOH8EjqvhGUOSh7ETekyg==" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/just-diff/-/just-diff-3.1.1.tgz", + "integrity": "sha512-sdMWKjRq8qWZEjDcVA6llnUT8RDEBIfOiGpYFPYa9u+2c39JCsejktSP7mj5eRid5EIvTzIpQ2kDOCw1Nq9BjQ==" }, "just-diff-apply": { "version": "3.0.0", @@ -13890,53 +13838,77 @@ } }, "libnpmaccess": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/libnpmaccess/-/libnpmaccess-4.0.1.tgz", - "integrity": "sha512-ZiAgvfUbvmkHoMTzdwmNWCrQRsDkOC+aM5BDfO0C9aOSwF3R1LdFDBD+Rer1KWtsoQYO35nXgmMR7OUHpDRxyA==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/libnpmaccess/-/libnpmaccess-4.0.3.tgz", + "integrity": "sha512-sPeTSNImksm8O2b6/pf3ikv4N567ERYEpeKRPSmqlNt1dTZbvgpJIzg5vAhXHpw2ISBsELFRelk0jEahj1c6nQ==", "requires": { "aproba": "^2.0.0", "minipass": "^3.1.1", - "npm-package-arg": "^8.0.0", - "npm-registry-fetch": "^9.0.0" + "npm-package-arg": "^8.1.2", + "npm-registry-fetch": "^11.0.0" } }, "libnpmdiff": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/libnpmdiff/-/libnpmdiff-2.0.3.tgz", - "integrity": "sha512-BgVvJCjd+EGY3Ifb3+gWkZwMjn6kYMtruT88XXOrJCWyjnG5aRdFv3lKuJx5JdU5ku08G5LlY8tOZdfRn72m7w==", + "version": "file:packages/libnpmdiff", "requires": { "@npmcli/disparity-colors": "^1.0.1", + "@npmcli/installed-package-contents": "^1.0.7", "binary-extensions": "^2.2.0", "diff": "^5.0.0", + "eslint": "^7.28.0", + "eslint-plugin-import": "^2.23.4", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^5.1.0", + "eslint-plugin-standard": "^5.0.0", "minimatch": "^3.0.4", - "pacote": "^11.2.3", + "npm-package-arg": "^8.1.4", + "pacote": "^11.3.4", + "tap": "^15.0.9", "tar": "^6.1.0" } }, + "libnpmexec": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/libnpmexec/-/libnpmexec-2.0.0.tgz", + "integrity": 
"sha512-9zHswx//Lp2ao+huWF2aL+6v4haMncyxNusk6Us2fbLNnPh3+rgSkv38LJ2v8gmKS2kAnkUmQf8pHjcZ+7Z3NA==", + "requires": { + "@npmcli/arborist": "^2.3.0", + "@npmcli/ci-detect": "^1.3.0", + "@npmcli/run-script": "^1.8.4", + "chalk": "^4.1.0", + "mkdirp-infer-owner": "^2.0.0", + "npm-package-arg": "^8.1.2", + "pacote": "^11.3.1", + "proc-log": "^1.0.0", + "read": "^1.0.7", + "read-package-json-fast": "^2.0.2", + "walk-up-path": "^1.0.0" + } + }, "libnpmfund": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/libnpmfund/-/libnpmfund-1.0.2.tgz", - "integrity": "sha512-Scw2JiLxfT7wqW/VbxIXV8u3FaFT/ZlR8YLFgTdCPsL1Hhli0554ZXyP8JTu1sLeDpHsoqtgLb4mgYVQnqigjA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/libnpmfund/-/libnpmfund-1.1.0.tgz", + "integrity": "sha512-Kfmh3pLS5/RGKG5WXEig8mjahPVOxkik6lsbH4iX0si1xxNi6eeUh/+nF1MD+2cgalsQif3O5qyr6mNz2ryJrQ==", "requires": { - "@npmcli/arborist": "^2.0.0" + "@npmcli/arborist": "^2.5.0" } }, "libnpmhook": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/libnpmhook/-/libnpmhook-6.0.1.tgz", - "integrity": "sha512-rwiWIWAQ6R5sPFRi9gsSC/+1/BxFlxk5nNQysVTXEHbqM9ds8g/duW79wRbZKnRyK1xyOmafxbj69nt9tcUkyw==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/libnpmhook/-/libnpmhook-6.0.3.tgz", + "integrity": "sha512-3fmkZJibIybzmAvxJ65PeV3NzRc0m4xmYt6scui5msocThbEp4sKFT80FhgrCERYDjlUuFahU6zFNbJDHbQ++g==", "requires": { "aproba": "^2.0.0", - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" } }, "libnpmorg": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/libnpmorg/-/libnpmorg-2.0.1.tgz", - "integrity": "sha512-Wj0aApN6TfZWHqtJNjkY7IeQpX24jrQD58IHrEz234quKVRYlegUiMsZl2g4OEFeZNSSc9QN28EdI1SBkUlW7g==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/libnpmorg/-/libnpmorg-2.0.3.tgz", + "integrity": "sha512-JSGl3HFeiRFUZOUlGdiNcUZOsUqkSYrg6KMzvPZ1WVZ478i47OnKSS0vkPmX45Pai5mTKuwIqBMcGWG7O8HfdA==", "requires": { "aproba": "^2.0.0", - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" } }, "libnpmpack": { @@ -13950,53 +13922,83 @@ } }, "libnpmpublish": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/libnpmpublish/-/libnpmpublish-4.0.0.tgz", - "integrity": "sha512-2RwYXRfZAB1x/9udKpZmqEzSqNd7ouBRU52jyG14/xG8EF+O9A62d7/XVR3iABEQHf1iYhkm0Oq9iXjrL3tsXA==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/libnpmpublish/-/libnpmpublish-4.0.2.tgz", + "integrity": "sha512-+AD7A2zbVeGRCFI2aO//oUmapCwy7GHqPXFJh3qpToSRNU+tXKJ2YFUgjt04LPPAf2dlEH95s6EhIHM1J7bmOw==", "requires": { - "normalize-package-data": "^3.0.0", - "npm-package-arg": "^8.1.0", - "npm-registry-fetch": "^9.0.0", + "normalize-package-data": "^3.0.2", + "npm-package-arg": "^8.1.2", + "npm-registry-fetch": "^11.0.0", "semver": "^7.1.3", - "ssri": "^8.0.0" + "ssri": "^8.0.1" } }, "libnpmsearch": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/libnpmsearch/-/libnpmsearch-3.1.0.tgz", - "integrity": "sha512-UQyzQjtAv99kZDuijqTB2Do63qtt+2SKNOVSTnehWTQbxzXF7Jvc8UD3YNPljm8+Y5T31K2AqptbY5BD6XHlIg==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/libnpmsearch/-/libnpmsearch-3.1.2.tgz", + "integrity": "sha512-BaQHBjMNnsPYk3Bl6AiOeVuFgp72jviShNBw5aHaHNKWqZxNi38iVNoXbo6bG/Ccc/m1To8s0GtMdtn6xZ1HAw==", "requires": { - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" } }, "libnpmteam": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/libnpmteam/-/libnpmteam-2.0.2.tgz", - "integrity": 
"sha512-QGvtbMPdQzK+XybBPK0UjfLEI9fiDPQSFMbZW+2lmm0BgPoqxHle0Wl90bsIyBVY7pYzp45MgMqQNo7KWCLpDA==", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/libnpmteam/-/libnpmteam-2.0.4.tgz", + "integrity": "sha512-FPrVJWv820FZFXaflAEVTLRWZrerCvfe7ZHSMzJ/62EBlho2KFlYKjyNEsPW3JiV7TLSXi3vo8u0gMwIkXSMTw==", "requires": { "aproba": "^2.0.0", - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" } }, "libnpmversion": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/libnpmversion/-/libnpmversion-1.0.11.tgz", - "integrity": "sha512-HKbfJ0wwx+W9Br4bvbHUMN/YIe7B8qmFtdaLZnXEUozaaTD6gGpIEf1aH1xRlGfNPocT6YBz3O6+RAgSndAgbA==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/libnpmversion/-/libnpmversion-1.2.1.tgz", + "integrity": "sha512-AA7x5CFgBFN+L4/JWobnY5t4OAHjQuPbAwUYJ7/NtHuyLut5meb+ne/aj0n7PWNiTGCJcRw/W6Zd2LoLT7EZuQ==", "requires": { - "@npmcli/git": "^2.0.6", - "@npmcli/run-script": "^1.8.3", - "read-package-json-fast": "^2.0.1", - "semver": "^7.3.4", + "@npmcli/git": "^2.0.7", + "@npmcli/run-script": "^1.8.4", + "json-parse-even-better-errors": "^2.3.1", + "semver": "^7.3.5", "stringify-package": "^1.0.1" } }, + "libtap": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/libtap/-/libtap-1.1.1.tgz", + "integrity": "sha512-Fye8fh1+G7E8qqmjQaY+pXGxy7HM0S6bqCCJFLa16+g2jODBByxbJFDpjbDNF69wfRVyvJ+foLZc1WTIv7dx+g==", + "dev": true, + "requires": { + "async-hook-domain": "^2.0.1", + "bind-obj-methods": "^3.0.0", + "diff": "^4.0.2", + "function-loop": "^2.0.1", + "minipass": "^3.1.1", + "own-or": "^1.0.0", + "own-or-env": "^1.0.1", + "signal-exit": "^3.0.2", + "stack-utils": "^2.0.1", + "tap-parser": "^10.0.1", + "tap-yaml": "^1.0.0", + "tcompare": "^5.0.1", + "trivial-deferred": "^1.0.1", + "yapool": "^1.0.0" + }, + "dependencies": { + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true + } + } + }, "licensee": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/licensee/-/licensee-8.1.0.tgz", - "integrity": "sha512-rnXUmUuLzZrGfm3bfWNl71Emw/OJqwUyIrIRq5D06Ct9EbiFnZtiydA5ryf4FDPikdneJ0l1Q+g6TuMjpWGfrA==", + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/licensee/-/licensee-8.2.0.tgz", + "integrity": "sha512-Z5fQ+qP85N38klGijH0bXiWVlKqLKDMqsckKL+VcA+ZQ/DJK5cpIpvryGHtREaQ3Ah5jrgtXN8mHfII7UtlsJg==", "dev": true, "requires": { - "@blueoak/list": "^1.0.2", + "@blueoak/list": "^2.0.0", "correct-license-metadata": "^1.0.1", "docopt": "^0.6.2", "fs-access": "^2.0.0", @@ -14022,14 +14024,14 @@ } }, "load-json-file": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", - "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", "dev": true, "requires": { "graceful-fs": "^4.1.2", - "parse-json": "^2.2.0", - "pify": "^2.0.0", + "parse-json": "^4.0.0", + "pify": "^3.0.0", "strip-bom": "^3.0.0" } }, @@ -14044,9 +14046,15 @@ } }, "lodash": { - "version": "4.17.20", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz", - "integrity": "sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==", + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + 
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "lodash.clonedeep": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", + "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=", "dev": true }, "lodash.flattendeep": { @@ -14055,10 +14063,16 @@ "integrity": "sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI=", "dev": true }, - "lodash.sortby": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", - "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=", + "lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "lodash.truncate": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", + "integrity": "sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=", "dev": true }, "lodash.uniq": { @@ -14091,42 +14105,29 @@ } }, "make-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", - "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "dev": true, "requires": { - "pify": "^4.0.1", - "semver": "^5.6.0" + "semver": "^6.0.0" }, "dependencies": { - "pify": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", - "dev": true - }, "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "dev": true } } }, - "make-error": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", - "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", - "dev": true - }, "make-fetch-happen": { - "version": "8.0.14", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-8.0.14.tgz", - "integrity": "sha512-EsS89h6l4vbfJEtBZnENTOFk8mCRpY5ru36Xe5bcX1KYIli2mkSHqoFsp5O1wMDvTJJzxe/4THpCTtygjeeGWQ==", + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.0.4.tgz", + "integrity": "sha512-sQWNKMYqSmbAGXqJg2jZ+PmHh5JAybvwu0xM8mZR/bsTjGiTASj3ldXJV7KFHy1k/IJIBkjxQFoWIVsv9+PQMg==", "requires": { "agentkeepalive": "^4.1.3", - "cacache": "^15.0.5", + "cacache": "^15.2.0", "http-cache-semantics": "^4.1.0", "http-proxy-agent": "^4.0.1", "https-proxy-agent": "^5.0.0", @@ -14137,6 +14138,7 @@ "minipass-fetch": "^1.3.2", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.2", "promise-retry": "^2.0.1", "socks-proxy-agent": "^5.0.0", "ssri": "^8.0.0" @@ -14202,26 +14204,17 @@ "integrity": 
"sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=", "dev": true }, - "merge-source-map": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/merge-source-map/-/merge-source-map-1.1.0.tgz", - "integrity": "sha512-Qkcp7P2ygktpMPh2mCQZaf3jhN6D3Z/qVZHSdWvQ+2Ef5HgRAPBO57A77+ENm0CPx2+1Ce/MYKi3ymqdfuqibw==", - "dev": true, - "requires": { - "source-map": "^0.6.1" - } - }, "mime-db": { - "version": "1.45.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.45.0.tgz", - "integrity": "sha512-CkqLUxUk15hofLoLyljJSrukZi8mAtgd+yE5uO4tqRZsdsAJKv0O+rFMhVDRJgozy+yG6md5KwuXhD4ocIoP+w==" + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.49.0.tgz", + "integrity": "sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA==" }, "mime-types": { - "version": "2.1.28", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.28.tgz", - "integrity": "sha512-0TO2yJ5YHYr7M2zzT7gDU1tbwHxEUWBCLt0lscSNpcdAfFyJOVEpRYNS7EXVcTLNj/25QO8gulHC5JtTzSE2UQ==", + "version": "2.1.32", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.32.tgz", + "integrity": "sha512-hJGaVS4G4c9TSMYh2n6SQAGrC4RnfU+daP8G7cSCmaqNjiOoUY0VHCMS42pxnQmVF1GWwFhbHWn3RIxCqTmZ9A==", "requires": { - "mime-db": "1.45.0" + "mime-db": "1.49.0" } }, "mimic-response": { @@ -14261,9 +14254,9 @@ } }, "minipass-fetch": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.3.3.tgz", - "integrity": "sha512-akCrLDWfbdAWkMLBxJEeWTdNsjML+dt5YgOI4gJ53vuO0vrmYQkUPxa6j6V65s9CcePIr2SSWqjT2EcrNseryQ==", + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.3.4.tgz", + "integrity": "sha512-TielGogIzbUEtd1LsjZFs47RWuHHfhl6TiCx1InVxApBAmQ8bL0dL5ilkLGcRvuyW/A9nE+Lvn855Ewz8S0PnQ==", "requires": { "encoding": "^0.1.12", "minipass": "^3.1.0", @@ -14352,26 +14345,19 @@ }, "natural-compare": { "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", - "dev": true - }, - "nested-error-stacks": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/nested-error-stacks/-/nested-error-stacks-2.1.0.tgz", - "integrity": "sha512-AO81vsIO1k1sM4Zrd6Hu7regmJN1NSiAja10gc4bX3F0wd+9rQmcuHQaHVQCYIEC8iFXnE+mavh23GOt7wBgug==", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", "dev": true }, - "nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true + "negotiator": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", + "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==" }, "node-abi": { - "version": "2.19.3", - "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-2.19.3.tgz", - "integrity": "sha512-9xZrlyfvKhWme2EXFKQhZRp1yNWT/uI1luYPr3sFl+H4keYY4xR+1jO7mvTTijIsHf1M+QDe9uWuKeEpLInIlg==", + "version": "2.30.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-2.30.0.tgz", + "integrity": "sha512-g6bZh3YCKQRdwuO/tSZZYJAw622SjsRfJ2X0Iy4sSOHZ34/sPPdVBn8fev2tj7njzLwuqPw9uMtGsGkO5kIQvg==", "dev": true, "requires": { "semver": "^5.4.1" @@ -14386,9 +14372,9 @@ } }, "node-addon-api": { - "version": 
"3.1.0", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.1.0.tgz", - "integrity": "sha512-flmrDNB06LIl5lywUz7YlNGZH/5p0M7W28k8hzd9Lshtdh1wshD2Y+U4h9LD6KObOy1f+fEVdgprPrEymjM5uw==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.2.1.tgz", + "integrity": "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==", "dev": true }, "node-gyp": { @@ -14406,13 +14392,67 @@ "semver": "^7.3.2", "tar": "^6.0.2", "which": "^2.0.2" + }, + "dependencies": { + "aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + }, + "gauge": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", + "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", + "requires": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + } + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "npmlog": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", + "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "requires": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + } + }, + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + } + } } }, - "node-modules-regexp": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz", - "integrity": "sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA=", - "dev": true + "node-preload": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz", + "integrity": "sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==", + "dev": true, + "requires": { + "process-on-spawn": "^1.0.0" + } }, "noop-logger": { "version": "0.1.1", @@ -14429,13 +14469,13 @@ } }, "normalize-package-data": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.0.tgz", - "integrity": "sha512-6lUjEI0d3v6kFrtgA/lOx4zHCWULXsFNIjHolnZCKCTLA6m/G625cdn3O7eNmT0iD3jfo6HZ9cdImGZwf21prw==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.2.tgz", + "integrity": "sha512-6CdZocmfGaKnIHPVFhJJZ3GuR8SsLKvDANFp47Jmy51aKIr8akjAWTSxtpI+MBgBFdSMRyo4hMpDlT6dTffgZg==", "requires": { - "hosted-git-info": "^3.0.6", - "resolve": "^1.17.0", - "semver": "^7.3.2", + "hosted-git-info": "^4.0.1", + "resolve": "^1.20.0", + "semver": "^7.3.4", "validate-npm-package-license": "^3.0.1" } }, @@ -14446,17 +14486,17 @@ "dev": true }, "npm-audit-report": { - "version": "2.1.4", - "resolved": 
"https://registry.npmjs.org/npm-audit-report/-/npm-audit-report-2.1.4.tgz", - "integrity": "sha512-Tz7rnfskSdZ0msTzt2mENC/B+H2QI8u0jN0ck7o3zDsQYIQrek/l3MjEc+CARer+64LsVTU6ZIqNuh0X55QPhw==", + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/npm-audit-report/-/npm-audit-report-2.1.5.tgz", + "integrity": "sha512-YB8qOoEmBhUH1UJgh1xFAv7Jg1d+xoNhsDYiFQlEFThEBui0W1vIz2ZK6FVg4WZjwEdl7uBQlm1jy3MUfyHeEw==", "requires": { "chalk": "^4.0.0" } }, "npm-bundled": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.1.tgz", - "integrity": "sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.2.tgz", + "integrity": "sha512-x5DHup0SuyQcmL3s7Rx/YQ8sbw/Hzg0rj48eN0dV7hf5cmQq5PXIeioroH3raV1QC1yh3uTYuMThvEQF3iKgGQ==", "requires": { "npm-normalize-package-bin": "^1.0.1" } @@ -14481,19 +14521,19 @@ "integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==" }, "npm-package-arg": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-8.1.1.tgz", - "integrity": "sha512-CsP95FhWQDwNqiYS+Q0mZ7FAEDytDZAkNxQqea6IaAFJTAY9Lhhqyl0irU/6PMc7BGfUmnsbHcqxJD7XuVM/rg==", + "version": "8.1.5", + "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-8.1.5.tgz", + "integrity": "sha512-LhgZrg0n0VgvzVdSm1oiZworPbTxYHUJCgtsJW8mGvlDpxTM1vSJc3m5QZeUkhAHIzbz3VCHd/R4osi1L1Tg/Q==", "requires": { - "hosted-git-info": "^3.0.6", - "semver": "^7.0.0", + "hosted-git-info": "^4.0.1", + "semver": "^7.3.4", "validate-npm-package-name": "^3.0.0" } }, "npm-packlist": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-2.1.4.tgz", - "integrity": "sha512-Qzg2pvXC9U4I4fLnUrBmcIT4x0woLtUgxUi9eC+Zrcv1Xx5eamytGAfbDWQ67j7xOcQ2VW1I3su9smVTIdu7Hw==", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-2.2.2.tgz", + "integrity": "sha512-Jt01acDvJRhJGthnUJVF/w6gumWOZxO7IkpY/lsX9//zqQgnF7OJaxgQXcerd4uQOLu7W5bkb4mChL9mdfm+Zg==", "requires": { "glob": "^7.1.6", "ignore-walk": "^3.0.3", @@ -14502,31 +14542,30 @@ } }, "npm-pick-manifest": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-6.1.0.tgz", - "integrity": "sha512-ygs4k6f54ZxJXrzT0x34NybRlLeZ4+6nECAIbr2i0foTnijtS1TJiyzpqtuUAJOps/hO0tNDr8fRV5g+BtRlTw==", + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-6.1.1.tgz", + "integrity": "sha512-dBsdBtORT84S8V8UTad1WlUyKIY9iMsAmqxHbLdeEeBNMLQDlDWWra3wYUx9EBEIiG/YwAy0XyNHDd2goAsfuA==", "requires": { "npm-install-checks": "^4.0.0", - "npm-package-arg": "^8.0.0", - "semver": "^7.0.0" + "npm-normalize-package-bin": "^1.0.1", + "npm-package-arg": "^8.1.2", + "semver": "^7.3.4" } }, "npm-profile": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-5.0.2.tgz", - "integrity": "sha512-hOhpH23PeWUFParJ6T1nquiHJLmFZ5VReTjBf1TJpl1YGuqfUS+ZYujVYPfuMbixosO82kWzvnxg4ZmP4VkTeg==", + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-5.0.4.tgz", + "integrity": "sha512-OKtU7yoAEBOnc8zJ+/uo5E4ugPp09sopo+6y1njPp+W99P8DvQon3BJYmpvyK2Bf1+3YV5LN1bvgXRoZ1LUJBA==", "requires": { - "npm-registry-fetch": "^9.0.0" + "npm-registry-fetch": "^11.0.0" } }, "npm-registry-fetch": { - "version": "9.0.0", - "resolved": 
"https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-9.0.0.tgz", - "integrity": "sha512-PuFYYtnQ8IyVl6ib9d3PepeehcUeHN9IO5N/iCRhyg9tStQcqGQBRVHmfmMWPDERU3KwZoHFvbJ4FPXPspvzbA==", + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-11.0.0.tgz", + "integrity": "sha512-jmlgSxoDNuhAtxUIG6pVwwtz840i994dL14FoNVZisrmZW5kWd63IUTNv1m/hyRSGSqWjCUp/YZlS1BJyNp9XA==", "requires": { - "@npmcli/ci-detect": "^1.0.0", - "lru-cache": "^6.0.0", - "make-fetch-happen": "^8.0.9", + "make-fetch-happen": "^9.0.1", "minipass": "^3.1.3", "minipass-fetch": "^1.3.0", "minipass-json-stream": "^1.0.1", @@ -14540,14 +14579,14 @@ "integrity": "sha512-uQwcd/tY+h1jnEaze6cdX/LrhWhoBxfSknxentoqmIuStxUExxjWd3ULMLFPiFUrZKbOVMowH6Jq2FRWfmhcEw==" }, "npmlog": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", - "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.0.tgz", + "integrity": "sha512-ftpIiLjerL2tUg3dCqN8pOSoB90gqZlzv/gaZoxHaKjeLClrfJIEQ1Pdxi6qSzflz916Bljdy8dTWQ4J7hAFSQ==", "requires": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" + "are-we-there-yet": "^1.1.5", + "console-control-strings": "^1.1.0", + "gauge": "^3.0.0", + "set-blocking": "^2.0.0" } }, "null-check": { @@ -14568,55 +14607,57 @@ "dev": true }, "nyc": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/nyc/-/nyc-14.1.1.tgz", - "integrity": "sha512-OI0vm6ZGUnoGZv/tLdZ2esSVzDwUC88SNs+6JoSOMVxA+gKMB8Tk7jBwgemLx4O40lhhvZCVw1C+OYLOBOPXWw==", + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz", + "integrity": "sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==", "dev": true, "requires": { - "archy": "^1.0.0", - "caching-transform": "^3.0.2", - "convert-source-map": "^1.6.0", - "cp-file": "^6.2.0", - "find-cache-dir": "^2.1.0", - "find-up": "^3.0.0", - "foreground-child": "^1.5.6", - "glob": "^7.1.3", - "istanbul-lib-coverage": "^2.0.5", - "istanbul-lib-hook": "^2.0.7", - "istanbul-lib-instrument": "^3.3.0", - "istanbul-lib-report": "^2.0.8", - "istanbul-lib-source-maps": "^3.0.6", - "istanbul-reports": "^2.2.4", - "js-yaml": "^3.13.1", - "make-dir": "^2.1.0", - "merge-source-map": "^1.1.0", - "resolve-from": "^4.0.0", - "rimraf": "^2.6.3", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "caching-transform": "^4.0.0", + "convert-source-map": "^1.7.0", + "decamelize": "^1.2.0", + "find-cache-dir": "^3.2.0", + "find-up": "^4.1.0", + "foreground-child": "^2.0.0", + "get-package-type": "^0.1.0", + "glob": "^7.1.6", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-hook": "^3.0.0", + "istanbul-lib-instrument": "^4.0.0", + "istanbul-lib-processinfo": "^2.0.2", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.0.2", + "make-dir": "^3.0.0", + "node-preload": "^0.2.1", + "p-map": "^3.0.0", + "process-on-spawn": "^1.0.0", + "resolve-from": "^5.0.0", + "rimraf": "^3.0.0", "signal-exit": "^3.0.2", - "spawn-wrap": "^1.4.2", - "test-exclude": "^5.2.3", - "uuid": "^3.3.2", - "yargs": "^13.2.2", - "yargs-parser": "^13.0.0" + "spawn-wrap": "^2.0.0", + "test-exclude": "^6.0.0", + "yargs": "^15.0.2" }, "dependencies": { "find-up": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, "requires": { - "locate-path": "^3.0.0" + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" } }, "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, "requires": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" + "p-locate": "^4.1.0" } }, "p-limit": { @@ -14629,12 +14670,21 @@ } }, "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "p-map": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", "dev": true, "requires": { - "p-limit": "^2.0.0" + "aggregate-error": "^3.0.0" } }, "p-try": { @@ -14643,14 +14693,17 @@ "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "dev": true }, - "rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", - "dev": true, - "requires": { - "glob": "^7.1.3" - } + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true } } }, @@ -14665,9 +14718,9 @@ "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" }, "object-inspect": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.9.0.tgz", - "integrity": "sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw==", + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.3.tgz", + "integrity": "sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==", "dev": true }, "object-keys": { @@ -14689,26 +14742,25 @@ } }, "object.getownpropertydescriptors": { - "version": "2.1.1", - "resolved": 
"https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.1.tgz", - "integrity": "sha512-6DtXgZ/lIZ9hqx4GtZETobXLR/ZLaa0aqV0kzbn80Rf8Z2e/XFnhA0I7p07N2wH8bBBltr2xQPi6sbKWAY2Eng==", + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.2.tgz", + "integrity": "sha512-WtxeKSzfBjlzL+F9b7M7hewDzMwy+C8NRssHd1YrNlzHzIDrXcXiNOMrezdAEM4UXixgV+vvnyBeN7Rygl2ttQ==", "dev": true, "requires": { - "call-bind": "^1.0.0", + "call-bind": "^1.0.2", "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.1" + "es-abstract": "^1.18.0-next.2" } }, "object.values": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.2.tgz", - "integrity": "sha512-MYC0jvJopr8EK6dPBiO8Nb9mvjdypOachO5REGk6MXzujbBrAisKo3HmdEI6kZDL6fC31Mwee/5YbtMebixeag==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.4.tgz", + "integrity": "sha512-TnGo7j4XSnKQoK3MfvkzqKCi0nVe/D9I9IjwTNYdb/fxYHpjrluHVOgw0AF6jrRFGMPHdfuidR09tIDiIvnaSg==", "dev": true, "requires": { - "call-bind": "^1.0.0", + "call-bind": "^1.0.2", "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.1", - "has": "^1.0.3" + "es-abstract": "^1.18.2" } }, "once": { @@ -14738,12 +14790,6 @@ "word-wrap": "^1.2.3" } }, - "os-homedir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", - "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", - "dev": true - }, "own-or": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/own-or/-/own-or-1.0.0.tgz", @@ -14792,23 +14838,23 @@ "dev": true }, "package-hash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-3.0.0.tgz", - "integrity": "sha512-lOtmukMDVvtkL84rJHI7dpTYq+0rli8N2wlnqUcBuDWCfVhRUfOmnR9SsoHFMLpACvEV60dX7rd0rFaYDZI+FA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz", + "integrity": "sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==", "dev": true, "requires": { "graceful-fs": "^4.1.15", - "hasha": "^3.0.0", + "hasha": "^5.0.0", "lodash.flattendeep": "^4.4.0", "release-zalgo": "^1.0.0" } }, "pacote": { - "version": "11.2.7", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-11.2.7.tgz", - "integrity": "sha512-ogxPor11v/rnU9ukwLlI2dPx22q9iob1+yZyqSwerKsOvBMhU9e+SJHtxY4y2N0MRH4/5jGsGiRLsZeJWyM4dQ==", + "version": "11.3.5", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-11.3.5.tgz", + "integrity": "sha512-fT375Yczn4zi+6Hkk2TBe1x1sP8FgFsEIZ2/iWaXY2r/NkhDJfxbcn5paz1+RTFCyNf+dPnaoBDJoAxXSU8Bkg==", "requires": { - "@npmcli/git": "^2.0.1", + "@npmcli/git": "^2.1.0", "@npmcli/installed-package-contents": "^1.0.6", "@npmcli/promise-spawn": "^1.2.0", "@npmcli/run-script": "^1.8.2", @@ -14821,7 +14867,7 @@ "npm-package-arg": "^8.0.1", "npm-packlist": "^2.1.4", "npm-pick-manifest": "^6.0.0", - "npm-registry-fetch": "^9.0.0", + "npm-registry-fetch": "^11.0.0", "promise-retry": "^2.0.1", "read-package-json-fast": "^2.0.1", "rimraf": "^3.0.2", @@ -14863,18 +14909,19 @@ } }, "parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", "dev": true, 
"requires": { - "error-ex": "^1.2.0" + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" } }, "parse5": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz", - "integrity": "sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", "dev": true }, "path-exists": { @@ -14895,17 +14942,17 @@ "dev": true }, "path-parse": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" }, "path-type": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", "dev": true, "requires": { - "pify": "^2.0.0" + "pify": "^3.0.0" } }, "performance-now": { @@ -14914,39 +14961,39 @@ "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, "picomatch": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", - "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", + "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", "dev": true }, "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", "dev": true }, - "pirates": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/pirates/-/pirates-3.0.2.tgz", - "integrity": "sha512-c5CgUJq6H2k6MJz72Ak1F5sN9n9wlSlJyEnwvpm9/y3WB4E3pHBDT2c6PEiS1vyJvq2bUxUAIu0EGf8Cx4Ic7Q==", + "pkg-dir": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", + "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", "dev": true, "requires": { - "node-modules-regexp": "^1.0.0" + "find-up": "^2.1.0" } }, - "pkg-dir": { + "pkg-up": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", - "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-2.0.0.tgz", + "integrity": "sha1-yBmscoBZpGHKscOImivjxJoATX8=", "dev": true, "requires": { "find-up": "^2.1.0" } }, "prebuild-install": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-6.0.0.tgz", - "integrity": "sha512-h2ZJ1PXHKWZpp1caLw0oX9sagVpL2YTk+ZwInQbQ3QqNd4J03O6MpFNmMTJlkfgPENWqe5kP0WjQLqz5OjLfsw==", + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-6.1.2.tgz", + "integrity": 
"sha512-PzYWIKZeP+967WuKYXlTOhYBgGOvTRSfaKI89XnfJ0ansRAH7hDU45X+K+FZeI1Wb/7p/NnuctPH3g0IqKUuSQ==", "dev": true, "requires": { "detect-libc": "^1.0.3", @@ -14955,15 +15002,70 @@ "minimist": "^1.2.3", "mkdirp-classic": "^0.5.3", "napi-build-utils": "^1.0.1", - "node-abi": "^2.7.0", + "node-abi": "^2.21.0", "noop-logger": "^0.1.1", "npmlog": "^4.0.1", "pump": "^3.0.0", "rc": "^1.2.7", "simple-get": "^3.0.3", "tar-fs": "^2.0.0", - "tunnel-agent": "^0.6.0", - "which-pm-runs": "^1.0.0" + "tunnel-agent": "^0.6.0" + }, + "dependencies": { + "aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", + "dev": true + }, + "gauge": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", + "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", + "dev": true, + "requires": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + } + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "dev": true, + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "npmlog": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", + "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "dev": true, + "requires": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + } + }, + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "dev": true, + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + } + } } }, "prelude-ls": { @@ -14972,11 +15074,25 @@ "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", "dev": true }, + "proc-log": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-1.0.0.tgz", + "integrity": "sha512-aCk8AO51s+4JyuYGg3Q/a6gnrlDO09NpVWePtjp7xwphcoQ04x5WAfCyugcsbLooWcMJ87CLkD4+604IckEdhg==" + }, "process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, + "process-on-spawn": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.0.0.tgz", + "integrity": "sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==", + "dev": true, + "requires": { + "fromentries": "^1.2.0" + } + }, "progress": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", @@ -15035,22 +15151,11 @@ "xtend": "^4.0.0" } }, - "pseudomap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", - "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=", - "dev": true - }, "psl": { "version": "1.8.0", "resolved": 
"https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" }, - "puka": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/puka/-/puka-1.0.1.tgz", - "integrity": "sha512-ssjRZxBd7BT3dte1RR3VoeT2cT/ODH8x+h0rUF1rMqB0srHYf48stSDWfiYakTp5UBZMxroZhB2+ExLDHm7W3g==" - }, "pump": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", @@ -15077,9 +15182,9 @@ "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" }, "queue-microtask": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.2.tgz", - "integrity": "sha512-dB15eXv3p2jDlbOiNLyMabYg1/sXvppd8DP2J3EOCQ0AkuSXCW2tP7mnVouVLJKgUMY6yP0kcQDVpLCN13h4Xg==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", "dev": true }, "rc": { @@ -15150,9 +15255,9 @@ } }, "read-package-json-fast": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-2.0.2.tgz", - "integrity": "sha512-5fyFUyO9B799foVk4n6ylcoAktG/FbE3jwRKxvwaeSrIunaoMc0u81dzXxjeAFKOce7O5KncdfwpGvvs6r5PsQ==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-2.0.3.tgz", + "integrity": "sha512-W/BKtbL+dUjTuRL2vziuYhp76s5HZ9qQhd/dKfWIZveD0O40453QNyZhC0e63lqZrAQ4jiOapVoeJ7JrszenQQ==", "requires": { "json-parse-even-better-errors": "^2.3.0", "npm-normalize-package-bin": "^1.0.1" @@ -15170,9 +15275,9 @@ }, "dependencies": { "hosted-git-info": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", - "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", "dev": true }, "normalize-package-data": { @@ -15208,20 +15313,20 @@ } }, "read-pkg": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", - "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", "dev": true, "requires": { - "load-json-file": "^2.0.0", + "load-json-file": "^4.0.0", "normalize-package-data": "^2.3.2", - "path-type": "^2.0.0" + "path-type": "^3.0.0" }, "dependencies": { "hosted-git-info": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", - "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", "dev": true }, "normalize-package-data": { @@ -15245,13 +15350,13 @@ } }, "read-pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", - "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", + "version": 
"3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz", + "integrity": "sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=", "dev": true, "requires": { "find-up": "^2.0.0", - "read-pkg": "^2.0.0" + "read-pkg": "^3.0.0" } }, "readable-stream": { @@ -15399,42 +15504,20 @@ "uuid": "^3.3.2" }, "dependencies": { - "tough-cookie": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", - "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", "requires": { - "psl": "^1.1.28", - "punycode": "^2.1.1" + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" } - } - } - }, - "request-promise-core": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/request-promise-core/-/request-promise-core-1.1.4.tgz", - "integrity": "sha512-TTbAfBBRdWD7aNNOoVOBH4pN/KigV6LyapYNNlAPA8JwbovRti1E88m3sYAwsLi5ryhPKsE9APwnjFTgdUjTpw==", - "dev": true, - "requires": { - "lodash": "^4.17.19" - } - }, - "request-promise-native": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/request-promise-native/-/request-promise-native-1.0.9.tgz", - "integrity": "sha512-wcW+sIUiWnKgNY0dqCpOZkUbF/I+YPi+f09JZIDa39Ec+q82CpSYniDp+ISgTTbKmnpJWASeJBPZmoxH84wt3g==", - "dev": true, - "requires": { - "request-promise-core": "1.1.4", - "stealthy-require": "^1.1.1", - "tough-cookie": "^2.3.3" - }, - "dependencies": { + }, "tough-cookie": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", - "dev": true, "requires": { "psl": "^1.1.28", "punycode": "^2.1.1" @@ -15454,15 +15537,6 @@ "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", "dev": true }, - "require-inject": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/require-inject/-/require-inject-1.4.4.tgz", - "integrity": "sha512-5Y5ctRN84+I4iOZO61gm+48tgP/6Hcd3VZydkaEM3MCuOvnHRsTJYQBOc01faI/Z9at5nsCAJVHhlfPA6Pc0Og==", - "dev": true, - "requires": { - "caller": "^1.0.1" - } - }, "require-main-filename": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", @@ -15526,9 +15600,9 @@ } }, "semver": { - "version": "7.3.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.4.tgz", - "integrity": "sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==", + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", "requires": { "lru-cache": "^6.0.0" } @@ -15600,9 +15674,9 @@ "integrity": "sha512-iVICrxOzCynf/SNaBQCw34eM9jROU/s5rzIhpOvzhzuYHfJR/DhZfDkXiZSgKXfgv26HT3Yni3AV/DGw0cGnnw==" }, "socks": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.5.1.tgz", - "integrity": "sha512-oZCsJJxapULAYJaEYBSzMcz8m3jqgGrHaGhkmU/o/PQfFWYWxkAaA0UMGImb6s6tEXfKi959X6VJjMMQ3P6TTQ==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.6.1.tgz", + "integrity": 
"sha512-kLQ9N5ucj8uIcxrDwjm0Jsqk06xdpBjGNQtpXy4Q8/QY2k+fY7nZH8CARy+hkbG+SGAovmzzuauCpBlb8FrnBA==", "requires": { "ip": "^1.1.5", "smart-buffer": "^4.1.0" @@ -15619,9 +15693,9 @@ } }, "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", "dev": true }, "source-map-support": { @@ -15632,6 +15706,14 @@ "requires": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } } }, "space-separated-tokens": { @@ -15641,46 +15723,17 @@ "dev": true }, "spawn-wrap": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-1.4.3.tgz", - "integrity": "sha512-IgB8md0QW/+tWqcavuFgKYR/qIRvJkRLPJDFaoXtLLUaVcCDK0+HeFTkmQHj3eprcYhc+gOl0aEA1w7qZlYezw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-2.0.0.tgz", + "integrity": "sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==", "dev": true, "requires": { - "foreground-child": "^1.5.6", - "mkdirp": "^0.5.0", - "os-homedir": "^1.0.1", - "rimraf": "^2.6.2", + "foreground-child": "^2.0.0", + "is-windows": "^1.0.2", + "make-dir": "^3.0.0", + "rimraf": "^3.0.0", "signal-exit": "^3.0.2", - "which": "^1.3.0" - }, - "dependencies": { - "mkdirp": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", - "dev": true, - "requires": { - "minimist": "^1.2.5" - } - }, - "rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", - "dev": true, - "requires": { - "glob": "^7.1.3" - } - }, - "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - } + "which": "^2.0.1" } }, "spdx-compare": { @@ -15727,9 +15780,9 @@ } }, "spdx-license-ids": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.7.tgz", - "integrity": "sha512-U+MTEOO0AiDzxwFvoa4JVnMV6mZlJKk2sBLt90s7G0Gd0Mlknc7kxEn3nuDPNZRta7O2uy8oLcZLVT+4sqNZHQ==" + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.9.tgz", + "integrity": "sha512-Ki212dKK4ogX+xDo4CtOZBVIwhsKBEfsEEcwmJfLQzirgc2jIWdzg40Unxz/HzEUqM1WFzVlQSMF9kZZ2HboLQ==" }, "spdx-osi": { "version": "3.0.0", @@ -15784,12 +15837,20 @@ } }, "stack-utils": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.4.tgz", - "integrity": "sha512-IPDJfugEGbfizBwBZRZ3xpccMdRyP5lqsBWXGQWimVjua/ccLCeMOAVjlc1R7LxFjo5sEDhyNIXd8mo/AiDS9w==", + "version": "2.0.3", + "resolved": 
"https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": "sha512-gL//fkxfWUsIlFL2Tl42Cl6+HFALEaB1FU76I/Fy+oZjRreP7OPMXFlGbxM7NQsI0ZpUfw76sHnv0WNYuTb7Iw==", "dev": true, "requires": { "escape-string-regexp": "^2.0.0" + }, + "dependencies": { + "escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true + } } }, "state-toggle": { @@ -15798,12 +15859,6 @@ "integrity": "sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ==", "dev": true }, - "stealthy-require": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", - "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=", - "dev": true - }, "string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", @@ -15837,22 +15892,22 @@ } }, "string.prototype.trimend": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz", - "integrity": "sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz", + "integrity": "sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==", "dev": true, "requires": { - "call-bind": "^1.0.0", + "call-bind": "^1.0.2", "define-properties": "^1.1.3" } }, "string.prototype.trimstart": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz", - "integrity": "sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz", + "integrity": "sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==", "dev": true, "requires": { - "call-bind": "^1.0.0", + "call-bind": "^1.0.2", "define-properties": "^1.1.3" } }, @@ -15905,21 +15960,23 @@ "dev": true }, "table": { - "version": "6.0.7", - "resolved": "https://registry.npmjs.org/table/-/table-6.0.7.tgz", - "integrity": "sha512-rxZevLGTUzWna/qBLObOe16kB2RTnnbhciwgPbMMlazz1yZGVEgnZK762xyVdVznhqxrfCeBMmMkgOOaPwjH7g==", + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/table/-/table-6.7.1.tgz", + "integrity": "sha512-ZGum47Yi6KOOFDE8m223td53ath2enHcYLgOCjGr5ngu8bdIARQk6mN/wRMv4yMRcHnCSnHbCEha4sobQx5yWg==", "dev": true, "requires": { - "ajv": "^7.0.2", - "lodash": "^4.17.20", + "ajv": "^8.0.1", + "lodash.clonedeep": "^4.5.0", + "lodash.truncate": "^4.4.2", "slice-ansi": "^4.0.0", - "string-width": "^4.2.0" + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0" }, "dependencies": { "ajv": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-7.1.0.tgz", - "integrity": "sha512-svS9uILze/cXbH0z2myCK2Brqprx/+JJYK5pHicT/GQiBfzzhUVAIT6MwqJg8y4xV/zoGsUeuPuwtoiKSGE15g==", + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.5.0.tgz", + "integrity": "sha512-Y2l399Tt1AguU3BPRP9Fn4eN+Or+StUGWCUpbnFyXSo8NZ9S4uj+AG2pjs5apK+ZMOwYOz1+a+VKvKH7CudXgQ==", "dev": true, "requires": { "fast-deep-equal": "^3.1.1", @@ 
-15947,9 +16004,9 @@ "dev": true }, "string-width": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", - "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", + "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", "dev": true, "requires": { "emoji-regex": "^8.0.0", @@ -15969,273 +16026,246 @@ } }, "tap": { - "version": "14.11.0", - "resolved": "https://registry.npmjs.org/tap/-/tap-14.11.0.tgz", - "integrity": "sha512-z8qnNFVyIjLh/bNoTLFRkEk09XZUDAZbCkz/BjvHHly3ao5H+y60gPnedALfheEjA6dA4tpp/mrKq2NWlMuq0A==", + "version": "15.0.9", + "resolved": "https://registry.npmjs.org/tap/-/tap-15.0.9.tgz", + "integrity": "sha512-bqY5SxEqYKRd37PIUfKBf9HMs/hklyl/fGXkuStr9rYTIGa0/icpSLsm6IVOmx2qT0/TliPNJ6OvS5kddJYHdg==", "dev": true, "requires": { - "@types/react": "^16.9.16", - "async-hook-domain": "^1.1.3", - "bind-obj-methods": "^2.0.0", - "browser-process-hrtime": "^1.0.0", + "@types/react": "^16.9.23", "chokidar": "^3.3.0", - "color-support": "^1.1.0", "coveralls": "^3.0.11", - "diff": "^4.0.1", - "esm": "^3.2.25", "findit": "^2.0.0", - "flow-remove-types": "^2.112.0", - "foreground-child": "^1.3.3", + "foreground-child": "^2.0.0", "fs-exists-cached": "^1.0.0", - "function-loop": "^1.0.2", "glob": "^7.1.6", - "import-jsx": "^3.1.0", - "ink": "^2.6.0", + "import-jsx": "^4.0.0", + "ink": "^2.7.1", "isexe": "^2.0.0", - "istanbul-lib-processinfo": "^1.0.0", + "istanbul-lib-processinfo": "^2.0.2", "jackspeak": "^1.4.0", + "libtap": "^1.1.1", "minipass": "^3.1.1", - "mkdirp": "^0.5.4", - "nyc": "^14.1.1", + "mkdirp": "^1.0.4", + "nyc": "^15.1.0", "opener": "^1.5.1", - "own-or": "^1.0.0", - "own-or-env": "^1.0.1", "react": "^16.12.0", - "rimraf": "^2.7.1", + "rimraf": "^3.0.0", "signal-exit": "^3.0.0", "source-map-support": "^0.5.16", - "stack-utils": "^1.0.3", "tap-mocha-reporter": "^5.0.0", "tap-parser": "^10.0.1", "tap-yaml": "^1.0.0", - "tcompare": "^3.0.0", - "treport": "^1.0.2", - "trivial-deferred": "^1.0.1", - "ts-node": "^8.5.2", - "typescript": "^3.7.2", - "which": "^2.0.2", - "write-file-atomic": "^3.0.1", - "yaml": "^1.7.2", - "yapool": "^1.0.0" + "tcompare": "^5.0.6", + "treport": "^2.0.2", + "which": "^2.0.2" }, "dependencies": { "@babel/code-frame": { - "version": "7.10.4", + "version": "7.12.13", "bundled": true, "dev": true, "requires": { - "@babel/highlight": "^7.10.4" + "@babel/highlight": "^7.12.13" } }, + "@babel/compat-data": { + "version": "7.14.0", + "bundled": true, + "dev": true + }, "@babel/core": { - "version": "7.10.5", + "version": "7.14.0", "bundled": true, "dev": true, "requires": { - "@babel/code-frame": "^7.10.4", - "@babel/generator": "^7.10.5", - "@babel/helper-module-transforms": "^7.10.5", - "@babel/helpers": "^7.10.4", - "@babel/parser": "^7.10.5", - "@babel/template": "^7.10.4", - "@babel/traverse": "^7.10.5", - "@babel/types": "^7.10.5", + "@babel/code-frame": "^7.12.13", + "@babel/generator": "^7.14.0", + "@babel/helper-compilation-targets": "^7.13.16", + "@babel/helper-module-transforms": "^7.14.0", + "@babel/helpers": "^7.14.0", + "@babel/parser": "^7.14.0", + "@babel/template": "^7.12.13", + "@babel/traverse": "^7.14.0", + "@babel/types": "^7.14.0", "convert-source-map": "^1.7.0", "debug": "^4.1.0", - "gensync": "^1.0.0-beta.1", + "gensync": "^1.0.0-beta.2", "json5": "^2.1.2", - "lodash": 
"^4.17.19", - "resolve": "^1.3.2", - "semver": "^5.4.1", + "semver": "^6.3.0", "source-map": "^0.5.0" - }, - "dependencies": { - "source-map": { - "version": "0.5.7", - "bundled": true, - "dev": true - } } }, "@babel/generator": { - "version": "7.10.5", + "version": "7.14.1", "bundled": true, "dev": true, "requires": { - "@babel/types": "^7.10.5", + "@babel/types": "^7.14.1", "jsesc": "^2.5.1", "source-map": "^0.5.0" - }, - "dependencies": { - "source-map": { - "version": "0.5.7", - "bundled": true, - "dev": true - } } }, "@babel/helper-annotate-as-pure": { - "version": "7.10.4", - "bundled": true, - "dev": true, - "requires": { - "@babel/types": "^7.10.4" - } - }, - "@babel/helper-builder-react-jsx": { - "version": "7.10.4", + "version": "7.12.13", "bundled": true, "dev": true, "requires": { - "@babel/helper-annotate-as-pure": "^7.10.4", - "@babel/types": "^7.10.4" + "@babel/types": "^7.12.13" } }, - "@babel/helper-builder-react-jsx-experimental": { - "version": "7.10.5", + "@babel/helper-compilation-targets": { + "version": "7.13.16", "bundled": true, "dev": true, "requires": { - "@babel/helper-annotate-as-pure": "^7.10.4", - "@babel/helper-module-imports": "^7.10.4", - "@babel/types": "^7.10.5" + "@babel/compat-data": "^7.13.15", + "@babel/helper-validator-option": "^7.12.17", + "browserslist": "^4.14.5", + "semver": "^6.3.0" } }, "@babel/helper-function-name": { - "version": "7.10.4", + "version": "7.12.13", "bundled": true, "dev": true, "requires": { - "@babel/helper-get-function-arity": "^7.10.4", - "@babel/template": "^7.10.4", - "@babel/types": "^7.10.4" + "@babel/helper-get-function-arity": "^7.12.13", + "@babel/template": "^7.12.13", + "@babel/types": "^7.12.13" } }, "@babel/helper-get-function-arity": { - "version": "7.10.4", + "version": "7.12.13", "bundled": true, "dev": true, "requires": { - "@babel/types": "^7.10.4" + "@babel/types": "^7.12.13" } }, "@babel/helper-member-expression-to-functions": { - "version": "7.10.5", + "version": "7.13.12", "bundled": true, "dev": true, "requires": { - "@babel/types": "^7.10.5" + "@babel/types": "^7.13.12" } }, "@babel/helper-module-imports": { - "version": "7.10.4", + "version": "7.13.12", "bundled": true, "dev": true, "requires": { - "@babel/types": "^7.10.4" + "@babel/types": "^7.13.12" } }, "@babel/helper-module-transforms": { - "version": "7.10.5", + "version": "7.14.0", "bundled": true, "dev": true, "requires": { - "@babel/helper-module-imports": "^7.10.4", - "@babel/helper-replace-supers": "^7.10.4", - "@babel/helper-simple-access": "^7.10.4", - "@babel/helper-split-export-declaration": "^7.10.4", - "@babel/template": "^7.10.4", - "@babel/types": "^7.10.5", - "lodash": "^4.17.19" + "@babel/helper-module-imports": "^7.13.12", + "@babel/helper-replace-supers": "^7.13.12", + "@babel/helper-simple-access": "^7.13.12", + "@babel/helper-split-export-declaration": "^7.12.13", + "@babel/helper-validator-identifier": "^7.14.0", + "@babel/template": "^7.12.13", + "@babel/traverse": "^7.14.0", + "@babel/types": "^7.14.0" } }, "@babel/helper-optimise-call-expression": { - "version": "7.10.4", + "version": "7.12.13", "bundled": true, "dev": true, "requires": { - "@babel/types": "^7.10.4" + "@babel/types": "^7.12.13" } }, "@babel/helper-plugin-utils": { - "version": "7.10.4", + "version": "7.13.0", "bundled": true, "dev": true }, "@babel/helper-replace-supers": { - "version": "7.10.4", + "version": "7.13.12", "bundled": true, "dev": true, "requires": { - "@babel/helper-member-expression-to-functions": "^7.10.4", - 
"@babel/helper-optimise-call-expression": "^7.10.4", - "@babel/traverse": "^7.10.4", - "@babel/types": "^7.10.4" + "@babel/helper-member-expression-to-functions": "^7.13.12", + "@babel/helper-optimise-call-expression": "^7.12.13", + "@babel/traverse": "^7.13.0", + "@babel/types": "^7.13.12" } }, "@babel/helper-simple-access": { - "version": "7.10.4", + "version": "7.13.12", "bundled": true, "dev": true, "requires": { - "@babel/template": "^7.10.4", - "@babel/types": "^7.10.4" + "@babel/types": "^7.13.12" } }, "@babel/helper-split-export-declaration": { - "version": "7.10.4", + "version": "7.12.13", "bundled": true, "dev": true, "requires": { - "@babel/types": "^7.10.4" + "@babel/types": "^7.12.13" } }, "@babel/helper-validator-identifier": { - "version": "7.10.4", + "version": "7.14.0", + "bundled": true, + "dev": true + }, + "@babel/helper-validator-option": { + "version": "7.12.17", "bundled": true, "dev": true }, "@babel/helpers": { - "version": "7.10.4", + "version": "7.14.0", "bundled": true, "dev": true, "requires": { - "@babel/template": "^7.10.4", - "@babel/traverse": "^7.10.4", - "@babel/types": "^7.10.4" + "@babel/template": "^7.12.13", + "@babel/traverse": "^7.14.0", + "@babel/types": "^7.14.0" } }, "@babel/highlight": { - "version": "7.10.4", + "version": "7.14.0", "bundled": true, "dev": true, "requires": { - "@babel/helper-validator-identifier": "^7.10.4", + "@babel/helper-validator-identifier": "^7.14.0", "chalk": "^2.0.0", "js-tokens": "^4.0.0" } }, "@babel/parser": { - "version": "7.10.5", + "version": "7.14.1", "bundled": true, "dev": true }, "@babel/plugin-proposal-object-rest-spread": { - "version": "7.10.4", + "version": "7.13.8", "bundled": true, "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.10.4", - "@babel/plugin-syntax-object-rest-spread": "^7.8.0", - "@babel/plugin-transform-parameters": "^7.10.4" + "@babel/compat-data": "^7.13.8", + "@babel/helper-compilation-targets": "^7.13.8", + "@babel/helper-plugin-utils": "^7.13.0", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-transform-parameters": "^7.13.0" } }, "@babel/plugin-syntax-jsx": { - "version": "7.10.4", + "version": "7.12.13", "bundled": true, "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.10.4" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-syntax-object-rest-spread": { @@ -16247,106 +16277,100 @@ } }, "@babel/plugin-transform-destructuring": { - "version": "7.10.4", + "version": "7.13.17", "bundled": true, "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.10.4" + "@babel/helper-plugin-utils": "^7.13.0" } }, "@babel/plugin-transform-parameters": { - "version": "7.10.5", + "version": "7.13.0", "bundled": true, "dev": true, "requires": { - "@babel/helper-get-function-arity": "^7.10.4", - "@babel/helper-plugin-utils": "^7.10.4" + "@babel/helper-plugin-utils": "^7.13.0" } }, "@babel/plugin-transform-react-jsx": { - "version": "7.10.4", + "version": "7.13.12", "bundled": true, "dev": true, "requires": { - "@babel/helper-builder-react-jsx": "^7.10.4", - "@babel/helper-builder-react-jsx-experimental": "^7.10.4", - "@babel/helper-plugin-utils": "^7.10.4", - "@babel/plugin-syntax-jsx": "^7.10.4" + "@babel/helper-annotate-as-pure": "^7.12.13", + "@babel/helper-module-imports": "^7.13.12", + "@babel/helper-plugin-utils": "^7.13.0", + "@babel/plugin-syntax-jsx": "^7.12.13", + "@babel/types": "^7.13.12" } }, "@babel/template": { - "version": "7.10.4", + "version": "7.12.13", "bundled": true, "dev": true, "requires": { - 
"@babel/code-frame": "^7.10.4", - "@babel/parser": "^7.10.4", - "@babel/types": "^7.10.4" + "@babel/code-frame": "^7.12.13", + "@babel/parser": "^7.12.13", + "@babel/types": "^7.12.13" } }, "@babel/traverse": { - "version": "7.10.5", + "version": "7.14.0", "bundled": true, "dev": true, "requires": { - "@babel/code-frame": "^7.10.4", - "@babel/generator": "^7.10.5", - "@babel/helper-function-name": "^7.10.4", - "@babel/helper-split-export-declaration": "^7.10.4", - "@babel/parser": "^7.10.5", - "@babel/types": "^7.10.5", + "@babel/code-frame": "^7.12.13", + "@babel/generator": "^7.14.0", + "@babel/helper-function-name": "^7.12.13", + "@babel/helper-split-export-declaration": "^7.12.13", + "@babel/parser": "^7.14.0", + "@babel/types": "^7.14.0", "debug": "^4.1.0", - "globals": "^11.1.0", - "lodash": "^4.17.19" + "globals": "^11.1.0" } }, "@babel/types": { - "version": "7.10.5", + "version": "7.14.1", "bundled": true, "dev": true, "requires": { - "@babel/helper-validator-identifier": "^7.10.4", - "lodash": "^4.17.19", + "@babel/helper-validator-identifier": "^7.14.0", "to-fast-properties": "^2.0.0" } }, - "@types/color-name": { - "version": "1.1.1", - "bundled": true, - "dev": true - }, "@types/prop-types": { "version": "15.7.3", "bundled": true, "dev": true }, "@types/react": { - "version": "16.9.43", + "version": "16.14.6", "bundled": true, "dev": true, "requires": { "@types/prop-types": "*", - "csstype": "^2.2.0" + "@types/scheduler": "*", + "csstype": "^3.0.2" } }, + "@types/scheduler": { + "version": "0.16.1", + "bundled": true, + "dev": true + }, "@types/yoga-layout": { "version": "1.9.2", "bundled": true, "dev": true }, "ansi-escapes": { - "version": "4.3.1", + "version": "4.3.2", "bundled": true, "dev": true, "requires": { - "type-fest": "^0.11.0" + "type-fest": "^0.21.3" } }, - "ansi-regex": { - "version": "5.0.0", - "bundled": true, - "dev": true - }, "ansi-styles": { "version": "3.2.1", "bundled": true, @@ -16375,6 +16399,32 @@ "bundled": true, "dev": true }, + "balanced-match": { + "version": "1.0.2", + "bundled": true, + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "bundled": true, + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "browserslist": { + "version": "4.16.6", + "bundled": true, + "dev": true, + "requires": { + "caniuse-lite": "^1.0.30001219", + "colorette": "^1.2.2", + "electron-to-chromium": "^1.3.723", + "escalade": "^3.1.1", + "node-releases": "^1.1.71" + } + }, "caller-callsite": { "version": "2.0.0", "bundled": true, @@ -16396,6 +16446,11 @@ "bundled": true, "dev": true }, + "caniuse-lite": { + "version": "1.0.30001223", + "bundled": true, + "dev": true + }, "cardinal": { "version": "2.1.1", "bundled": true, @@ -16450,38 +16505,45 @@ "bundled": true, "dev": true }, + "colorette": { + "version": "1.2.2", + "bundled": true, + "dev": true + }, + "commondir": { + "version": "1.0.1", + "bundled": true, + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "bundled": true, + "dev": true + }, "convert-source-map": { "version": "1.7.0", "bundled": true, "dev": true, "requires": { "safe-buffer": "~5.1.1" - }, - "dependencies": { - "safe-buffer": { - "version": "5.1.2", - "bundled": true, - "dev": true - } } }, "csstype": { - "version": "2.6.11", + "version": "3.0.8", "bundled": true, "dev": true }, "debug": { - "version": "4.1.1", + "version": "4.3.1", "bundled": true, "dev": true, "requires": { - "ms": "^2.1.1" + "ms": "2.1.2" } }, - "diff": { - "version": "4.0.2", - "resolved": 
"https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", - "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "electron-to-chromium": { + "version": "1.3.727", + "bundled": true, "dev": true }, "emoji-regex": { @@ -16489,6 +16551,11 @@ "bundled": true, "dev": true }, + "escalade": { + "version": "3.1.1", + "bundled": true, + "dev": true + }, "escape-string-regexp": { "version": "1.0.5", "bundled": true, @@ -16504,11 +16571,48 @@ "bundled": true, "dev": true }, + "find-cache-dir": { + "version": "3.3.1", + "bundled": true, + "dev": true, + "requires": { + "commondir": "^1.0.1", + "make-dir": "^3.0.2", + "pkg-dir": "^4.1.0" + } + }, + "find-up": { + "version": "4.1.0", + "bundled": true, + "dev": true, + "requires": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + } + }, + "fs.realpath": { + "version": "1.0.0", + "bundled": true, + "dev": true + }, "gensync": { - "version": "1.0.0-beta.1", + "version": "1.0.0-beta.2", "bundled": true, "dev": true }, + "glob": { + "version": "7.1.7", + "bundled": true, + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, "globals": { "version": "11.12.0", "bundled": true, @@ -16520,7 +16624,7 @@ "dev": true }, "import-jsx": { - "version": "3.1.0", + "version": "4.0.0", "bundled": true, "dev": true, "requires": { @@ -16529,9 +16633,26 @@ "@babel/plugin-transform-destructuring": "^7.5.0", "@babel/plugin-transform-react-jsx": "^7.3.0", "caller-path": "^2.0.0", - "resolve-from": "^3.0.0" + "find-cache-dir": "^3.2.0", + "make-dir": "^3.0.2", + "resolve-from": "^3.0.0", + "rimraf": "^3.0.0" + } + }, + "inflight": { + "version": "1.0.6", + "bundled": true, + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" } }, + "inherits": { + "version": "2.0.4", + "bundled": true, + "dev": true + }, "ink": { "version": "2.7.1", "bundled": true, @@ -16558,11 +16679,10 @@ }, "dependencies": { "ansi-styles": { - "version": "4.2.1", + "version": "4.3.0", "bundled": true, "dev": true, "requires": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" } }, @@ -16594,7 +16714,7 @@ "dev": true }, "supports-color": { - "version": "7.1.0", + "version": "7.2.0", "bundled": true, "dev": true, "requires": { @@ -16627,17 +16747,20 @@ "dev": true }, "json5": { - "version": "2.1.3", + "version": "2.2.0", "bundled": true, "dev": true, "requires": { "minimist": "^1.2.5" } }, - "lodash": { - "version": "4.17.19", + "locate-path": { + "version": "5.0.0", "bundled": true, - "dev": true + "dev": true, + "requires": { + "p-locate": "^4.1.0" + } }, "lodash.throttle": { "version": "4.1.1", @@ -16742,11 +16865,27 @@ "js-tokens": "^3.0.0 || ^4.0.0" } }, + "make-dir": { + "version": "3.1.0", + "bundled": true, + "dev": true, + "requires": { + "semver": "^6.0.0" + } + }, "mimic-fn": { "version": "2.1.0", "bundled": true, "dev": true }, + "minimatch": { + "version": "3.0.4", + "bundled": true, + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, "minimist": { "version": "1.2.5", "bundled": true, @@ -16758,22 +16897,6 @@ "dev": true, "requires": { "yallist": "^4.0.0" - }, - "dependencies": { - "yallist": { - "version": "4.0.0", - "bundled": true, - "dev": true - } - } - }, - "mkdirp": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": 
"sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", - "dev": true, - "requires": { - "minimist": "^1.2.5" } }, "ms": { @@ -16781,24 +16904,71 @@ "bundled": true, "dev": true }, + "node-releases": { + "version": "1.1.71", + "bundled": true, + "dev": true + }, "object-assign": { "version": "4.1.1", "bundled": true, "dev": true }, + "once": { + "version": "1.4.0", + "bundled": true, + "dev": true, + "requires": { + "wrappy": "1" + } + }, "onetime": { - "version": "5.1.0", + "version": "5.1.2", "bundled": true, "dev": true, "requires": { "mimic-fn": "^2.1.0" } }, - "path-parse": { - "version": "1.0.6", + "p-limit": { + "version": "2.3.0", + "bundled": true, + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "4.1.0", + "bundled": true, + "dev": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "p-try": { + "version": "2.2.0", + "bundled": true, + "dev": true + }, + "path-exists": { + "version": "4.0.0", + "bundled": true, + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", "bundled": true, "dev": true }, + "pkg-dir": { + "version": "4.2.0", + "bundled": true, + "dev": true, + "requires": { + "find-up": "^4.0.0" + } + }, "prop-types": { "version": "15.7.2", "bundled": true, @@ -16838,14 +17008,6 @@ "esprima": "~4.0.0" } }, - "resolve": { - "version": "1.17.0", - "bundled": true, - "dev": true, - "requires": { - "path-parse": "^1.0.6" - } - }, "resolve-from": { "version": "3.0.0", "bundled": true, @@ -16861,14 +17023,18 @@ } }, "rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "version": "3.0.2", + "bundled": true, "dev": true, "requires": { "glob": "^7.1.3" } }, + "safe-buffer": { + "version": "5.1.2", + "bundled": true, + "dev": true + }, "scheduler": { "version": "0.18.0", "bundled": true, @@ -16879,7 +17045,7 @@ } }, "semver": { - "version": "5.7.1", + "version": "6.3.0", "bundled": true, "dev": true }, @@ -16899,11 +17065,10 @@ }, "dependencies": { "ansi-styles": { - "version": "4.2.1", + "version": "4.3.0", "bundled": true, "dev": true, "requires": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" } }, @@ -16922,6 +17087,11 @@ } } }, + "source-map": { + "version": "0.5.7", + "bundled": true, + "dev": true + }, "string-length": { "version": "3.1.0", "bundled": true, @@ -16952,21 +17122,28 @@ } }, "string-width": { - "version": "4.2.0", + "version": "4.2.2", "bundled": true, "dev": true, "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.0" - } - }, - "strip-ansi": { - "version": "6.0.0", - "bundled": true, - "dev": true, - "requires": { - "ansi-regex": "^5.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.0", + "bundled": true, + "dev": true + }, + "strip-ansi": { + "version": "6.0.0", + "bundled": true, + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + } + } } }, "supports-color": { @@ -16978,7 +17155,7 @@ } }, "tap-parser": { - "version": "10.0.1", + "version": "10.1.0", "bundled": true, "dev": true, "requires": { @@ -17001,13 +17178,13 @@ "dev": true }, "treport": { - "version": "1.0.2", + "version": "2.0.2", "bundled": true, "dev": true, "requires": { "cardinal": "^2.1.1", "chalk": "^3.0.0", - "import-jsx": "^3.1.0", + "import-jsx": "^4.0.0", "ink": "^2.6.0", "ms": "^2.1.2", "string-length": "^3.1.0", @@ -17016,11 +17193,10 @@ }, 
"dependencies": { "ansi-styles": { - "version": "4.2.1", + "version": "4.3.0", "bundled": true, "dev": true, "requires": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" } }, @@ -17052,7 +17228,7 @@ "dev": true }, "supports-color": { - "version": "7.1.0", + "version": "7.2.0", "bundled": true, "dev": true, "requires": { @@ -17062,7 +17238,7 @@ } }, "type-fest": { - "version": "0.11.0", + "version": "0.21.3", "bundled": true, "dev": true }, @@ -17108,12 +17284,16 @@ "strip-ansi": "^6.0.0" }, "dependencies": { + "ansi-regex": { + "version": "5.0.0", + "bundled": true, + "dev": true + }, "ansi-styles": { - "version": "4.2.1", + "version": "4.3.0", "bundled": true, "dev": true, "requires": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" } }, @@ -17129,16 +17309,34 @@ "version": "1.1.4", "bundled": true, "dev": true + }, + "strip-ansi": { + "version": "6.0.0", + "bundled": true, + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + } } } }, + "wrappy": { + "version": "1.0.2", + "bundled": true, + "dev": true + }, + "yallist": { + "version": "4.0.0", + "bundled": true, + "dev": true + }, "yaml": { - "version": "1.10.0", + "version": "1.10.2", "bundled": true, "dev": true }, "yoga-layout-prebuilt": { - "version": "1.9.6", + "version": "1.10.0", "bundled": true, "dev": true, "requires": { @@ -17168,6 +17366,12 @@ "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", "dev": true + }, + "escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true } } }, @@ -17192,9 +17396,9 @@ } }, "tar": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.0.tgz", - "integrity": "sha512-DUCttfhsnLCjwoDoFcI+B2iJgYa93vBnDUATYEeRx6sntCTdN01VnqsIuTlALXla/LWooNg0yEGeB+Y8WdFxGA==", + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.2.tgz", + "integrity": "sha512-EwKEgqJ7nJoS+s8QfLYVGMDmAsj+StbI2AM/RTHeUSsOw6Z8bwNBRv5z3CY0m7laC5qUAqruLX5AhMuc5deY3Q==", "requires": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", @@ -17251,151 +17455,31 @@ } }, "tcompare": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/tcompare/-/tcompare-3.0.4.tgz", - "integrity": "sha512-Q3TitMVK59NyKgQyFh+857wTAUE329IzLDehuPgU4nF5e8g+EUQ+yUbjUy1/6ugiNnXztphT+NnqlCXolv9P3A==", + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/tcompare/-/tcompare-5.0.6.tgz", + "integrity": "sha512-OvO7omN/wkdsKzmOqr3sQFfLbghs/2X5mwSkcfgRiXZshfPnTsAs3IRf1RixR/Pff26qG/r9ogcZMpV0YdeGXg==", "dev": true, "requires": { - "diff-frag": "^1.0.1" + "diff": "^4.0.2" + }, + "dependencies": { + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true + } } }, "test-exclude": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-5.2.3.tgz", - "integrity": "sha512-M+oxtseCFO3EDtAaGH7iiej3CBkzXqFMbzqYAACdzKui4eZA+pq3tZEwChvOdNfa7xxy8BfbmgJSIr43cC/+2g==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": 
"sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", "dev": true, "requires": { - "glob": "^7.1.3", - "minimatch": "^3.0.4", - "read-pkg-up": "^4.0.0", - "require-main-filename": "^2.0.0" - }, - "dependencies": { - "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", - "dev": true, - "requires": { - "locate-path": "^3.0.0" - } - }, - "hosted-git-info": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", - "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", - "dev": true - }, - "load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - } - }, - "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", - "dev": true, - "requires": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" - } - }, - "normalize-package-data": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "dev": true, - "requires": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "dev": true, - "requires": { - "p-try": "^2.0.0" - } - }, - "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", - "dev": true, - "requires": { - "p-limit": "^2.0.0" - } - }, - "p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", - "dev": true - }, - "parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", - "dev": true, - "requires": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" - } - }, - "path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "dev": true, - "requires": { - "pify": "^3.0.0" - } - }, - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "dev": true - }, - "read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - 
"integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", - "dev": true, - "requires": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - } - }, - "read-pkg-up": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-4.0.0.tgz", - "integrity": "sha512-6etQSH7nJGsK0RbG/2TeDzZFa8shjQ1um+SwQQ5cwKy0dhSXdOncEhb1CPpvQG4h7FyOV6EB6YlV0yJvZQNAkA==", - "dev": true, - "requires": { - "find-up": "^3.0.0", - "read-pkg": "^3.0.0" - } - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - } + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" } }, "text-table": { @@ -17424,28 +17508,20 @@ } }, "tough-cookie": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz", - "integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", + "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", "dev": true, "requires": { - "ip-regex": "^2.1.0", - "psl": "^1.1.28", - "punycode": "^2.1.1" - }, - "dependencies": { - "ip-regex": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", - "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=", - "dev": true - } + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.1.2" } }, "tr46": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-2.0.2.tgz", - "integrity": "sha512-3n1qG+/5kg+jrbTzwAykB5yRYtQCTqOGKq5U5PE3b0a1/mzo6snDhjGS0zJVJunO0NrT3Dg1MLy5TjWP/UJppg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-2.1.0.tgz", + "integrity": "sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw==", "dev": true, "requires": { "punycode": "^2.1.1" @@ -17480,27 +17556,6 @@ "integrity": "sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==", "dev": true }, - "ts-node": { - "version": "8.10.2", - "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-8.10.2.tgz", - "integrity": "sha512-ISJJGgkIpDdBhWVu3jufsWpK3Rzo7bdiIXJjQc0ynKxVOVcg2oIrf2H2cejminGrptVc6q6/uynAHNCuWGbpVA==", - "dev": true, - "requires": { - "arg": "^4.1.0", - "diff": "^4.0.1", - "make-error": "^1.1.1", - "source-map-support": "^0.5.17", - "yn": "3.1.1" - }, - "dependencies": { - "diff": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", - "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", - "dev": true - } - } - }, "tsconfig-paths": { "version": "3.9.0", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz", @@ -17511,6 +17566,17 @@ "json5": "^1.0.1", "minimist": "^1.2.0", "strip-bom": "^3.0.0" + }, + "dependencies": { + "json5": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "dev": true, + "requires": { + "minimist": "^1.2.0" + } + } } }, "tunnel-agent": { @@ -17549,11 +17615,17 @@ "is-typedarray": "^1.0.0" } }, - 
"typescript": { - "version": "3.9.9", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.9.tgz", - "integrity": "sha512-kdMjTiekY+z/ubJCATUPlRDl39vXYiMV9iyeMuEuXZh2we6zz80uovNN2WlAxmmdE/Z/YQe+EbOEXB5RHEED3w==", - "dev": true + "unbox-primitive": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz", + "integrity": "sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "has-bigints": "^1.0.1", + "has-symbols": "^1.0.2", + "which-boxed-primitive": "^1.0.2" + } }, "unherit": { "version": "1.1.3", @@ -17618,9 +17690,9 @@ "dev": true }, "unist-util-is": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.0.4.tgz", - "integrity": "sha512-3dF39j/u423v4BBQrk1AQ2Ve1FxY5W3JKwXxVFzBODQ6WEvccguhgp802qQLKSnxPODE6WuRZtV+ohlUg4meBA==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz", + "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==", "dev": true }, "unist-util-position": { @@ -17630,9 +17702,9 @@ "dev": true }, "unist-util-remove": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/unist-util-remove/-/unist-util-remove-2.0.1.tgz", - "integrity": "sha512-YtuetK6o16CMfG+0u4nndsWpujgsHDHHLyE0yGpJLLn5xSjKeyGyzEBOI2XbmoUHCYabmNgX52uxlWoQhcvR7Q==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/unist-util-remove/-/unist-util-remove-2.1.0.tgz", + "integrity": "sha512-J8NYPyBm4baYLdCbjmf1bhPu45Cr1MWTm77qd9istEkzWpnN6O9tMsEbB2JhNnBCqGENRqEWomQ+He6au0B27Q==", "dev": true, "requires": { "unist-util-is": "^4.0.0" @@ -17677,6 +17749,12 @@ "unist-util-is": "^4.0.0" } }, + "universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "dev": true + }, "uri-js": { "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", @@ -17705,9 +17783,9 @@ "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" }, "v8-compile-cache": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.2.0.tgz", - "integrity": "sha512-gTpR5XQNKFwOd4clxfnhaqvfqMpqEwr4tOtCyz4MtYZX2JYhfr1JvBFKdS+7K/9rfpZR3VLX+YWBbKoxCgS43Q==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", + "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", "dev": true }, "validate-npm-package-license": { @@ -17765,12 +17843,6 @@ "unist-util-stringify-position": "^2.0.0" } }, - "vlq": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/vlq/-/vlq-0.2.3.tgz", - "integrity": "sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==", - "dev": true - }, "w3c-hr-time": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz", @@ -17841,12 +17913,12 @@ "dev": true }, "whatwg-url": { - "version": "8.4.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.4.0.tgz", - "integrity": "sha512-vwTUFf6V4zhcPkWp/4CQPr1TW9Ml6SF4lVyaIMBdJw5i6qUUJ1QWM4Z6YYVkfka0OUIzVo/0aNtGVGk256IKWw==", + 
"version": "8.5.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.5.0.tgz", + "integrity": "sha512-fy+R77xWv0AiqfLl4nuGUlQ3/6b5uNfQ4WAbGQVMYshCTCCPK9psC1nWh3XHuxGVCtlcDDQPQW1csmmIQo+fwg==", "dev": true, "requires": { - "lodash.sortby": "^4.7.0", + "lodash": "^4.7.0", "tr46": "^2.0.2", "webidl-conversions": "^6.1.0" } @@ -17859,18 +17931,25 @@ "isexe": "^2.0.0" } }, + "which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dev": true, + "requires": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + } + }, "which-module": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", "dev": true }, - "which-pm-runs": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/which-pm-runs/-/which-pm-runs-1.0.0.tgz", - "integrity": "sha1-Zws6+8VS4LVd9rd4DKdGFfI60cs=", - "dev": true - }, "wide-align": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", @@ -17934,9 +18013,9 @@ } }, "ws": { - "version": "7.4.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.3.tgz", - "integrity": "sha512-hr6vCR76GsossIRsr8OLR9acVVm1jyfEWvhbNjtgPOrfvAlKzvyeg/P6r8RuDjRyrcQoPQT7K0DGEPc7Ae6jzA==", + "version": "7.4.6", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.6.tgz", + "integrity": "sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==", "dev": true, "requires": {} }, @@ -17959,9 +18038,9 @@ "dev": true }, "y18n": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz", - "integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", + "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", "dev": true }, "yallist": { @@ -17970,9 +18049,9 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "yaml": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.0.tgz", - "integrity": "sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg==", + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", "dev": true }, "yapool": { @@ -17982,87 +18061,64 @@ "dev": true }, "yargs": { - "version": "13.3.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", - "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", + "version": "15.4.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", + "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", "dev": true, "requires": { - "cliui": "^5.0.0", - "find-up": "^3.0.0", + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", "get-caller-file": "^2.0.1", "require-directory": "^2.1.1", "require-main-filename": "^2.0.0", 
"set-blocking": "^2.0.0", - "string-width": "^3.0.0", + "string-width": "^4.2.0", "which-module": "^2.0.0", "y18n": "^4.0.0", - "yargs-parser": "^13.1.2" + "yargs-parser": "^18.1.2" }, "dependencies": { "ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", "dev": true }, - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "requires": { - "color-convert": "^1.9.0" - } - }, "cliui": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", - "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", "dev": true, "requires": { - "string-width": "^3.1.0", - "strip-ansi": "^5.2.0", - "wrap-ansi": "^5.1.0" + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" } }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, "requires": { - "color-name": "1.1.3" + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" } }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", - "dev": true - }, - "emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", - "dev": true - }, - "find-up": { + "is-fullwidth-code-point": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", - "dev": true, - "requires": { - "locate-path": "^3.0.0" - } + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true }, "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": 
"sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, "requires": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" + "p-locate": "^4.1.0" } }, "p-limit": { @@ -18075,12 +18131,12 @@ } }, "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dev": true, "requires": { - "p-limit": "^2.0.0" + "p-limit": "^2.2.0" } }, "p-try": { @@ -18089,55 +18145,55 @@ "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "dev": true }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", + "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", "dev": true, "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" } }, "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", "dev": true, "requires": { - "ansi-regex": "^4.1.0" + "ansi-regex": "^5.0.0" } }, "wrap-ansi": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", - "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", "dev": true, "requires": { - "ansi-styles": "^3.2.0", - "string-width": "^3.0.0", - "strip-ansi": "^5.0.0" + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" } } } }, "yargs-parser": { - "version": "13.1.2", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", - "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", "dev": true, "requires": { "camelcase": "^5.0.0", "decamelize": "^1.2.0" } }, - "yn": { - 
"version": "3.1.1", - "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", - "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", - "dev": true - }, "zwitch": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-1.0.5.tgz", diff --git a/package.json b/package.json index 8ed3c32b88900..b728f9f88d1ec 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,18 @@ { - "version": "7.6.0", + "version": "7.20.3", "name": "npm", "description": "a package manager for JavaScript", + "workspaces": [ + "docs", + "packages/*" + ], + "files": [ + "bin", + "docs/content/**/*.md", + "docs/output/**/*.html", + "lib", + "man" + ], "keywords": [ "install", "modules", @@ -42,40 +53,42 @@ "./package.json": "./package.json" }, "dependencies": { - "@npmcli/arborist": "^2.2.5", + "@npmcli/arborist": "^2.8.0", "@npmcli/ci-detect": "^1.2.0", - "@npmcli/config": "^1.2.9", - "@npmcli/run-script": "^1.8.3", + "@npmcli/config": "^2.2.0", + "@npmcli/package-json": "^1.0.1", + "@npmcli/run-script": "^1.8.5", "abbrev": "~1.1.1", "ansicolors": "~0.3.2", "ansistyles": "~0.1.3", "archy": "~1.0.0", - "byte-size": "^7.0.0", - "cacache": "^15.0.5", + "byte-size": "^7.0.1", + "cacache": "^15.2.0", "chalk": "^4.1.0", "chownr": "^2.0.0", "cli-columns": "^3.1.2", "cli-table3": "^0.6.0", "columnify": "~1.5.4", - "glob": "^7.1.4", + "glob": "^7.1.7", "graceful-fs": "^4.2.6", - "hosted-git-info": "^3.0.8", + "hosted-git-info": "^4.0.2", "ini": "^2.0.0", - "init-package-json": "^2.0.2", + "init-package-json": "^2.0.3", "is-cidr": "^4.0.2", "json-parse-even-better-errors": "^2.3.1", "leven": "^3.1.0", - "libnpmaccess": "^4.0.1", - "libnpmdiff": "^2.0.3", - "libnpmfund": "^1.0.2", - "libnpmhook": "^6.0.1", - "libnpmorg": "^2.0.1", + "libnpmaccess": "^4.0.2", + "libnpmdiff": "^2.0.4", + "libnpmexec": "^2.0.0", + "libnpmfund": "^1.1.0", + "libnpmhook": "^6.0.2", + "libnpmorg": "^2.0.2", "libnpmpack": "^2.0.1", - "libnpmpublish": "^4.0.0", - "libnpmsearch": "^3.1.0", - "libnpmteam": "^2.0.2", - "libnpmversion": "^1.0.11", - "make-fetch-happen": "^8.0.14", + "libnpmpublish": "^4.0.1", + "libnpmsearch": "^3.1.1", + "libnpmteam": "^2.0.3", + "libnpmversion": "^1.2.1", + "make-fetch-happen": "^9.0.4", "minipass": "^3.1.3", "minipass-pipeline": "^1.2.4", "mkdirp": "^1.0.4", @@ -83,25 +96,25 @@ "ms": "^2.1.2", "node-gyp": "^7.1.2", "nopt": "^5.0.0", - "npm-audit-report": "^2.1.4", - "npm-package-arg": "^8.1.1", - "npm-pick-manifest": "^6.1.0", - "npm-profile": "^5.0.2", - "npm-registry-fetch": "^9.0.0", + "npm-audit-report": "^2.1.5", + "npm-package-arg": "^8.1.5", + "npm-pick-manifest": "^6.1.1", + "npm-profile": "^5.0.3", + "npm-registry-fetch": "^11.0.0", "npm-user-validate": "^1.0.1", - "npmlog": "~4.1.2", + "npmlog": "^5.0.0", "opener": "^1.5.2", - "pacote": "^11.2.7", + "pacote": "^11.3.5", "parse-conflict-json": "^1.1.1", "qrcode-terminal": "^0.12.0", "read": "~1.0.7", "read-package-json": "^3.0.1", - "read-package-json-fast": "^2.0.2", + "read-package-json-fast": "^2.0.3", "readdir-scoped-modules": "^1.1.0", "rimraf": "^3.0.2", - "semver": "^7.3.4", + "semver": "^7.3.5", "ssri": "^8.0.1", - "tar": "^6.1.0", + "tar": "^6.1.2", "text-table": "~0.2.0", "tiny-relative-date": "^1.3.0", "treeverse": "^1.0.4", @@ -113,6 +126,7 @@ "@npmcli/arborist", "@npmcli/ci-detect", "@npmcli/config", + "@npmcli/package-json", "@npmcli/run-script", "abbrev", "ansicolors", @@ -135,6 +149,7 @@ "leven", "libnpmaccess", "libnpmdiff", + "libnpmexec", "libnpmfund", 
"libnpmhook", "libnpmorg", @@ -178,19 +193,13 @@ "write-file-atomic" ], "devDependencies": { - "@mdx-js/mdx": "^1.6.22", - "cmark-gfm": "^0.8.5", - "eslint": "^7.19.0", - "eslint-plugin-import": "^2.22.1", + "eslint": "^7.31.0", + "eslint-plugin-import": "^2.23.4", "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^4.3.1", + "eslint-plugin-promise": "^5.1.0", "eslint-plugin-standard": "^5.0.0", - "jsdom": "^16.4.0", - "licensee": "^8.1.0", - "marked-man": "^0.7.0", - "require-inject": "^1.4.4", - "tap": "^14.11.0", - "yaml": "^1.10.0" + "licensee": "^8.2.0", + "tap": "^15.0.9" }, "scripts": { "dumpconf": "env | grep npm | sort | uniq", @@ -199,26 +208,26 @@ "test": "tap", "check-coverage": "tap", "snap": "tap", + "postsnap": "make -s docs/content/*/*.md", "test:nocleanup": "NO_TEST_CLEANUP=1 npm run test --", "sudotest": "sudo npm run test --", "sudotest:nocleanup": "sudo NO_TEST_CLEANUP=1 npm run test --", "posttest": "npm run lint", "eslint": "eslint", - "lint": "npm run eslint -- test/lib test/bin \"lib/**/*.js\"", + "lint": "npm run eslint -- test/lib test/bin bin lib scripts docs smoke-tests", "lintfix": "npm run lint -- --fix", "prelint": "rimraf test/npm_cache*", - "resetdeps": "bash scripts/resetdeps.sh" + "resetdeps": "bash scripts/resetdeps.sh", + "smoke-tests": "tap smoke-tests/index.js" }, - "//": [ - "XXX temporarily only run unit tests while v7 beta is in progress", - "Remove the 'files' below once we're done porting old tests over" - ], "tap": { + "test-env": [ + "LC_ALL=sk" + ], "color": 1, "files": "test/{lib,bin}", "coverage-map": "test/coverage-map.js", "check-coverage": true, - "esm": false, "timeout": 600 }, "license": "Artistic-2.0", diff --git a/packages/libnpmdiff/.eslintrc.json b/packages/libnpmdiff/.eslintrc.json new file mode 100644 index 0000000000000..6232a8f82187f --- /dev/null +++ b/packages/libnpmdiff/.eslintrc.json @@ -0,0 +1,207 @@ +{ + "parserOptions": { + "ecmaVersion": 2018, + "ecmaFeatures": {}, + "sourceType": "script" + }, + + "env": { + "es6": true, + "node": true + }, + + "plugins": [ + "import", + "node", + "promise", + "standard" + ], + + "globals": { + "document": "readonly", + "navigator": "readonly", + "window": "readonly" + }, + + "rules": { + "accessor-pairs": "error", + "array-bracket-spacing": ["error", "never"], + "arrow-spacing": ["error", { "before": true, "after": true }], + "block-spacing": ["error", "always"], + "brace-style": ["error", "1tbs", { "allowSingleLine": false }], + "camelcase": ["error", { "properties": "never" }], + "comma-dangle": ["error", { + "arrays": "always-multiline", + "objects": "always-multiline", + "imports": "always-multiline", + "exports": "always-multiline", + "functions": "never" + }], + "comma-spacing": ["error", { "before": false, "after": true }], + "comma-style": ["error", "last"], + "computed-property-spacing": ["error", "never"], + "constructor-super": "error", + "curly": ["error", "multi-or-nest"], + "dot-location": ["error", "property"], + "dot-notation": ["error", { "allowKeywords": true }], + "eol-last": "error", + "eqeqeq": ["error", "always", { "null": "ignore" }], + "func-call-spacing": ["error", "never"], + "generator-star-spacing": ["error", { "before": true, "after": true }], + "handle-callback-err": ["error", "^(err|error)$" ], + "indent": ["error", 2, { + "SwitchCase": 1, + "VariableDeclarator": 1, + "outerIIFEBody": 1, + "MemberExpression": 1, + "FunctionDeclaration": { "parameters": 1, "body": 1 }, + "FunctionExpression": { "parameters": 1, "body": 1 }, + "CallExpression": { 
"arguments": 1 }, + "ArrayExpression": 1, + "ObjectExpression": 1, + "ImportDeclaration": 1, + "flatTernaryExpressions": true, + "ignoreComments": false, + "ignoredNodes": ["TemplateLiteral *"] + }], + "key-spacing": ["error", { "beforeColon": false, "afterColon": true }], + "keyword-spacing": ["error", { "before": true, "after": true }], + "lines-between-class-members": ["error", "always", { "exceptAfterSingleLine": true }], + "new-cap": ["error", { "newIsCap": true, "capIsNew": false, "properties": true }], + "new-parens": "error", + "no-array-constructor": "error", + "no-async-promise-executor": "error", + "no-caller": "error", + "no-case-declarations": "error", + "no-class-assign": "error", + "no-compare-neg-zero": "error", + "no-cond-assign": "off", + "no-const-assign": "error", + "no-constant-condition": ["error", { "checkLoops": false }], + "no-control-regex": "error", + "no-debugger": "error", + "no-delete-var": "error", + "no-dupe-args": "error", + "no-dupe-class-members": "error", + "no-dupe-keys": "error", + "no-duplicate-case": "error", + "no-empty-character-class": "error", + "no-empty-pattern": "error", + "no-eval": "error", + "no-ex-assign": "error", + "no-extend-native": "error", + "no-extra-bind": "error", + "no-extra-boolean-cast": "error", + "no-extra-parens": ["error", "functions"], + "no-fallthrough": "error", + "no-floating-decimal": "error", + "no-func-assign": "error", + "no-global-assign": "error", + "no-implied-eval": "error", + "no-inner-declarations": ["error", "functions"], + "no-invalid-regexp": "error", + "no-irregular-whitespace": "error", + "no-iterator": "error", + "no-labels": ["error", { "allowLoop": true, "allowSwitch": false }], + "no-lone-blocks": "error", + "no-misleading-character-class": "error", + "no-prototype-builtins": "error", + "no-useless-catch": "error", + "no-mixed-operators": "off", + "no-mixed-spaces-and-tabs": "error", + "no-multi-spaces": "error", + "no-multi-str": "error", + "no-multiple-empty-lines": ["error", { "max": 1, "maxEOF": 0 }], + "no-negated-in-lhs": "error", + "no-new": "off", + "no-new-func": "error", + "no-new-object": "error", + "no-new-require": "error", + "no-new-symbol": "error", + "no-new-wrappers": "error", + "no-obj-calls": "error", + "no-octal": "error", + "no-octal-escape": "error", + "no-path-concat": "error", + "no-proto": "error", + "no-redeclare": ["error", { "builtinGlobals": false }], + "no-regex-spaces": "error", + "no-return-assign": "off", + "no-self-assign": "off", + "no-self-compare": "error", + "no-sequences": "error", + "no-shadow-restricted-names": "error", + "no-sparse-arrays": "error", + "no-tabs": "error", + "no-template-curly-in-string": "error", + "no-this-before-super": "error", + "no-throw-literal": "off", + "no-trailing-spaces": "error", + "no-undef": "error", + "no-undef-init": "error", + "no-unexpected-multiline": "error", + "no-unmodified-loop-condition": "error", + "no-unneeded-ternary": ["error", { "defaultAssignment": false }], + "no-unreachable": "error", + "no-unsafe-finally": 0, + "no-unsafe-negation": "error", + "no-unused-expressions": ["error", { "allowShortCircuit": true, "allowTernary": true, "allowTaggedTemplates": true }], + "no-unused-vars": ["error", { "vars": "all", "args": "none", "ignoreRestSiblings": true }], + "no-use-before-define": ["error", { "functions": false, "classes": false, "variables": false }], + "no-useless-call": "error", + "no-useless-computed-key": "error", + "no-useless-constructor": "error", + "no-useless-escape": "error", + "no-useless-rename": 
"error", + "no-useless-return": "error", + "no-void": "error", + "no-whitespace-before-property": "error", + "no-with": "error", + "nonblock-statement-body-position": [2, "below"], + "object-curly-newline": "off", + "object-curly-spacing": "off", + "object-property-newline": ["error", { "allowMultiplePropertiesPerLine": true }], + "one-var": ["error", { "initialized": "never" }], + "operator-linebreak": "off", + "padded-blocks": ["error", { "blocks": "never", "switches": "never", "classes": "never" }], + "prefer-const": ["error", {"destructuring": "all"}], + "prefer-promise-reject-errors": "error", + "quote-props": ["error", "as-needed"], + "quotes": ["error", "single", { "avoidEscape": true, "allowTemplateLiterals": true }], + "rest-spread-spacing": ["error", "never"], + "semi": ["error", "never"], + "semi-spacing": ["error", { "before": false, "after": true }], + "space-before-blocks": ["error", "always"], + "space-before-function-paren": ["error", "always"], + "space-in-parens": ["error", "never"], + "space-infix-ops": "error", + "space-unary-ops": ["error", { "words": true, "nonwords": false }], + "spaced-comment": ["error", "always", { + "line": { "markers": ["*package", "!", "/", ",", "="] }, + "block": { "balanced": true, "markers": ["*package", "!", ",", ":", "::", "flow-include"], "exceptions": ["*"] } + }], + "symbol-description": "error", + "template-curly-spacing": ["error", "never"], + "template-tag-spacing": ["error", "never"], + "unicode-bom": ["error", "never"], + "use-isnan": "error", + "valid-typeof": ["error", { "requireStringLiterals": true }], + "wrap-iife": ["error", "any", { "functionPrototypeMethods": true }], + "yield-star-spacing": ["error", "both"], + "yoda": ["error", "never"], + + "import/export": "error", + "import/first": "error", + "import/no-absolute-path": ["error", { "esmodule": true, "commonjs": true, "amd": false }], + "import/no-duplicates": "error", + "import/no-named-default": "error", + "import/no-webpack-loader-syntax": "error", + + "node/no-deprecated-api": "error", + "node/process-exit-as-throw": "error", + + "promise/param-names": "off", + + "standard/no-callback-literal": "error" + } +} diff --git a/packages/libnpmdiff/.gitignore b/packages/libnpmdiff/.gitignore new file mode 100644 index 0000000000000..0aba557bf2857 --- /dev/null +++ b/packages/libnpmdiff/.gitignore @@ -0,0 +1,99 @@ +# Logs +logs +*.log +npm-debug.log* +lerna-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage +*.lcov + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# TypeScript v1 declaration files +typings/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variables file +.env +.env.test + +# 
parcel-bundler cache (https://parceljs.org/) +.cache + +# next.js build output +.next + +# nuxt.js build output +.nuxt + +# gatsby files +.cache/ +public + +# vuepress build output +.vuepress/dist + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# Editors +Session.vim diff --git a/node_modules/libnpmdiff/CHANGELOG.md b/packages/libnpmdiff/CHANGELOG.md similarity index 100% rename from node_modules/libnpmdiff/CHANGELOG.md rename to packages/libnpmdiff/CHANGELOG.md diff --git a/packages/libnpmdiff/LICENSE b/packages/libnpmdiff/LICENSE new file mode 100644 index 0000000000000..d3a1cdfd217b6 --- /dev/null +++ b/packages/libnpmdiff/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) GitHub Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/libnpmdiff/README.md b/packages/libnpmdiff/README.md similarity index 94% rename from node_modules/libnpmdiff/README.md rename to packages/libnpmdiff/README.md index d1cf53fc6c5c7..6c60f714bb776 100644 --- a/node_modules/libnpmdiff/README.md +++ b/packages/libnpmdiff/README.md @@ -61,7 +61,7 @@ hesitate to jump in if you'd like to, or even ask us questions if something isn't clear. All participants and maintainers in this project are expected to follow the -[npm Code of Conduct](https://www.npmjs.com/policies/conduct), and just +[npm Code of Conduct](https://docs.npmjs.com/policies/conduct), and just generally be excellent to each other. Please refer to the [Changelog](CHANGELOG.md) for project history details, too. @@ -86,7 +86,7 @@ Fetches the registry tarballs and compare files between a spec `a` and spec `b`. - `diffSrcPrefix <String>`: Prefix to be used in the filenames from `a`. Defaults to `a/`. - `diffDstPrefix <String>`: Prefix to be used in the filenames from `b`. Defaults to `b/`. - `diffText <Boolean>`: Should treat all files as text and try to print diff for binary files. Defaults to `false`. -- ...`cache`, `registry` and other common options accepted by [pacote](https://github.com/npm/pacote#options) +- ...`cache`, `registry`, `where` and other common options accepted by [pacote](https://github.com/npm/pacote#options) Returns a `Promise` that fullfils with a `String` containing the resulting patch diffs. 
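
The README hunk above documents the `libnpmdiff` entry point — exactly two specs in, a `Promise` of a patch `String` out — and the pacote options it forwards, now including `where`. A minimal usage sketch under those assumptions follows; the package specs mirror the ones used in the package's own tests, and the option values and error handling are illustrative, not part of this change:

```js
// Minimal sketch of calling libnpmdiff as documented in the README hunk above.
// Assumes libnpmdiff is installed; specs and option values are illustrative.
const libnpmdiff = require('libnpmdiff')

const run = async () => {
  const patch = await libnpmdiff(
    ['abbrev@1.0.3', 'abbrev@1.0.4'], // exactly two specs to compare
    {
      diffSrcPrefix: 'a/', // filename prefix for the first spec (default 'a/')
      diffDstPrefix: 'b/', // filename prefix for the second spec (default 'b/')
      diffText: false, // leave binary files out of the printed patch
      where: process.cwd(), // base dir used when resolving file: specs
    }
  )
  console.log(patch) // the promise fulfills with the patch as a String
}

run().catch(err => {
  console.error(err)
  process.exitCode = 1
})
```

Passing fewer or more than two specs rejects with the "needs two arguments to compare" error exercised in `test/index.js` further down in this patch.
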
diff --git a/node_modules/libnpmdiff/index.js b/packages/libnpmdiff/index.js similarity index 89% rename from node_modules/libnpmdiff/index.js rename to packages/libnpmdiff/index.js index 0bfc8734ef639..73dc3ee64e3ce 100644 --- a/node_modules/libnpmdiff/index.js +++ b/packages/libnpmdiff/index.js @@ -1,6 +1,7 @@ const pacote = require('pacote') const formatDiff = require('./lib/format-diff.js') +const getTarball = require('./lib/tarball.js') const untar = require('./lib/untar.js') const argsError = () => @@ -25,8 +26,8 @@ const diff = async (specs, opts = {}) => { // fetches tarball using pacote const [a, b] = await Promise.all([ - pacote.tarball(aManifest._resolved, opts), - pacote.tarball(bManifest._resolved, opts), + getTarball(aManifest, opts), + getTarball(bManifest, opts), ]) // read all files diff --git a/node_modules/libnpmdiff/lib/format-diff.js b/packages/libnpmdiff/lib/format-diff.js similarity index 100% rename from node_modules/libnpmdiff/lib/format-diff.js rename to packages/libnpmdiff/lib/format-diff.js diff --git a/node_modules/libnpmdiff/lib/should-print-patch.js b/packages/libnpmdiff/lib/should-print-patch.js similarity index 100% rename from node_modules/libnpmdiff/lib/should-print-patch.js rename to packages/libnpmdiff/lib/should-print-patch.js diff --git a/packages/libnpmdiff/lib/tarball.js b/packages/libnpmdiff/lib/tarball.js new file mode 100644 index 0000000000000..0c8fb177a3885 --- /dev/null +++ b/packages/libnpmdiff/lib/tarball.js @@ -0,0 +1,33 @@ +const { relative } = require('path') + +const npa = require('npm-package-arg') +const pkgContents = require('@npmcli/installed-package-contents') +const pacote = require('pacote') +const { tarCreateOptions } = pacote.DirFetcher +const tar = require('tar') + +// returns a simplified tarball when reading files from node_modules folder, +// thus avoiding running the prepare scripts and the extra logic from packlist +const nodeModulesTarball = (manifest, opts) => + pkgContents({ path: manifest._resolved, depth: 1 }) + .then(files => + files.map(file => relative(manifest._resolved, file)) + ) + .then(files => + tar.c(tarCreateOptions(manifest), files).concat() + ) + +const tarball = (manifest, opts) => { + const resolved = manifest._resolved + const where = opts.where || process.cwd() + + const fromNodeModules = npa(resolved).type === 'directory' + && /node_modules[\\/](@[^\\/]+\/)?[^\\/]+[\\/]?$/.test(relative(where, resolved)) + + if (fromNodeModules) + return nodeModulesTarball(manifest, opts) + + return pacote.tarball(manifest._resolved, opts) +} + +module.exports = tarball diff --git a/node_modules/libnpmdiff/lib/untar.js b/packages/libnpmdiff/lib/untar.js similarity index 100% rename from node_modules/libnpmdiff/lib/untar.js rename to packages/libnpmdiff/lib/untar.js diff --git a/node_modules/libnpmdiff/package.json b/packages/libnpmdiff/package.json similarity index 82% rename from node_modules/libnpmdiff/package.json rename to packages/libnpmdiff/package.json index fab4293e9374e..53fd5d4befd5e 100644 --- a/node_modules/libnpmdiff/package.json +++ b/packages/libnpmdiff/package.json @@ -1,6 +1,6 @@ { "name": "libnpmdiff", - "version": "2.0.3", + "version": "2.0.4", "description": "The registry diff", "repository": "https://github.com/npm/libnpmdiff", "files": [ @@ -46,19 +46,21 @@ ] }, "devDependencies": { - "eslint": "^7.18.0", - "eslint-plugin-import": "^2.22.1", + "eslint": "^7.28.0", + "eslint-plugin-import": "^2.23.4", "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^4.2.1", + "eslint-plugin-promise": 
"^5.1.0", "eslint-plugin-standard": "^5.0.0", - "tap": "^14.11.0" + "tap": "^15.0.9" }, "dependencies": { "@npmcli/disparity-colors": "^1.0.1", + "@npmcli/installed-package-contents": "^1.0.7", "binary-extensions": "^2.2.0", "diff": "^5.0.0", "minimatch": "^3.0.4", - "pacote": "^11.2.3", + "npm-package-arg": "^8.1.4", + "pacote": "^11.3.4", "tar": "^6.1.0" } } diff --git a/packages/libnpmdiff/tap-snapshots/test/format-diff.js.test.cjs b/packages/libnpmdiff/tap-snapshots/test/format-diff.js.test.cjs new file mode 100644 index 0000000000000..f735d8925820a --- /dev/null +++ b/packages/libnpmdiff/tap-snapshots/test/format-diff.js.test.cjs @@ -0,0 +1,152 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/format-diff.js TAP added file > should output expected added file diff result 1`] = ` +diff --git a/foo.js b/foo.js +new file mode 100755 +index v1.0.0..v2.0.0 +--- a/foo.js ++++ b/foo.js +@@ -0,0 +1,2 @@ ++"use strict" ++module.exports = "foo" +` + +exports[`test/format-diff.js TAP binary file > should output expected bin file diff result 1`] = ` +diff --git a/foo.jpg b/foo.jpg +index v1.0.0..v2.0.0 100644 +--- a/foo.jpg ++++ b/foo.jpg +` + +exports[`test/format-diff.js TAP changed file mode > should output expected changed file mode diff result 1`] = ` +diff --git a/foo.js b/foo.js +old mode 100644 +new mode 100755 +index v1.0.0..v2.0.0 +--- a/foo.js ++++ b/foo.js +` + +exports[`test/format-diff.js TAP colored output > should output expected colored diff result 1`] = ` +diff --git a/foo.js b/foo.js +index v1.0.0..v2.0.0 100644 +--- a/foo.js ++++ b/foo.js +@@ -1,2 +1,2 @@ + "use strict" +-module.exports = "foo" ++module.exports = "foobar" +` + +exports[`test/format-diff.js TAP diff options > should output expected diff result 1`] = ` +diff --git before/foo.js after/foo.js +index v1.0.0..v2.0.0 100644 +--- before/foo.js ++++ after/foo.js +@@ -4,4 +4,6 @@ + const c = "c" ++const d = "d" + module.exports = () => a+ + b+ +-c ++c+ ++d +` + +exports[`test/format-diff.js TAP diffUnified=0 > should output no context lines in output 1`] = ` +diff --git a/foo.js b/foo.js +index v1.0.0..v2.0.0 100644 +--- a/foo.js ++++ b/foo.js +@@ -3,2 +3,3 @@ +-const b = "b" +-const c = "c" ++ const b = "b" ++ const c = "c" ++ const d = "d" +@@ -7,1 +8,2 @@ +-c ++c+ ++d +` + +exports[`test/format-diff.js TAP format multiple files patch > should output expected result for multiple files 1`] = ` +diff --git a/foo.js b/foo.js +index v1.0.0..v1.1.1 100644 +--- a/foo.js ++++ b/foo.js +@@ -1,2 +1,2 @@ + "use strict" +-module.exports = "foo" ++module.exports = "foobar" +diff --git a/lib/utils.js b/lib/utils.js +index v1.0.0..v1.1.1 100644 +--- a/lib/utils.js ++++ b/lib/utils.js +@@ -1,3 +1,4 @@ + "use strict" + const bar = require("./bar.js") +-module.exports = () => bar ++module.exports = ++ () => bar + "util" +` + +exports[`test/format-diff.js TAP format removed file > should output expected removed file diff result 1`] = ` +diff --git a/foo.js b/foo.js +deleted file mode 100644 +index v1.0.0..v2.0.0 +--- a/foo.js ++++ b/foo.js +@@ -1,2 +0,0 @@ +-"use strict" +-module.exports = "foo" +/ No newline at end of file +` + +exports[`test/format-diff.js TAP format simple diff > should output expected diff result 1`] = ` +diff --git a/foo.js b/foo.js +index v1.0.0..v2.0.0 
100644 +--- a/foo.js ++++ b/foo.js +@@ -1,2 +1,2 @@ + "use strict" +-module.exports = "foo" ++module.exports = "foobar" +` + +exports[`test/format-diff.js TAP noPrefix > should output result with no prefixes 1`] = ` +diff --git foo.js foo.js +index v1.0.0..v2.0.0 100644 +Index: foo.js +--- foo.js ++++ foo.js +@@ -1,2 +1,2 @@ + "use strict" +-module.exports = "foo" ++module.exports = "foobar" +` + +exports[`test/format-diff.js TAP nothing to diff > should output empty result 1`] = ` + +` + +exports[`test/format-diff.js TAP respect --tag-version-prefix option > should output expected diff result 1`] = ` +diff --git a/foo.js b/foo.js +index b1.0.0..b2.0.0 100644 +--- a/foo.js ++++ b/foo.js +@@ -1,2 +1,2 @@ + "use strict" +-module.exports = "foo" ++module.exports = "foobar" +` + +exports[`test/format-diff.js TAP using --name-only option > should output expected diff result 1`] = ` +foo.js +lib/utils.js +` diff --git a/packages/libnpmdiff/tap-snapshots/test/index.js.test.cjs b/packages/libnpmdiff/tap-snapshots/test/index.js.test.cjs new file mode 100644 index 0000000000000..21db3deac4d70 --- /dev/null +++ b/packages/libnpmdiff/tap-snapshots/test/index.js.test.cjs @@ -0,0 +1,115 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/index.js TAP compare two diff specs > should output expected diff 1`] = ` +diff --git a/index.js b/index.js +index v1.0.0..v2.0.0 100644 +--- a/index.js ++++ b/index.js +@@ -1,2 +1,2 @@ + module.exports = +- "a1" ++ "a2" +diff --git a/package.json b/package.json +index v1.0.0..v2.0.0 100644 +--- a/package.json ++++ b/package.json +@@ -1,4 +1,4 @@ + { + "name": "a", +- "version": "1.0.0" ++ "version": "2.0.0" + } +` + +exports[`test/index.js TAP folder in node_modules nested, absolute path > should output expected diff 1`] = ` +diff --git a/package.json b/package.json +index v2.0.0..v2.0.1 100644 +--- a/package.json ++++ b/package.json +@@ -1,6 +1,6 @@ + { + "name": "b", +- "version": "2.0.0", ++ "version": "2.0.1", + "scripts": { + "prepare": "node prepare.js" + } +diff --git a/prepare.js b/prepare.js +index v2.0.0..v2.0.1 100644 +--- a/prepare.js ++++ b/prepare.js +@@ -1,1 +0,0 @@ +-throw new Error("ERR") +/ No newline at end of file +` + +exports[`test/index.js TAP folder in node_modules nested, relative path > should output expected diff 1`] = ` +diff --git a/package.json b/package.json +index v2.0.0..v2.0.1 100644 +--- a/package.json ++++ b/package.json +@@ -1,6 +1,6 @@ + { + "name": "b", +- "version": "2.0.0", ++ "version": "2.0.1", + "scripts": { + "prepare": "node prepare.js" + } +diff --git a/prepare.js b/prepare.js +index v2.0.0..v2.0.1 100644 +--- a/prepare.js ++++ b/prepare.js +@@ -1,1 +0,0 @@ +-throw new Error("ERR") +/ No newline at end of file +` + +exports[`test/index.js TAP folder in node_modules top-level, absolute path > should output expected diff 1`] = ` +diff --git a/package.json b/package.json +index v1.0.0..v1.0.1 100644 +--- a/package.json ++++ b/package.json +@@ -1,6 +1,6 @@ + { + "name": "a", +- "version": "1.0.0", ++ "version": "1.0.1", + "scripts": { + "prepare": "node prepare.js" + } +diff --git a/prepare.js b/prepare.js +index v1.0.0..v1.0.1 100644 +--- a/prepare.js ++++ b/prepare.js +@@ -1,1 +0,0 @@ +-throw new Error("ERR") +/ No newline at end of file +` + +exports[`test/index.js 
TAP folder in node_modules top-level, relative path > should output expected diff 1`] = ` +diff --git a/package.json b/package.json +index v1.0.0..v1.0.1 100644 +--- a/package.json ++++ b/package.json +@@ -1,6 +1,6 @@ + { + "name": "a", +- "version": "1.0.0", ++ "version": "1.0.1", + "scripts": { + "prepare": "node prepare.js" + } +diff --git a/prepare.js b/prepare.js +index v1.0.0..v1.0.1 100644 +--- a/prepare.js ++++ b/prepare.js +@@ -1,1 +0,0 @@ +-throw new Error("ERR") +/ No newline at end of file +` diff --git a/packages/libnpmdiff/tap-snapshots/test/untar.js.test.cjs b/packages/libnpmdiff/tap-snapshots/test/untar.js.test.cjs new file mode 100644 index 0000000000000..b1092feb6ee8c --- /dev/null +++ b/packages/libnpmdiff/tap-snapshots/test/untar.js.test.cjs @@ -0,0 +1,134 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/untar.js TAP filter files > should return list of filenames 1`] = ` +LICENSE +README.md +` + +exports[`test/untar.js TAP filter files > should return map of filenames with valid contents 1`] = ` +a/LICENSE: true +a/README.md: true +` + +exports[`test/untar.js TAP filter files by exact filename > should return no filenames 1`] = ` + +` + +exports[`test/untar.js TAP filter files by exact filename > should return no filenames 2`] = ` + +` + +exports[`test/untar.js TAP filter files using glob expressions > should return list of filenames 1`] = ` +lib/index.js +lib/utils/b.js +package-lock.json +test/index.js +` + +exports[`test/untar.js TAP filter files using glob expressions > should return map of filenames with valid contents 1`] = ` +a/lib/index.js: true +a/lib/utils/b.js: true +a/package-lock.json: true +a/test/index.js: true +` + +exports[`test/untar.js TAP match files by end of filename > should return list of filenames 1`] = ` +lib/index.js +lib/utils/b.js +test/index.js +test/utils/b.js +` + +exports[`test/untar.js TAP match files by end of filename > should return map of filenames with valid contents 1`] = ` +a/lib/index.js: true +a/lib/utils/b.js: true +a/test/index.js: true +a/test/utils/b.js: true +` + +exports[`test/untar.js TAP match files by simple folder name > should return list of filenames 1`] = ` +lib/index.js +lib/utils/b.js +` + +exports[`test/untar.js TAP match files by simple folder name > should return map of filenames with valid contents 1`] = ` +a/lib/index.js: true +a/lib/utils/b.js: true +` + +exports[`test/untar.js TAP match files by simple folder name variation > should return list of filenames 1`] = ` +test/index.js +test/utils/b.js +` + +exports[`test/untar.js TAP match files by simple folder name variation > should return map of filenames with valid contents 1`] = ` +a/test/index.js: true +a/test/utils/b.js: true +` + +exports[`test/untar.js TAP untar package with folders > should have read contents 1`] = ` +module.exports = 'b' + +` + +exports[`test/untar.js TAP untar package with folders > should return list of filenames 1`] = ` +lib/index.js +lib/utils/b.js +package-lock.json +package.json +test/index.js +test/utils/b.js +` + +exports[`test/untar.js TAP untar package with folders > should return map of filenames to its contents 1`] = ` +a/lib/index.js: true +a/lib/utils/b.js: true +a/package-lock.json: true +a/package.json: true +a/test/index.js: true +a/test/utils/b.js: true +` 
+ +exports[`test/untar.js TAP untar simple package > should have read contents 1`] = ` +The MIT License (MIT) + +Copyright (c) Ruy Adorno (ruyadorno.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +` + +exports[`test/untar.js TAP untar simple package > should return list of filenames 1`] = ` +LICENSE +index.js +package.json +README.md +` + +exports[`test/untar.js TAP untar simple package > should return map of filenames to its contents 1`] = ` +a/LICENSE: true +a/index.js: true +a/package.json: true +a/README.md: true +` diff --git a/packages/libnpmdiff/test/fixtures/archive.tgz b/packages/libnpmdiff/test/fixtures/archive.tgz new file mode 100644 index 0000000000000..843a611239bbb Binary files /dev/null and b/packages/libnpmdiff/test/fixtures/archive.tgz differ diff --git a/packages/libnpmdiff/test/fixtures/ruyadorno-simplistic-pkg-with-folders-1.0.0.tgz b/packages/libnpmdiff/test/fixtures/ruyadorno-simplistic-pkg-with-folders-1.0.0.tgz new file mode 100644 index 0000000000000..11bbb44c4e9a4 Binary files /dev/null and b/packages/libnpmdiff/test/fixtures/ruyadorno-simplistic-pkg-with-folders-1.0.0.tgz differ diff --git a/packages/libnpmdiff/test/fixtures/simple-output-2.2.1.tgz b/packages/libnpmdiff/test/fixtures/simple-output-2.2.1.tgz new file mode 100644 index 0000000000000..8d442f4c1c078 Binary files /dev/null and b/packages/libnpmdiff/test/fixtures/simple-output-2.2.1.tgz differ diff --git a/packages/libnpmdiff/test/format-diff.js b/packages/libnpmdiff/test/format-diff.js new file mode 100644 index 0000000000000..f2fc7c77d7f11 --- /dev/null +++ b/packages/libnpmdiff/test/format-diff.js @@ -0,0 +1,483 @@ +const t = require('tap') + +const formatDiff = require('../lib/format-diff.js') + +const normalizeWin = (str) => str + .replace(/\\+/g, '/') + .replace(/\r\n/g, '\n') + +t.cleanSnapshot = (str) => normalizeWin(str) + +t.test('format simple diff', t => { + const files = new Set([ + 'foo.js', + ]) + const refs = new Map(Object.entries({ + 'a/foo.js': { + content: '"use strict"\nmodule.exports = "foo"\n', + mode: '100644', + }, + 'b/foo.js': { + content: '"use strict"\nmodule.exports = "foobar"\n', + mode: '100644', + }, + })) + const versions = { + a: '1.0.0', + b: '2.0.0', + } + + t.matchSnapshot( + formatDiff({ + files, + refs, + versions, + }), + 'should output expected diff result' + ) + t.end() +}) + +t.test('nothing to diff', t => { + const files = new Set([ + 'foo.js', + ]) + const refs = new Map(Object.entries({ + 'a/foo.js': { + content: '"use strict"\nmodule.exports = "foo"\n', + mode: '100644', 
+ }, + 'b/foo.js': { + content: '"use strict"\nmodule.exports = "foo"\n', + mode: '100644', + }, + })) + const versions = { + a: '1.0.0', + b: '1.0.0', + } + + t.matchSnapshot( + formatDiff({ + files, + refs, + versions, + }), + 'should output empty result' + ) + t.end() +}) + +t.test('format removed file', t => { + const files = new Set([ + 'foo.js', + ]) + const refs = new Map(Object.entries({ + 'a/foo.js': { + content: '"use strict"\nmodule.exports = "foo"\n', + mode: '100644', + }, + })) + const versions = { + a: '1.0.0', + b: '2.0.0', + } + + t.matchSnapshot( + formatDiff({ + files, + refs, + versions, + }), + 'should output expected removed file diff result' + ) + t.end() +}) + +t.test('changed file mode', t => { + const files = new Set([ + 'foo.js', + ]) + const refs = new Map(Object.entries({ + 'a/foo.js': { + content: '"use strict"\nmodule.exports = "foo"\n', + mode: '100644', + }, + 'b/foo.js': { + content: '"use strict"\nmodule.exports = "foo"\n', + mode: '100755', + }, + })) + const versions = { + a: '1.0.0', + b: '2.0.0', + } + + t.matchSnapshot( + formatDiff({ + files, + refs, + versions, + }), + 'should output expected changed file mode diff result' + ) + t.end() +}) + +t.test('added file', t => { + const files = new Set([ + 'foo.js', + ]) + const refs = new Map(Object.entries({ + 'b/foo.js': { + content: '"use strict"\nmodule.exports = "foo"\n', + mode: '100755', + }, + })) + const versions = { + a: '1.0.0', + b: '2.0.0', + } + + t.matchSnapshot( + formatDiff({ + files, + refs, + versions, + }), + 'should output expected added file diff result' + ) + t.end() +}) + +t.test('binary file', t => { + const files = new Set([ + 'foo.jpg', + ]) + const refs = new Map(Object.entries({ + 'a/foo.jpg': { + content: Buffer.from(''), + mode: '100644', + }, + 'b/foo.jpg': { + content: Buffer.from(''), + mode: '100644', + }, + })) + const versions = { + a: '1.0.0', + b: '2.0.0', + } + + t.matchSnapshot( + formatDiff({ + files, + refs, + versions, + }), + 'should output expected bin file diff result' + ) + t.end() +}) + +t.test('nothing to compare', t => { + const files = new Set([ + 'foo.jpg', + ]) + const refs = new Map(Object.entries({ + 'a/foo.jpg': {}, + 'b/foo.jpg': {}, + })) + const versions = { + a: '1.0.0', + b: '2.0.0', + } + + t.equal( + formatDiff({ + files, + refs, + versions, + }), + '', + 'should have no output' + ) + t.end() +}) + +t.test('colored output', t => { + const files = new Set([ + 'foo.js', + ]) + const refs = new Map(Object.entries({ + 'a/foo.js': { + content: '"use strict"\nmodule.exports = "foo"\n', + mode: '100644', + }, + 'b/foo.js': { + content: '"use strict"\nmodule.exports = "foobar"\n', + mode: '100644', + }, + })) + const versions = { + a: '1.0.0', + b: '2.0.0', + } + + t.matchSnapshot( + formatDiff({ + files, + refs, + versions, + opts: { + color: true, + }, + }), + 'should output expected colored diff result' + ) + t.end() +}) + +t.test('using --name-only option', t => { + const files = new Set([ + 'foo.js', + 'lib/bar.js', + 'lib/utils.js', + ]) + const refs = new Map(Object.entries({ + 'a/foo.js': { + content: '"use strict"\nmodule.exports = "foo"\n', + mode: '100644', + }, + 'b/foo.js': { + content: '"use strict"\nmodule.exports = "foobar"\n', + mode: '100644', + }, + 'a/lib/bar.js': { + content: '"use strict"\nmodule.exports = "bar"\n', + mode: '100644', + }, + 'b/lib/bar.js': { + content: '"use strict"\nmodule.exports = "bar"\n', + mode: '100644', + }, + 'a/lib/utils.js': { + content: '"use strict"\nconst bar = require("./bar.js")\n' + + 
'module.exports = () => bar\n', + mode: '100644', + }, + 'b/lib/utils.js': { + content: '"use strict"\nconst bar = require("./bar.js")\n' + + 'module.exports =\n () => bar + "util"\n', + mode: '100644', + }, + })) + const versions = { + a: '1.0.0', + b: '2.0.0', + } + + t.matchSnapshot( + formatDiff({ + files, + refs, + versions, + opts: { + diffNameOnly: true, + }, + }), + 'should output expected diff result' + ) + t.end() +}) + +t.test('respect --tag-version-prefix option', t => { + const files = new Set([ + 'foo.js', + ]) + const refs = new Map(Object.entries({ + 'a/foo.js': { + content: '"use strict"\nmodule.exports = "foo"\n', + mode: '100644', + }, + 'b/foo.js': { + content: '"use strict"\nmodule.exports = "foobar"\n', + mode: '100644', + }, + })) + const versions = { + a: '1.0.0', + b: '2.0.0', + } + + t.matchSnapshot( + formatDiff({ + files, + refs, + versions, + opts: { + tagVersionPrefix: 'b', + }, + }), + 'should output expected diff result' + ) + t.end() +}) + +t.test('diff options', t => { + const files = new Set([ + 'foo.js', + ]) + const refs = new Map(Object.entries({ + 'a/foo.js': { + content: '"use strict"\nconst a = "a"\nconst b = "b"\n' + + 'const c = "c"\nmodule.exports = () => a+\nb+\nc\n', + mode: '100644', + }, + 'b/foo.js': { + content: '"use strict"\nconst a = "a"\n const b = "b"\n' + + ' const c = "c"\n const d = "d"\n' + + 'module.exports = () => a+\nb+\nc+\nd\n', + mode: '100644', + }, + })) + const versions = { + a: '1.0.0', + b: '2.0.0', + } + + t.matchSnapshot( + formatDiff({ + files, + refs, + versions, + opts: { + diffUnified: 1, + diffIgnoreAllSpace: true, + diffSrcPrefix: 'before/', + diffDstPrefix: 'after/', + }, + }), + 'should output expected diff result' + ) + t.end() +}) + +t.test('diffUnified=0', t => { + const files = new Set([ + 'foo.js', + ]) + const refs = new Map(Object.entries({ + 'a/foo.js': { + content: '"use strict"\nconst a = "a"\nconst b = "b"\n' + + 'const c = "c"\nmodule.exports = () => a+\nb+\nc\n', + mode: '100644', + }, + 'b/foo.js': { + content: '"use strict"\nconst a = "a"\n const b = "b"\n' + + ' const c = "c"\n const d = "d"\n' + + 'module.exports = () => a+\nb+\nc+\nd\n', + mode: '100644', + }, + })) + const versions = { + a: '1.0.0', + b: '2.0.0', + } + + t.matchSnapshot( + formatDiff({ + files, + refs, + versions, + opts: { + diffUnified: 0, + }, + }), + 'should output no context lines in output' + ) + t.end() +}) + +t.test('noPrefix', t => { + const files = new Set([ + 'foo.js', + ]) + const refs = new Map(Object.entries({ + 'a/foo.js': { + content: '"use strict"\nmodule.exports = "foo"\n', + mode: '100644', + }, + 'b/foo.js': { + content: '"use strict"\nmodule.exports = "foobar"\n', + mode: '100644', + }, + })) + const versions = { + a: '1.0.0', + b: '2.0.0', + } + + t.matchSnapshot( + formatDiff({ + files, + refs, + versions, + opts: { + diffNoPrefix: true, + }, + }), + 'should output result with no prefixes' + ) + t.end() +}) + +t.test('format multiple files patch', t => { + const files = new Set([ + 'foo.js', + 'lib/bar.js', + 'lib/utils.js', + ]) + const refs = new Map(Object.entries({ + 'a/foo.js': { + content: '"use strict"\nmodule.exports = "foo"\n', + mode: '100644', + }, + 'b/foo.js': { + content: '"use strict"\nmodule.exports = "foobar"\n', + mode: '100644', + }, + 'a/lib/bar.js': { + content: '"use strict"\nmodule.exports = "bar"\n', + mode: '100644', + }, + 'b/lib/bar.js': { + content: '"use strict"\nmodule.exports = "bar"\n', + mode: '100644', + }, + 'a/lib/utils.js': { + content: '"use strict"\nconst bar = 
require("./bar.js")\n' + + 'module.exports = () => bar\n', + mode: '100644', + }, + 'b/lib/utils.js': { + content: '"use strict"\nconst bar = require("./bar.js")\n' + + 'module.exports =\n () => bar + "util"\n', + mode: '100644', + }, + })) + const versions = { + a: '1.0.0', + b: '1.1.1', + } + + t.matchSnapshot( + formatDiff({ + files, + refs, + versions, + }), + 'should output expected result for multiple files' + ) + t.end() +}) diff --git a/packages/libnpmdiff/test/index.js b/packages/libnpmdiff/test/index.js new file mode 100644 index 0000000000000..88b474c111f15 --- /dev/null +++ b/packages/libnpmdiff/test/index.js @@ -0,0 +1,147 @@ +const { resolve } = require('path') + +const t = require('tap') + +const diff = require('../index.js') + +const normalizePath = p => p + .replace(/\\+/g, '/') + .replace(/\r\n/g, '\n') + +t.cleanSnapshot = (str) => normalizePath(str) + .replace(normalizePath(process.execPath), 'node') + +const json = (obj) => `${JSON.stringify(obj, null, 2)}\n` + +t.test('compare two diff specs', async t => { + const path = t.testdir({ + a1: { + 'package.json': json({ + name: 'a', + version: '1.0.0', + }), + 'index.js': 'module.exports =\n "a1"\n', + }, + a2: { + 'package.json': json({ + name: 'a', + version: '2.0.0', + }), + 'index.js': 'module.exports =\n "a2"\n', + }, + }) + + const a = `file:${resolve(path, 'a1')}` + const b = `file:${resolve(path, 'a2')}` + + t.resolveMatchSnapshot(diff([a, b], {}), 'should output expected diff') +}) + +t.test('using single arg', async t => { + await t.rejects( + diff(['abbrev@1.0.3']), + /libnpmdiff needs two arguments to compare/, + 'should throw EDIFFARGS error' + ) +}) + +t.test('too many args', async t => { + const args = ['abbrev@1.0.3', 'abbrev@1.0.4', 'abbrev@1.0.5'] + await t.rejects( + diff(args), + /libnpmdiff needs two arguments to compare/, + 'should output diff against cwd files' + ) +}) + +t.test('folder in node_modules', async t => { + const path = t.testdir({ + node_modules: { + a: { + 'package.json': json({ + name: 'a', + version: '1.0.0', + scripts: { + prepare: `${process.execPath} prepare.js`, + }, + }), + 'prepare.js': 'throw new Error("ERR")', + node_modules: { + b: { + 'package.json': json({ + name: 'b', + version: '2.0.0', + scripts: { + prepare: `${process.execPath} prepare.js`, + }, + }), + 'prepare.js': 'throw new Error("ERR")', + }, + }, + }, + }, + packages: { + a: { + 'package.json': json({ + name: 'a', + version: '1.0.1', + scripts: { + prepare: `${process.execPath} prepare.js`, + }, + }), + 'prepare.js': '', + }, + b: { + 'package.json': json({ + name: 'b', + version: '2.0.1', + scripts: { + prepare: `${process.execPath} prepare.js`, + }, + }), + 'prepare.js': '', + }, + }, + 'package.json': json({ + name: 'my-project', + version: '1.0.0', + }), + }) + + t.test('top-level, absolute path', async t => { + t.resolveMatchSnapshot(diff([ + `file:${resolve(path, 'node_modules/a')}`, + `file:${resolve(path, 'packages/a')}`, + ], { where: path }), 'should output expected diff') + }) + t.test('top-level, relative path', async t => { + const _cwd = process.cwd() + process.chdir(path) + t.teardown(() => { + process.chdir(_cwd) + }) + + t.resolveMatchSnapshot(diff([ + 'file:./node_modules/a', + 'file:./packages/a', + ], { where: path }), 'should output expected diff') + }) + t.test('nested, absolute path', async t => { + t.resolveMatchSnapshot(diff([ + `file:${resolve(path, 'node_modules/a/node_modules/b')}`, + `file:${resolve(path, 'packages/b')}`, + ], { where: path}), 'should output expected diff') + }) + 
t.test('nested, relative path', async t => { + const _cwd = process.cwd() + process.chdir(path) + t.teardown(() => { + process.chdir(_cwd) + }) + + t.resolveMatchSnapshot(diff([ + 'file:./node_modules/a/node_modules/b', + 'file:./packages/b', + ], { where: path }), 'should output expected diff') + }) +}) diff --git a/packages/libnpmdiff/test/should-print-patch.js b/packages/libnpmdiff/test/should-print-patch.js new file mode 100644 index 0000000000000..97b15787d3933 --- /dev/null +++ b/packages/libnpmdiff/test/should-print-patch.js @@ -0,0 +1,28 @@ +const t = require('tap') +const shouldPrintPatch = require('../lib/should-print-patch.js') + +t.test('valid filenames', t => { + t.ok(shouldPrintPatch('LICENSE')) + t.ok(shouldPrintPatch('.gitignore')) + t.ok(shouldPrintPatch('foo.md')) + t.ok(shouldPrintPatch('./bar.txt')) + t.ok(shouldPrintPatch('/a/b/c/bar.html')) + t.end() +}) + +t.test('invalid filenames', t => { + t.notOk(shouldPrintPatch('foo.exe')) + t.notOk(shouldPrintPatch('./foo.jpg')) + t.notOk(shouldPrintPatch('/a/b/c/bar.bin')) + t.end() +}) + +t.test('using --text/-a option', t => { + const opts = { + diffText: true, + } + t.ok(shouldPrintPatch('foo.exe', opts)) + t.ok(shouldPrintPatch('./foo.jpg', opts)) + t.ok(shouldPrintPatch('/a/b/c/bar.bin', opts)) + t.end() +}) diff --git a/packages/libnpmdiff/test/tarball.js b/packages/libnpmdiff/test/tarball.js new file mode 100644 index 0000000000000..3a959be6e53bc --- /dev/null +++ b/packages/libnpmdiff/test/tarball.js @@ -0,0 +1,96 @@ +const { resolve } = require('path') + +const t = require('tap') +const tar = require('tar') +const pacote = require('pacote') +pacote.tarball = () => { + throw new Error('Failed to detect node_modules tarball') +} + +const tarball = require('../lib/tarball.js') + +const json = (obj) => `${JSON.stringify(obj, null, 2)}\n` + +t.test('returns a tarball from node_modules', t => { + t.plan(2) + + const path = t.testdir({ + node_modules: { + a: { + 'package.json': json({ + name: 'a', + version: '1.0.0', + bin: { a: 'index.js' }, + }), + 'index.js': '', + }, + }, + }) + + const _cwd = process.cwd() + process.chdir(path) + t.teardown(() => { + process.chdir(_cwd) + }) + + tarball({ bin: { a: 'index.js' }, _resolved: resolve(path, 'node_modules/a') }, { where: path }) + .then(res => { + tar.list({ + filter: path => { + t.match( + path, + /package.json|index.js/, + 'should return tarball with expected files' + ) + }, + }) + .on('error', e => { + throw e + }) + .end(res) + }) +}) + +t.test('node_modules folder within a linked dir', async t => { + const path = t.testdir({ + node_modules: { + a: t.fixture('symlink', '../packages/a'), + }, + packages: { + a: { + node_modules: { + b: { + 'package.json': json({ + name: 'a', + version: '1.0.0', + }), + }, + }, + }, + }, + }) + + const link = await tarball({ _resolved: resolve(path, 'node_modules/a/node_modules/b') }, {}) + t.ok(link, 'should retrieve tarball from reading link') + + const target = await tarball({ _resolved: resolve(path, 'packages/a/node_modules/b') }, {}) + t.ok(target, 'should retrieve tarball from reading target') +}) + +t.test('pkg not in a node_modules folder', async t => { + const path = t.testdir({ + packages: { + a: { + 'package.json': json({ + name: 'a', + version: '1.0.0', + }), + }, + }, + }) + + t.throws( + () => tarball({ _resolved: resolve(path, 'packages/a') }, {}), + 'should call regular pacote.tarball method instead' + ) +}) diff --git a/packages/libnpmdiff/test/untar.js b/packages/libnpmdiff/test/untar.js new file mode 100644 index 
0000000000000..62be1c6ba9003 --- /dev/null +++ b/packages/libnpmdiff/test/untar.js @@ -0,0 +1,231 @@ +const { resolve } = require('path') +const t = require('tap') +const pacote = require('pacote') +const untar = require('../lib/untar.js') + +t.test('untar simple package', async t => { + const item = + await pacote.tarball(resolve('./test/fixtures/simple-output-2.2.1.tgz')) + + const { + files, + refs, + } = await untar({ + item, + prefix: 'a/', + }) + + t.matchSnapshot([...files].join('\n'), 'should return list of filenames') + t.matchSnapshot( + [...refs.entries()].map(([k, v]) => `${k}: ${!!v}`).join('\n'), + 'should return map of filenames to its contents' + ) + t.matchSnapshot(refs.get('a/LICENSE').content, 'should have read contents') +}) + +t.test('untar package with folders', async t => { + const item = + await pacote.tarball(resolve('./test/fixtures/archive.tgz')) + + const { + files, + refs, + } = await untar({ + item, + prefix: 'a/', + }) + + t.matchSnapshot([...files].join('\n'), 'should return list of filenames') + t.matchSnapshot( + [...refs.entries()].map(([k, v]) => `${k}: ${!!v}`).join('\n'), + 'should return map of filenames to its contents' + ) + t.matchSnapshot( + refs.get('a/lib/utils/b.js').content, + 'should have read contents' + ) +}) + +t.test('filter files', async t => { + const item = + await pacote.tarball(resolve('./test/fixtures/simple-output-2.2.1.tgz')) + + const { + files, + refs, + } = await untar({ + item, + prefix: 'a/', + }, { + diffFiles: [ + './LICENSE', + 'missing-file', + 'README.md', + ], + }) + + t.matchSnapshot([...files].join('\n'), 'should return list of filenames') + t.matchSnapshot( + [...refs.entries()].map(([k, v]) => `${k}: ${!!v.content}`).join('\n'), + 'should return map of filenames with valid contents' + ) +}) + +t.test('filter files using glob expressions', async t => { + const item = + await pacote.tarball(resolve('./test/fixtures/archive.tgz')) + const cwd = t.testdir({ + lib: { + 'index.js': '', + utils: { + '/b.js': '', + }, + }, + 'package-lock.json': '', + 'package.json': '', + test: { + '/index.js': '', + utils: { + 'b.js': '', + }, + }, + }) + + const _cwd = process.cwd() + process.chdir(cwd) + t.teardown(() => { + process.chdir(_cwd) + }) + + const { + files, + refs, + } = await untar({ + item, + prefix: 'a/', + }, { + diffFiles: [ + './lib/**', + '*-lock.json', + 'test\\*', // windows-style sep should be normalized + ], + }) + + t.matchSnapshot([...files].join('\n'), 'should return list of filenames') + t.matchSnapshot( + [...refs.entries()].map(([k, v]) => `${k}: ${!!v.content}`).join('\n'), + 'should return map of filenames with valid contents' + ) +}) + +t.test('match files by end of filename', async t => { + const item = + await pacote.tarball(resolve('./test/fixtures/archive.tgz')) + + const { + files, + refs, + } = await untar({ + item, + prefix: 'a/', + }, { + diffFiles: [ + '*.js', + ], + }) + + t.matchSnapshot([...files].join('\n'), 'should return list of filenames') + t.matchSnapshot( + [...refs.entries()].map(([k, v]) => `${k}: ${!!v.content}`).join('\n'), + 'should return map of filenames with valid contents' + ) +}) + +t.test('filter files by exact filename', async t => { + const item = + await pacote.tarball(resolve('./test/fixtures/archive.tgz')) + + const { + files, + refs, + } = await untar({ + item, + prefix: 'a/', + }, { + diffFiles: [ + 'index.js', + ], + }) + + t.matchSnapshot([...files].join('\n'), 'should return no filenames') + t.matchSnapshot( + [...refs.entries()].map(([k, v]) => `${k}: 
${!!v.content}`).join('\n'), + 'should return no filenames' + ) +}) + +t.test('match files by simple folder name', async t => { + const item = + await pacote.tarball(resolve('./test/fixtures/archive.tgz')) + + const { + files, + refs, + } = await untar({ + item, + prefix: 'a/', + }, { + diffFiles: [ + 'lib', + ], + }) + + t.matchSnapshot([...files].join('\n'), 'should return list of filenames') + t.matchSnapshot( + [...refs.entries()].map(([k, v]) => `${k}: ${!!v.content}`).join('\n'), + 'should return map of filenames with valid contents' + ) +}) + +t.test('match files by simple folder name variation', async t => { + const item = + await pacote.tarball(resolve('./test/fixtures/archive.tgz')) + + const { + files, + refs, + } = await untar({ + item, + prefix: 'a/', + }, { + diffFiles: [ + './test/', + ], + }) + + t.matchSnapshot([...files].join('\n'), 'should return list of filenames') + t.matchSnapshot( + [...refs.entries()].map(([k, v]) => `${k}: ${!!v.content}`).join('\n'), + 'should return map of filenames with valid contents' + ) +}) + +t.test('filter out all files', async t => { + const item = + await pacote.tarball(resolve('./test/fixtures/simple-output-2.2.1.tgz')) + + const { + files, + refs, + } = await untar({ + item, + prefix: 'a/', + }, { + diffFiles: [ + 'non-matching-pattern', + ], + }) + + t.equal(files.size, 0, 'should have no files') + t.equal(refs.size, 0, 'should have no refs') +}) diff --git a/scripts/bundle-and-gitignore-deps.js b/scripts/bundle-and-gitignore-deps.js index 0aedec7811ec6..96c1419e21807 100644 --- a/scripts/bundle-and-gitignore-deps.js +++ b/scripts/bundle-and-gitignore-deps.js @@ -9,8 +9,11 @@ const arb = new Arborist({ path: resolve(__dirname, '..') }) const shouldIgnore = [] arb.loadVirtual().then(tree => { - for (const [name, node] of tree.children.entries()) { - if (node.dev) { + for (const node of tree.children.values()) { + const has = (obj, key) => Object.prototype.hasOwnProperty.call(obj, key) + const nonProdWorkspace = + node.isWorkspace && !(has(tree.package.dependencies, node.name)) + if (node.dev || nonProdWorkspace) { console.error('ignore', node.name) shouldIgnore.push(node.name) } else if (tree.edgesOut.has(node.name)) { @@ -18,14 +21,37 @@ arb.loadVirtual().then(tree => { bundle.push(node.name) } } - pkg.bundleDependencies = bundle.sort((a, b) => a.localeCompare(b)) + pkg.bundleDependencies = bundle.sort((a, b) => a.localeCompare(b, 'en')) - const ignores = shouldIgnore.sort((a, b) => a.localeCompare(b)) + const ignores = shouldIgnore.sort((a, b) => a.localeCompare(b, 'en')) .map(i => `/${i}`) .join('\n') const ignoreData = `# Automatically generated to ignore dev deps /.package-lock.json package-lock.json +CHANGELOG* +changelog* +README* +readme* +.editorconfig +.idea/ +.npmignore +.eslintrc* +.travis* +.github +.jscsrc +.nycrc +.istanbul* +.eslintignore +.jshintrc* +.prettierrc* +.jscs.json +.dir-locals* +.coveralls* +.babelrc* +.nyc_output +.gitkeep + ${ignores} ` writeFileSync(ignore, ignoreData) diff --git a/scripts/changelog.js b/scripts/changelog.js index f36ad56c9c5bf..0951bd0275cfc 100644 --- a/scripts/changelog.js +++ b/scripts/changelog.js @@ -18,40 +18,38 @@ const log = execSync(`git log --reverse --pretty='format:%h %H%d %s (%aN)%n%b%n- main() function shortname (url) { - let matched = url.match(/https:\/\/github\.com\/([^/]+\/[^/]+)\/(?:pull|issues)\/(\d+)/) || + const matched = url.match(/https:\/\/github\.com\/([^/]+\/[^/]+)\/(?:pull|issues)\/(\d+)/) || url.match(/https:\/\/(npm\.community)\/t\/(?:[^/]+\/)(\d+)/) - if 
(!matched) return false - let repo = matched[1] - let id = matched[2] - if (repo !== 'npm/cli') { + if (!matched) + return false + const repo = matched[1] + const id = matched[2] + if (repo !== 'npm/cli') return `${repo}#${id}` - } else { + else return `#${id}` - } } function printCommit (c) { console.log(`* [\`${c.shortid}\`](https://github.com/npm/cli/commit/${c.fullid})`) if (c.fixes.length) { for (const fix of c.fixes) { - let label = shortname(fix) - if (label) { + const label = shortname(fix) + if (label) console.log(` [${label}](${fix})`) - } } } else if (c.prurl) { - let label = shortname(c.prurl) - if (label) { + const label = shortname(c.prurl) + if (label) console.log(` [${label}](${c.prurl})`) - } else { + else console.log(` [#](${c.prurl})`) - } } - let msg = c.message + const msg = c.message .replace(/^\s+/mg, '') .replace(/^[-a-z]+: /, '') .replace(/^/mg, ' ') - .replace(/^ Reviewed-by: @.*/mg, '') + .replace(/^ {2}Reviewed-by: @.*/mg, '') .replace(/\n$/, '') // backtickify package@version .replace(/^(\s*@?[^@\s]+@\d+[.]\d+[.]\d+)\b(\s*\S)/g, '$1:$2') @@ -60,14 +58,13 @@ function printCommit (c) { .replace(/\b([a-f0-9]{7,8})\b/g, '[`$1`](https://github.com/npm/cli/commit/$1)') console.log(msg) // don't assign credit for dep updates - if (!/^ `[^`]+@\d+\.\d+\.\d+[^`]*`:?$/m.test(msg)) { + if (!/^ {2}`[^`]+@\d+\.\d+\.\d+[^`]*`:?$/m.test(msg)) { if (c.credit) { c.credit.forEach(function (credit) { console.log(` ([@${credit}](https://github.com/${credit}))`) }) - } else { + } else console.log(` ([@${c.author}](https://github.com/${c.author}))`) - } } } @@ -77,9 +74,9 @@ function main () { line = line.replace(/\r/g, '') let m /* eslint no-cond-assign:0 */ - if (/^---$/.test(line)) { + if (/^---$/.test(line)) printCommit(commit) - } else if (m = line.match(/^([a-f0-9]{7,10}) ([a-f0-9]+) (?:[(]([^)]+)[)] )?(.*?) [(](.*?)[)]/)) { + else if (m = line.match(/^([a-f0-9]{7,10}) ([a-f0-9]+) (?:[(]([^)]+)[)] )?(.*?) 
[(](.*?)[)]/)) { commit = { shortid: m[1], fullid: m[2], @@ -88,23 +85,23 @@ function main () { author: m[5], prurl: null, fixes: [], - credit: null + credit: null, } - } else if (m = line.match(/^PR-URL: (.*)/)) { + } else if (m = line.match(/^PR-URL: (.*)/)) commit.prurl = m[1] - } else if (m = line.match(/^Credit: @(.*)/)) { - if (!commit.credit) commit.credit = [] + else if (m = line.match(/^Credit: @(.*)/)) { + if (!commit.credit) + commit.credit = [] commit.credit.push(m[1]) - } else if (m = line.match(/^(?:Fix(?:es)|Closes?): #?([0-9]+)/)) { + } else if (m = line.match(/^(?:Fix(?:es)|Closes?): #?([0-9]+)/)) commit.fixes.push(`https://github.com/npm/cli/issues/${m[1]}`) - } else if (m = line.match(/^(?:Fix(?:es)|Closes?): ([^#]+)#([0-9]*)/)) { + else if (m = line.match(/^(?:Fix(?:es)|Closes?): ([^#]+)#([0-9]*)/)) commit.fixes.push(`https://github.com/${m[1]}/issues/${m[2]}`) - } else if (m = line.match(/^(?:Fix(?:es)|Closes?): (https?:\/\/.*)/)) { + else if (m = line.match(/^(?:Fix(?:es)|Closes?): (https?:\/\/.*)/)) commit.fixes.push(m[1]) - } else if (m = line.match(/^Reviewed-By: @(.*)/)) { + else if (m = line.match(/^Reviewed-By: @(.*)/)) commit.reviewed = m[1] - } else if (/\S/.test(line)) { + else if (/\S/.test(line)) commit.message += `\n${line}` - } }) } diff --git a/scripts/config-doc-command.js b/scripts/config-doc-command.js new file mode 100644 index 0000000000000..48bc026543e67 --- /dev/null +++ b/scripts/config-doc-command.js @@ -0,0 +1,46 @@ +const { definitions } = require('../lib/utils/config/index.js') +const { writeFileSync, readFileSync } = require('fs') +const { resolve } = require('path') + +const configDoc = process.argv[2] +const commandFile = process.argv[3] + +// Note: commands without params skip this whole process. +const { params } = require(resolve(commandFile)) + +const describeAll = () => + params.map(name => definitions[name].describe()).join('\n\n') + +const addBetweenTags = (doc, startTag, endTag, body) => { + const startSplit = doc.split(startTag) + if (startSplit.length !== 2) + throw new Error('Did not find exactly one start tag') + + const endSplit = startSplit[1].split(endTag) + if (endSplit.length !== 2) + throw new Error('Did not find exactly one end tag') + + return [ + startSplit[0], + startTag, + '\n<!-- automatically generated, do not edit manually -->\n', + body, + '\n\n', + endTag, + endSplit[1], + ].join('') +} + +const addDescriptions = doc => { + const startTag = '<!-- AUTOGENERATED CONFIG DESCRIPTIONS START -->' + const endTag = '<!-- AUTOGENERATED CONFIG DESCRIPTIONS END -->' + return addBetweenTags(doc, startTag, endTag, describeAll()) +} + +// always write SOMETHING so that Make sees the file is up to date. +const doc = readFileSync(configDoc, 'utf8') +const hasTag = doc.includes('<!-- AUTOGENERATED CONFIG DESCRIPTIONS START -->') +const newDoc = params && hasTag ? 
addDescriptions(doc) : doc +if (params && !hasTag) + console.error('WARNING: did not find config description section', configDoc) +writeFileSync(configDoc, newDoc) diff --git a/scripts/config-doc.js b/scripts/config-doc.js new file mode 100644 index 0000000000000..5014bcdc46c19 --- /dev/null +++ b/scripts/config-doc.js @@ -0,0 +1,50 @@ +const { shorthands, describeAll } = require('../lib/utils/config/index.js') +const { writeFileSync, readFileSync } = require('fs') +const { resolve } = require('path') +const configDoc = resolve(__dirname, '../docs/content/using-npm/config.md') + +const addBetweenTags = (doc, startTag, endTag, body) => { + const startSplit = doc.split(startTag) + if (startSplit.length !== 2) + throw new Error('Did not find exactly one start tag') + + const endSplit = startSplit[1].split(endTag) + if (endSplit.length !== 2) + throw new Error('Did not find exactly one end tag') + + return [ + startSplit[0], + startTag, + '\n<!-- automatically generated, do not edit manually -->\n', + body, + '\n\n', + endTag, + endSplit[1], + ].join('') +} + +const addDescriptions = doc => { + const startTag = '<!-- AUTOGENERATED CONFIG DESCRIPTIONS START -->' + const endTag = '<!-- AUTOGENERATED CONFIG DESCRIPTIONS END -->' + return addBetweenTags(doc, startTag, endTag, describeAll()) +} + +const addShorthands = doc => { + const startTag = '<!-- AUTOGENERATED CONFIG SHORTHANDS START -->' + const endTag = '<!-- AUTOGENERATED CONFIG SHORTHANDS END -->' + const body = Object.entries(shorthands) + .sort(([shorta, expansiona], [shortb, expansionb]) => { + // sort by what they're short FOR + return expansiona.join(' ').localeCompare(expansionb.join(' '), 'en') || + shorta.localeCompare(shortb, 'en') + }) + .map(([short, expansion]) => { + const dash = short.length === 1 ? '-' : '--' + return `* \`${dash}${short}\`: \`${expansion.join(' ')}\`` + }).join('\n') + return addBetweenTags(doc, startTag, endTag, body) +} + +const doc = readFileSync(configDoc, 'utf8') +writeFileSync(configDoc, addDescriptions(addShorthands(doc))) +console.log(`updated docs/content/using-npm/config.md`) diff --git a/scripts/docs-build.js b/scripts/docs-build.js index a1540ebb9b97c..8e217d2259a54 100644 --- a/scripts/docs-build.js +++ b/scripts/docs-build.js @@ -8,19 +8,19 @@ var src = args[0] var dest = args[1] || src fs.readFile(src, 'utf8', function (err, data) { - if (err) return console.log(err) + if (err) + return console.log(err) function frontmatter (match, p1) { const fm = { } p1.split(/\r?\n/).forEach((kv) => { - let result = kv.match(/^([^\s:]+):\s*(.*)/) - if (result) { + const result = kv.match(/^([^\s:]+):\s*(.*)/) + if (result) fm[result[1]] = result[2] - } }) - return `# ${fm['title']}(${fm['section']}) - ${fm['description']}` + return `# ${fm.title}(${fm.section}) - ${fm.description}` } function replacer (match, p1) { @@ -35,6 +35,7 @@ fs.readFile(src, 'utf8', function (err, data) { .trim() fs.writeFile(dest, marked(result), 'utf8', function (err) { - if (err) return console.log(err) + if (err) + return console.log(err) }) }) diff --git a/scripts/update-dist-tags.js b/scripts/update-dist-tags.js index f28bfd0b91739..371d0c03a47d6 100644 --- a/scripts/update-dist-tags.js +++ b/scripts/update-dist-tags.js @@ -79,9 +79,9 @@ function parseOTP (args) { } case 1: { // --otp=123456 or --otp123456 - if (otp) { + if (otp) return otp - } + console.error('Invalid otp value supplied. 
[CASE 1]') process.exit(1) } @@ -89,9 +89,9 @@ function parseOTP (args) { // --otp 123456 // INFO: validating the second argument is an otp code const isValidOtp = PARSE_OTP_VALUE.test(args[1]) - if (isValidOtp) { + if (isValidOtp) return args[1] - } + console.error('Invalid otp value supplied. [CASE 2]') process.exit(1) } diff --git a/smoke-tests/content/abbrev.json b/smoke-tests/content/abbrev.json new file mode 100644 index 0000000000000..ffcf5474a9de8 --- /dev/null +++ b/smoke-tests/content/abbrev.json @@ -0,0 +1,449 @@ +{ + "_id": "abbrev", + "_rev": "72-d1d46bef3d311d6da6737e109e771869", + "name": "abbrev", + "dist-tags": { + "latest": "1.1.1" + }, + "versions": { + "1.0.3": { + "name": "abbrev", + "version": "1.0.3", + "description": "Like ruby's abbrev module, but in js", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me" + }, + "main": "./lib/abbrev.js", + "scripts": { + "test": "node lib/abbrev.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/abbrev-js.git" + }, + "_id": "abbrev@1.0.3", + "engines": { + "node": "*" + }, + "_engineSupported": true, + "_npmVersion": "1.0.0rc7", + "_nodeVersion": "v0.5.0-pre", + "_defaultsLoaded": true, + "dist": { + "shasum": "aa049c967f999222aa42e14434f0c562ef468241", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.3.tgz" + }, + "directories": {} + }, + "1.0.4": { + "name": "abbrev", + "version": "1.0.4", + "description": "Like ruby's abbrev module, but in js", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me" + }, + "main": "./lib/abbrev.js", + "scripts": { + "test": "node lib/abbrev.js" + }, + "repository": { + "type": "git", + "url": "http://github.com/isaacs/abbrev-js" + }, + "license": { + "type": "MIT", + "url": "https://github.com/isaacs/abbrev-js/raw/master/LICENSE" + }, + "_id": "abbrev@1.0.4", + "dist": { + "shasum": "bd55ae5e413ba1722ee4caba1f6ea10414a59ecd", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.4.tgz" + }, + "_npmVersion": "1.1.70", + "_npmUser": { + "name": "isaacs", + "email": "i@izs.me" + }, + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "directories": {} + }, + "1.0.5": { + "name": "abbrev", + "version": "1.0.5", + "description": "Like ruby's abbrev module, but in js", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me" + }, + "main": "abbrev.js", + "scripts": { + "test": "node test.js" + }, + "repository": { + "type": "git", + "url": "http://github.com/isaacs/abbrev-js" + }, + "license": { + "type": "MIT", + "url": "https://github.com/isaacs/abbrev-js/raw/master/LICENSE" + }, + "bugs": { + "url": "https://github.com/isaacs/abbrev-js/issues" + }, + "homepage": "https://github.com/isaacs/abbrev-js", + "_id": "abbrev@1.0.5", + "_shasum": "5d8257bd9ebe435e698b2fa431afde4fe7b10b03", + "_from": ".", + "_npmVersion": "1.4.7", + "_npmUser": { + "name": "isaacs", + "email": "i@izs.me" + }, + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "dist": { + "shasum": "5d8257bd9ebe435e698b2fa431afde4fe7b10b03", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.5.tgz" + }, + "directories": {} + }, + "1.0.6": { + "name": "abbrev", + "version": "1.0.6", + "description": "Like ruby's abbrev module, but in js", + "author": { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me" + }, + "main": "abbrev.js", + "scripts": { + "test": "node test.js" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/isaacs/abbrev-js.git" + }, + "license": "ISC", + "gitHead": "648a6735d9c5a7a04885e3ada49eed4db36181c2", + "bugs": { + "url": "https://github.com/isaacs/abbrev-js/issues" + }, + "homepage": "https://github.com/isaacs/abbrev-js#readme", + "_id": "abbrev@1.0.6", + "_shasum": "b6d632b859b3fa2d6f7e4b195472461b9e32dc30", + "_from": ".", + "_npmVersion": "2.10.0", + "_nodeVersion": "2.0.1", + "_npmUser": { + "name": "isaacs", + "email": "isaacs@npmjs.com" + }, + "dist": { + "shasum": "b6d632b859b3fa2d6f7e4b195472461b9e32dc30", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.6.tgz" + }, + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "directories": {} + }, + "1.0.7": { + "name": "abbrev", + "version": "1.0.7", + "description": "Like ruby's abbrev module, but in js", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me" + }, + "main": "abbrev.js", + "scripts": { + "test": "tap test.js --cov" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/isaacs/abbrev-js.git" + }, + "license": "ISC", + "devDependencies": { + "tap": "^1.2.0" + }, + "gitHead": "821d09ce7da33627f91bbd8ed631497ed6f760c2", + "bugs": { + "url": "https://github.com/isaacs/abbrev-js/issues" + }, + "homepage": "https://github.com/isaacs/abbrev-js#readme", + "_id": "abbrev@1.0.7", + "_shasum": "5b6035b2ee9d4fb5cf859f08a9be81b208491843", + "_from": ".", + "_npmVersion": "2.10.1", + "_nodeVersion": "2.0.1", + "_npmUser": { + "name": "isaacs", + "email": "isaacs@npmjs.com" + }, + "dist": { + "shasum": "5b6035b2ee9d4fb5cf859f08a9be81b208491843", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.7.tgz" + }, + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "directories": {} + }, + "1.0.9": { + "name": "abbrev", + "version": "1.0.9", + "description": "Like ruby's abbrev module, but in js", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me" + }, + "main": "abbrev.js", + "scripts": { + "test": "tap test.js --cov" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/isaacs/abbrev-js.git" + }, + "license": "ISC", + "devDependencies": { + "tap": "^5.7.2" + }, + "files": [ + "abbrev.js" + ], + "gitHead": "c386cd9dbb1d8d7581718c54d4ba944cc9298d6f", + "bugs": { + "url": "https://github.com/isaacs/abbrev-js/issues" + }, + "homepage": "https://github.com/isaacs/abbrev-js#readme", + "_id": "abbrev@1.0.9", + "_shasum": "91b4792588a7738c25f35dd6f63752a2f8776135", + "_from": ".", + "_npmVersion": "3.9.1", + "_nodeVersion": "4.4.4", + "_npmUser": { + "name": "isaacs", + "email": "i@izs.me" + }, + "dist": { + "shasum": "91b4792588a7738c25f35dd6f63752a2f8776135", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.9.tgz" + }, + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/abbrev-1.0.9.tgz_1466016055839_0.7825860097073019" + }, + "directories": {} + }, + "1.1.0": { + "name": "abbrev", + "version": "1.1.0", + "description": "Like ruby's abbrev module, but in js", + "author": { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me" + }, + "main": "abbrev.js", + "scripts": { + "test": "tap test.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/isaacs/abbrev-js.git" + }, + "license": "ISC", + "devDependencies": { + "tap": "^10.1" + }, + "files": [ + "abbrev.js" + ], + "gitHead": "7136d4d95449dc44115d4f78b80ec907724f64e0", + "bugs": { + "url": "https://github.com/isaacs/abbrev-js/issues" + }, + "homepage": "https://github.com/isaacs/abbrev-js#readme", + "_id": "abbrev@1.1.0", + "_shasum": "d0554c2256636e2f56e7c2e5ad183f859428d81f", + "_from": ".", + "_npmVersion": "4.3.0", + "_nodeVersion": "8.0.0-pre", + "_npmUser": { + "name": "isaacs", + "email": "i@izs.me" + }, + "dist": { + "shasum": "d0554c2256636e2f56e7c2e5ad183f859428d81f", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.0.tgz" + }, + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/abbrev-1.1.0.tgz_1487054000015_0.9229173036292195" + }, + "directories": {} + }, + "1.1.1": { + "name": "abbrev", + "version": "1.1.1", + "description": "Like ruby's abbrev module, but in js", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me" + }, + "main": "abbrev.js", + "scripts": { + "test": "tap test.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/isaacs/abbrev-js.git" + }, + "license": "ISC", + "devDependencies": { + "tap": "^10.1" + }, + "files": [ + "abbrev.js" + ], + "gitHead": "a9ee72ebc8fe3975f1b0c7aeb3a8f2a806a432eb", + "bugs": { + "url": "https://github.com/isaacs/abbrev-js/issues" + }, + "homepage": "https://github.com/isaacs/abbrev-js#readme", + "_id": "abbrev@1.1.1", + "_npmVersion": "5.4.2", + "_nodeVersion": "8.5.0", + "_npmUser": { + "name": "isaacs", + "email": "i@izs.me" + }, + "dist": { + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "shasum": "f8f2c887ad10bf67f634f005b6987fed3179aac8", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz" + }, + "maintainers": [ + { + "name": "gabra", + "email": "jerry+1@npmjs.com" + }, + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "_npmOperationalInternal": { + "host": "s3://npm-registry-packages", + "tmp": "tmp/abbrev-1.1.1.tgz_1506566833068_0.05750026390887797" + }, + "directories": {} + } + }, + "maintainers": [ + { + "email": "quitlahok@gmail.com", + "name": "nlf" + }, + { + "email": "ruyadorno@hotmail.com", + "name": "ruyadorno" + }, + { + "email": "darcy@darcyclarke.me", + "name": "darcyclarke" + }, + { + "email": "evilpacket@gmail.com", + "name": "adam_baldwin" + }, + { + "email": "i@izs.me", + "name": "isaacs" + } + ], + "author": { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me" + }, + "description": "Like ruby's abbrev module, but in js", + "time": { + "modified": "2020-10-13T05:04:03.636Z", + "created": "2011-03-21T22:21:11.183Z", + "1.0.1": "2011-03-21T22:21:11.183Z", + "1.0.2": "2011-03-21T22:21:11.183Z", + "1.0.3": "2011-03-21T22:21:11.183Z", + "1.0.3-1": "2011-03-24T23:01:19.581Z", + "1.0.4": "2013-01-09T00:01:24.135Z", + "1.0.5": "2014-04-17T20:09:12.523Z", + "1.0.6": "2015-05-21T00:58:16.778Z", + "1.0.7": "2015-05-30T22:57:54.685Z", + "1.0.9": "2016-06-15T18:41:01.215Z", + "1.1.0": "2017-02-14T06:33:20.235Z", + "1.1.1": "2017-09-28T02:47:13.220Z" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/isaacs/abbrev-js.git" + }, + "users": { + "leesei": true, + "ceejbot": true, + "isaacs": true, + "npm-www": true, + "tunnckocore": true, + "ruanyu1": true, + "leodutra": true, + "jessaustin": true, + "jian263994241": true, + "floriannagel": true, + "tdmalone": true, + "ryanve": true, + "detj": true, + "monjer": true, + "d-band": true + }, + "readme": "# abbrev-js\n\nJust like [ruby's Abbrev](http://apidock.com/ruby/Abbrev).\n\nUsage:\n\n var abbrev = require(\"abbrev\");\n abbrev(\"foo\", \"fool\", \"folding\", \"flop\");\n \n // returns:\n { fl: 'flop'\n , flo: 'flop'\n , flop: 'flop'\n , fol: 'folding'\n , fold: 'folding'\n , foldi: 'folding'\n , foldin: 'folding'\n , folding: 'folding'\n , foo: 'foo'\n , fool: 'fool'\n }\n\nThis is handy for command-line scripts, or other cases where you want to be able to accept shorthands.\n", + "readmeFilename": "README.md", + "homepage": "https://github.com/isaacs/abbrev-js#readme", + "bugs": { + "url": "https://github.com/isaacs/abbrev-js/issues" + }, + "license": "ISC" +} diff --git a/smoke-tests/content/abbrev.min.json b/smoke-tests/content/abbrev.min.json new file mode 100644 index 0000000000000..c03d91c9c8c19 --- /dev/null +++ b/smoke-tests/content/abbrev.min.json @@ -0,0 +1,89 @@ +{ + "name": "abbrev", + "dist-tags": { + "latest": "1.1.1" + }, + "versions": { + "1.0.3": { + "name": "abbrev", + "version": "1.0.3", + "dist": { + "shasum": "aa049c967f999222aa42e14434f0c562ef468241", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.3.tgz" + }, + "engines": { + "node": "*" + } + }, + "1.0.4": { + "name": "abbrev", + "version": "1.0.4", + "dist": { + "shasum": "bd55ae5e413ba1722ee4caba1f6ea10414a59ecd", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.4.tgz" + } + }, + "1.0.5": { + "name": "abbrev", + "version": "1.0.5", + "dist": { + "shasum": "5d8257bd9ebe435e698b2fa431afde4fe7b10b03", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.5.tgz" + } + }, + "1.0.6": { + "name": "abbrev", + "version": "1.0.6", + "dist": { + "shasum": "b6d632b859b3fa2d6f7e4b195472461b9e32dc30", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.6.tgz" + } + }, + "1.0.7": { + "name": "abbrev", + "version": "1.0.7", + "devDependencies": { + "tap": "^1.2.0" + }, + "dist": { + "shasum": "5b6035b2ee9d4fb5cf859f08a9be81b208491843", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.7.tgz" + } + }, + "1.0.9": { + "name": "abbrev", + "version": "1.0.9", + "devDependencies": { + "tap": "^5.7.2" + }, + "dist": { + "shasum": "91b4792588a7738c25f35dd6f63752a2f8776135", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.9.tgz" + } + }, + "1.1.0": { + "name": "abbrev", + "version": "1.1.0", + "devDependencies": { + "tap": "^10.1" + }, + "dist": { + "shasum": "d0554c2256636e2f56e7c2e5ad183f859428d81f", + "tarball": 
"https://registry.npmjs.org/abbrev/-/abbrev-1.1.0.tgz" + } + }, + "1.1.1": { + "name": "abbrev", + "version": "1.1.1", + "devDependencies": { + "tap": "^10.1" + }, + "dist": { + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "shasum": "f8f2c887ad10bf67f634f005b6987fed3179aac8", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz" + } + } + }, + "modified": "2020-10-13T05:04:03.636Z" +} diff --git a/smoke-tests/content/abbrev/-/abbrev-1.0.4.tgz b/smoke-tests/content/abbrev/-/abbrev-1.0.4.tgz new file mode 100644 index 0000000000000..dfd1b55919e2f Binary files /dev/null and b/smoke-tests/content/abbrev/-/abbrev-1.0.4.tgz differ diff --git a/smoke-tests/content/abbrev/-/abbrev-1.1.1.tgz b/smoke-tests/content/abbrev/-/abbrev-1.1.1.tgz new file mode 100644 index 0000000000000..4d9504504f5a3 Binary files /dev/null and b/smoke-tests/content/abbrev/-/abbrev-1.1.1.tgz differ diff --git a/smoke-tests/content/promise-all-reject-late.json b/smoke-tests/content/promise-all-reject-late.json new file mode 100644 index 0000000000000..e243b92a3b92e --- /dev/null +++ b/smoke-tests/content/promise-all-reject-late.json @@ -0,0 +1,138 @@ +{ + "_id": "promise-all-reject-late", + "_rev": "1-bb2ac9479869cc8479d1dd01c568acc0", + "name": "promise-all-reject-late", + "dist-tags": { + "latest": "1.0.1" + }, + "versions": { + "1.0.0": { + "name": "promise-all-reject-late", + "version": "1.0.0", + "description": "Like Promise.all, but save rejections until all promises are resolved", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "https://izs.me" + }, + "license": "ISC", + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "tap": { + "check-coverage": true + }, + "devDependencies": { + "tap": "^14.10.5" + }, + "gitHead": "e9614a15b22f421aa97ff281d4e0f23681edbe98", + "_id": "promise-all-reject-late@1.0.0", + "_nodeVersion": "13.3.0", + "_npmVersion": "6.13.4", + "dist": { + "integrity": "sha512-f5XvVl++12pEo7Sv7f7FGfzVuVpeY2msNKjn7nNcXyOSKh5uVu7IAzDO6RE9hDVoHJhxvg+gqEacwkZ891Se5g==", + "shasum": "4fa37515e2d78c3b0462414402a8debce62b8b9f", + "tarball": "https://registry.npmjs.org/promise-all-reject-late/-/promise-all-reject-late-1.0.0.tgz", + "fileCount": 7, + "unpackedSize": 123039, + "npm-signature": "-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJeBmldCRA9TVsSAnZWagAAaigP/2CarvNCbglNS0dgjOoH\n7ZuFo1cG+N8BkZct00TyEJjuB+5UUmv9TSnZogfEOGutvMqUTSRhvm3fOWsJ\n7TXs8zJ6SotDR9+xlxqi/skpYXfRdNjdaMvo9kYO5jaV84pstdbl17sPeYXd\nCudbAKp1sYodlaJyqpyfUd2PWUNe/VGLODmjLogHB4/bevT3tdjsdauKrUS4\n3VFw8sS1Fwp7P2YneNIK3C1TY/Yb66KysZO23VsQemCQFKXpQJMa9B6yj8zs\n5BQp+W5tM70IfW6OXD0+Vt2jWr9jmKmoWVEiL5usJT3zD7vRbeH3xQvSEgDD\nskI8vH8iJ+3EbEOWTGlIu7mX88Dp2KnHOoRUkOR03WJWuGnsTC8Uyqi0F1Xd\nFeFlaeNzynR/R2LcdRNiFOM+1xtzfAtVGF7TIp9UjgJwSNNkEMlkNzQqSiC7\n/AeqsAYoBBNmYWY2fvXdS9HQ4HfIGjI++jCYWX4I7sUvOjqfcwlEz8MwromA\nqeBAFPdvnB0F/q/AOOLkcdsO81jES7ts0nB7bDt0rDbztWWq34BSMDnNoSDo\nDE9q8u7g68tQcr3WmOQr4ro10sSbJVJZmz8DSJKCbVJ+FN0+GM+49oyyhIFl\nOjokXn5U8ASEdiZnmFnt51dr9A4fyjhehotJA6qSs7t2fBe86VnufijC971U\nv3Jc\r\n=lLwf\r\n-----END PGP SIGNATURE-----\r\n" + }, + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "_npmUser": { + "name": "isaacs", + "email": "i@izs.me" + }, + "directories": {}, + "_npmOperationalInternal": { + "host": "s3://npm-registry-packages", + 
"tmp": "tmp/promise-all-reject-late_1.0.0_1577478492470_0.9438095135747766" + }, + "_hasShrinkwrap": false + }, + "1.0.1": { + "name": "promise-all-reject-late", + "version": "1.0.1", + "description": "Like Promise.all, but save rejections until all promises are resolved", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "https://izs.me" + }, + "license": "ISC", + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "tap": { + "check-coverage": true + }, + "devDependencies": { + "tap": "^14.10.5" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "gitHead": "c892a9db86650c9229ab4cc70395106684d6818a", + "_id": "promise-all-reject-late@1.0.1", + "_nodeVersion": "12.14.1", + "_npmVersion": "6.13.6", + "dist": { + "integrity": "sha512-vuf0Lf0lOxyQREH7GDIOUMLS7kz+gs8i6B+Yi8dC68a2sychGrHTJYghMBD6k7eUcH0H5P73EckCA48xijWqXw==", + "shasum": "f8ebf13483e5ca91ad809ccc2fcf25f26f8643c2", + "tarball": "https://registry.npmjs.org/promise-all-reject-late/-/promise-all-reject-late-1.0.1.tgz", + "fileCount": 8, + "unpackedSize": 123171, + "npm-signature": "-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJeL65kCRA9TVsSAnZWagAAas4P/2WFFJvncp0LWb3DbE0t\ndx9BhZEwY3W8V6ug8uKvph24LoQp1PakkncscKS7PsCVHyIslD+fi6V99AmI\nOmL2ECAMUd5N69Cs8eTi4tKTNtUoIslfCu0+SMlCAF11D7oBXSabdOxGQofA\nuksoHdCqGM6M1y2BGjK7FR8dSwvgbQCPaUzazZ5w7w4XqVxDlzbvNj2E5mSF\n5HjlT5q239uNQppwPIFpisyi9DKa0ran2N7F2ioZ1PHvhFCqo6rmL8tAQsxQ\n+3OA4eFD0FJCJuqd3MOaY66mkncfNpmPvQYMyigKBUdJJyrgNsB67yfaFduy\ndK198Bnva5kotttQ4EHxM6gkqRm2d9o1/sYmAUtDELgrVDxzeNl+yG+nCkho\n1ta4cY+wy1dTjqAYaprQJ855nIeGGnr3tvz4dEGX/5eyh5K+oYVOYRFvWFX6\nVlEhBmSRqamfW5N1ndMyY18FM+Vc12yu66yZ3z1FqbgEGqdf3EP3lwWqClpP\nbPdXANzHM1FIz1PGHC7IZFWXH5KV1z+JXXahg/d8CLzz0PY6jaBt4c2xDvo7\nLaEAm7kNMbdewKvuTuG7x2Kqyf1KwjpOhXMrq6h0rlFm0pRt0xAArQ9Sglw8\n2Vq9Ic9EEsSIpzA5iQ86O1xkTREGHTB3uTRXUJixXIGkhLkhBB+Uj9y+GoOh\nX+Dm\r\n=yF+m\r\n-----END PGP SIGNATURE-----\r\n" + }, + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "_npmUser": { + "name": "isaacs", + "email": "i@izs.me" + }, + "directories": {}, + "_npmOperationalInternal": { + "host": "s3://npm-registry-packages", + "tmp": "tmp/promise-all-reject-late_1.0.1_1580183139628_0.5159334029276426" + }, + "_hasShrinkwrap": false + } + }, + "time": { + "created": "2019-12-27T20:28:12.428Z", + "1.0.0": "2019-12-27T20:28:12.645Z", + "modified": "2020-01-28T03:45:42.154Z", + "1.0.1": "2020-01-28T03:45:39.762Z" + }, + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "description": "Like Promise.all, but save rejections until all promises are resolved", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "https://izs.me" + }, + "license": "ISC", + "readme": "# promise-all-reject-late\n\nLike Promise.all, but save rejections until all promises are resolved.\n\nThis is handy when you want to do a bunch of things in parallel, and\nrollback on failure, without clobbering or conflicting with those parallel\nactions that may be in flight. 
For example, creating a bunch of files,\nand deleting any if they don't all succeed.\n\nExample:\n\n```js\nconst lateReject = require('promise-all-reject-late')\n\nconst { promisify } = require('util')\nconst fs = require('fs')\nconst writeFile = promisify(fs.writeFile)\n\nconst createFilesOrRollback = (files) => {\n return lateReject(files.map(file => writeFile(file, 'some data')))\n .catch(er => {\n // try to clean up, then fail with the initial error\n // we know that all write attempts are finished at this point\n return lateReject(files.map(file => rimraf(file)))\n .catch(er => {\n console.error('failed to clean up, youre on your own i guess', er)\n })\n .then(() => {\n // fail with the original error\n throw er\n })\n })\n}\n```\n\n## API\n\n* `lateReject([array, of, promises])` - Resolve all the promises,\n returning a promise that rejects with the first error, or resolves with\n the array of results, but only after all promises are settled.\n", + "readmeFilename": "README.md", + "_cached": false, + "_contentLength": 0 +} \ No newline at end of file diff --git a/smoke-tests/content/promise-all-reject-late.min.json b/smoke-tests/content/promise-all-reject-late.min.json new file mode 100644 index 0000000000000..699be7aaf2e82 --- /dev/null +++ b/smoke-tests/content/promise-all-reject-late.min.json @@ -0,0 +1,44 @@ +{ + "name": "promise-all-reject-late", + "dist-tags": { + "latest": "1.0.1" + }, + "versions": { + "1.0.0": { + "name": "promise-all-reject-late", + "version": "1.0.0", + "devDependencies": { + "tap": "^14.10.5" + }, + "dist": { + "integrity": "sha512-f5XvVl++12pEo7Sv7f7FGfzVuVpeY2msNKjn7nNcXyOSKh5uVu7IAzDO6RE9hDVoHJhxvg+gqEacwkZ891Se5g==", + "shasum": "4fa37515e2d78c3b0462414402a8debce62b8b9f", + "tarball": "https://registry.npmjs.org/promise-all-reject-late/-/promise-all-reject-late-1.0.0.tgz", + "fileCount": 7, + "unpackedSize": 123039, + "npm-signature": "-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJeBmldCRA9TVsSAnZWagAAaigP/2CarvNCbglNS0dgjOoH\n7ZuFo1cG+N8BkZct00TyEJjuB+5UUmv9TSnZogfEOGutvMqUTSRhvm3fOWsJ\n7TXs8zJ6SotDR9+xlxqi/skpYXfRdNjdaMvo9kYO5jaV84pstdbl17sPeYXd\nCudbAKp1sYodlaJyqpyfUd2PWUNe/VGLODmjLogHB4/bevT3tdjsdauKrUS4\n3VFw8sS1Fwp7P2YneNIK3C1TY/Yb66KysZO23VsQemCQFKXpQJMa9B6yj8zs\n5BQp+W5tM70IfW6OXD0+Vt2jWr9jmKmoWVEiL5usJT3zD7vRbeH3xQvSEgDD\nskI8vH8iJ+3EbEOWTGlIu7mX88Dp2KnHOoRUkOR03WJWuGnsTC8Uyqi0F1Xd\nFeFlaeNzynR/R2LcdRNiFOM+1xtzfAtVGF7TIp9UjgJwSNNkEMlkNzQqSiC7\n/AeqsAYoBBNmYWY2fvXdS9HQ4HfIGjI++jCYWX4I7sUvOjqfcwlEz8MwromA\nqeBAFPdvnB0F/q/AOOLkcdsO81jES7ts0nB7bDt0rDbztWWq34BSMDnNoSDo\nDE9q8u7g68tQcr3WmOQr4ro10sSbJVJZmz8DSJKCbVJ+FN0+GM+49oyyhIFl\nOjokXn5U8ASEdiZnmFnt51dr9A4fyjhehotJA6qSs7t2fBe86VnufijC971U\nv3Jc\r\n=lLwf\r\n-----END PGP SIGNATURE-----\r\n" + } + }, + "1.0.1": { + "name": "promise-all-reject-late", + "version": "1.0.1", + "devDependencies": { + "tap": "^14.10.5" + }, + "dist": { + "integrity": "sha512-vuf0Lf0lOxyQREH7GDIOUMLS7kz+gs8i6B+Yi8dC68a2sychGrHTJYghMBD6k7eUcH0H5P73EckCA48xijWqXw==", + "shasum": "f8ebf13483e5ca91ad809ccc2fcf25f26f8643c2", + "tarball": "https://registry.npmjs.org/promise-all-reject-late/-/promise-all-reject-late-1.0.1.tgz", + "fileCount": 8, + "unpackedSize": 123171, + "npm-signature": "-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJeL65kCRA9TVsSAnZWagAAas4P/2WFFJvncp0LWb3DbE0t\ndx9BhZEwY3W8V6ug8uKvph24LoQp1PakkncscKS7PsCVHyIslD+fi6V99AmI\nOmL2ECAMUd5N69Cs8eTi4tKTNtUoIslfCu0+SMlCAF11D7oBXSabdOxGQofA\nuksoHdCqGM6M1y2BGjK7FR8dSwvgbQCPaUzazZ5w7w4XqVxDlzbvNj2E5mSF\n5HjlT5q239uNQppwPIFpisyi9DKa0ran2N7F2ioZ1PHvhFCqo6rmL8tAQsxQ\n+3OA4eFD0FJCJuqd3MOaY66mkncfNpmPvQYMyigKBUdJJyrgNsB67yfaFduy\ndK198Bnva5kotttQ4EHxM6gkqRm2d9o1/sYmAUtDELgrVDxzeNl+yG+nCkho\n1ta4cY+wy1dTjqAYaprQJ855nIeGGnr3tvz4dEGX/5eyh5K+oYVOYRFvWFX6\nVlEhBmSRqamfW5N1ndMyY18FM+Vc12yu66yZ3z1FqbgEGqdf3EP3lwWqClpP\nbPdXANzHM1FIz1PGHC7IZFWXH5KV1z+JXXahg/d8CLzz0PY6jaBt4c2xDvo7\nLaEAm7kNMbdewKvuTuG7x2Kqyf1KwjpOhXMrq6h0rlFm0pRt0xAArQ9Sglw8\n2Vq9Ic9EEsSIpzA5iQ86O1xkTREGHTB3uTRXUJixXIGkhLkhBB+Uj9y+GoOh\nX+Dm\r\n=yF+m\r\n-----END PGP SIGNATURE-----\r\n" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + } + }, + "modified": "2020-01-28T03:45:42.154Z", + "_cached": false, + "_contentLength": 2803 +} \ No newline at end of file diff --git a/smoke-tests/content/promise-all-reject-late/-/promise-all-reject-late-1.0.1.tgz b/smoke-tests/content/promise-all-reject-late/-/promise-all-reject-late-1.0.1.tgz new file mode 100644 index 0000000000000..7da4044238766 Binary files /dev/null and b/smoke-tests/content/promise-all-reject-late/-/promise-all-reject-late-1.0.1.tgz differ diff --git a/smoke-tests/index.js b/smoke-tests/index.js new file mode 100644 index 0000000000000..9235c8960a26a --- /dev/null +++ b/smoke-tests/index.js @@ -0,0 +1,243 @@ +const fs = require('fs') +const { promisify } = require('util') +const execAsync = promisify(require('child_process').exec) +const { resolve } = require('path') +const t = require('tap') + +const normalizePath = path => path.replace(/[A-Z]:/, '').replace(/\\/g, '/') +const cwd = normalizePath(process.cwd()) +t.cleanSnapshot = s => s.split(cwd).join('{CWD}') + .split(registry).join('https://registry.npmjs.org/') + .split(normalizePath(process.execPath)).join('node') + .split(process.cwd()).join('{CWD}') + .replace(/\\+/g, '/') + .replace(/\r\n/g, '\n') + .replace(/ \(in a browser\)/g, '') + .replace(/^npm@.* /mg, 'npm ') + +// setup server +const { start, stop, registry } = require('./server.js') +t.before(start) +t.teardown(stop) + +// setup fixtures +const path = t.testdir({ + '.npmrc': '', + cache: {}, + project: {}, + bin: {}, +}) +const localPrefix = resolve(path, 'project') +const userconfigLocation = resolve(path, '.npmrc') +const npmLocation = resolve(__dirname, '..') +const cacheLocation = resolve(path, 'cache') +const binLocation = resolve(path, 'bin') +const env = { + HOME: path, + PATH: `${process.env.PATH}:${binLocation}`, +} +const npmOpts = `--registry=${registry} --cache="${cacheLocation}" --userconfig="${userconfigLocation}" --no-audit --no-update-notifier --loglevel=silly` +const npmBin = `"${process.execPath}" "${npmLocation}" ${npmOpts}` +const exec = async cmd => { + const res = await execAsync(cmd, { cwd: localPrefix, env }) + if (res.stderr) + console.error(res.stderr) + return String(res.stdout) +} +const readFile = filename => + String(fs.readFileSync(resolve(localPrefix, filename))) + +t.test('npm init', async t => { + const cmd = `${npmBin} init -y` + const cmdRes = await exec(cmd) + + t.matchSnapshot(cmdRes, 'should have successful npm init result') + const pkg = JSON.parse(fs.readFileSync(resolve(localPrefix, 'package.json'))) + t.equal(pkg.name, 'project', 'should have expected generated name') + t.equal(pkg.version, '1.0.0', 'should have expected 
generated version') +}) + +t.test('npm (no args)', async t => { + const cmd = `"${process.execPath}" "${npmLocation}" --no-audit --no-update-notifier` + const cmdRes = await execAsync(cmd, { cwd: localPrefix, env }) + .catch(err => { + t.equal(err.code, 1, 'should exit with error code') + return err + }) + + t.equal(cmdRes.stderr, '', 'should have no stderr output') + t.matchSnapshot(String(cmdRes.stdout), + 'should have expected no args output') +}) + +t.test('npm install prodDep@version', async t => { + const cmd = `${npmBin} install abbrev@1.0.4` + const cmdRes = await exec(cmd) + + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), + 'should have expected install reify output') + t.matchSnapshot( + readFile('package.json'), + 'should have expected package.json result' + ) + t.matchSnapshot( + readFile('package-lock.json'), + 'should have expected lockfile result' + ) +}) + +t.test('npm install dev dep', async t => { + const cmd = `${npmBin} install -D promise-all-reject-late` + const cmdRes = await exec(cmd) + + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), + 'should have expected dev dep added reify output') + t.matchSnapshot( + readFile('package.json'), + 'should have expected dev dep added package.json result' + ) + t.matchSnapshot( + readFile('package-lock.json'), + 'should have expected dev dep added lockfile result' + ) +}) + +t.test('npm ls', async t => { + const cmd = `${npmBin} ls` + const cmdRes = await exec(cmd) + + t.matchSnapshot(cmdRes, + 'should have expected ls output') +}) + +t.test('npm fund', async t => { + const cmd = `${npmBin} fund` + const cmdRes = await exec(cmd) + + t.matchSnapshot(cmdRes, + 'should have expected fund output') +}) + +t.test('npm explain', async t => { + const cmd = `${npmBin} explain abbrev` + const cmdRes = await exec(cmd) + + t.matchSnapshot(cmdRes, + 'should have expected explain output') +}) + +t.test('npm diff', async t => { + const cmd = `${npmBin} diff --diff=abbrev@1.0.4 --diff=abbrev@1.1.1` + const cmdRes = await exec(cmd) + + t.matchSnapshot(cmdRes, + 'should have expected diff output') +}) + +t.test('npm outdated', async t => { + const cmd = `${npmBin} outdated` + const cmdRes = await exec(cmd) + + t.matchSnapshot(cmdRes, + 'should have expected outdated output') +}) + +t.test('npm set-script', async t => { + const cmd = `${npmBin} set-script "hello" "echo Hello"` + const cmdRes = await exec(cmd) + + t.matchSnapshot(cmdRes, + 'should have expected set-script output') + t.matchSnapshot( + readFile('package.json'), + 'should have expected script added package.json result' + ) +}) + +t.test('npm run-script', async t => { + const cmd = `${npmBin} run hello` + const cmdRes = await exec(cmd) + + t.matchSnapshot(cmdRes, + 'should have expected run-script output') +}) + +t.test('npm prefix', async t => { + const cmd = `${npmBin} prefix` + const cmdRes = await exec(cmd) + + t.matchSnapshot(cmdRes, + 'should have expected prefix output') +}) + +t.test('npm view', async t => { + const cmd = `${npmBin} view abbrev@1.0.4` + const cmdRes = await exec(cmd) + + t.matchSnapshot(cmdRes, + 'should have expected view output') +}) + +t.test('npm update dep', async t => { + const cmd = `${npmBin} update abbrev` + const cmdRes = await exec(cmd) + + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), + 'should have expected update reify output') + t.matchSnapshot( + readFile('package.json'), + 'should have expected update package.json result' + ) + t.matchSnapshot( + readFile('package-lock.json'), + 'should have expected update lockfile result' + ) +}) + +t.test('npm 
uninstall', async t => { + const cmd = `${npmBin} uninstall promise-all-reject-late` + const cmdRes = await exec(cmd) + + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), + 'should have expected uninstall reify output') + t.matchSnapshot( + readFile('package.json'), + 'should have expected uninstall package.json result' + ) + t.matchSnapshot( + readFile('package-lock.json'), + 'should have expected uninstall lockfile result' + ) +}) + +t.test('npm pkg', async t => { + let cmd = `${npmBin} pkg get license` + let cmdRes = await exec(cmd) + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), + 'should have expected pkg get output') + + cmd = `${npmBin} pkg set tap[test-env][0]=LC_ALL=sk` + cmdRes = await exec(cmd) + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), + 'should have expected pkg set output') + + t.matchSnapshot( + readFile('package.json'), + 'should have expected npm pkg set modified package.json result' + ) + + cmd = `${npmBin} pkg get` + cmdRes = await exec(cmd) + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), + 'should print package.json contents') + + cmd = `${npmBin} pkg delete tap` + cmdRes = await exec(cmd) + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), + 'should have expected pkg delete output') + + t.matchSnapshot( + readFile('package.json'), + 'should have expected npm pkg delete modified package.json result' + ) +}) diff --git a/smoke-tests/server.js b/smoke-tests/server.js new file mode 100644 index 0000000000000..b864114af64a8 --- /dev/null +++ b/smoke-tests/server.js @@ -0,0 +1,272 @@ +/* istanbul ignore file */ +const {join, dirname} = require('path') +const {existsSync, readFileSync, writeFileSync} = require('fs') +const PORT = 12345 + (+process.env.TAP_CHILD_ID || 0) +const http = require('http') +const https = require('https') + +const mkdirp = require('mkdirp') +const doProxy = process.env.ARBORIST_TEST_PROXY +const missing = /\/@isaacs(\/|%2[fF])(this-does-not-exist-at-all|testing-missing-tgz\/-\/)/ +const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*' +const { gzipSync, unzipSync } = require('zlib') + +let advisoryBulkResponse = null +let failAdvisoryBulk = false +let auditResponse = null +let failAudit = false +const startServer = () => new Promise((res, rej) => { + const server = exports.server = http.createServer((req, res) => { + res.setHeader('connection', 'close') + + if (req.url === '/-/npm/v1/security/advisories/bulk') { + const body = [] + req.on('data', c => body.push(c)) + req.on('end', () => { + res.setHeader('connection', 'close') + if (failAdvisoryBulk) { + res.statusCode = 503 + return res.end('no advisory bulk for you') + } + if (!advisoryBulkResponse) { + if (auditResponse && !failAudit) { + // simulate what the registry does when quick audits are allowed, + // but advisory bulk requests are not + res.statusCode = 405 + return res.end(JSON.stringify({ + code: 'MethodNotAllowedError', + message: 'POST is not allowed', + })) + } else { + res.statusCode = 404 + return res.end('not found') + } + } + if (doProxy && !existsSync(advisoryBulkResponse)) { + // hit the main registry, then fall back to staging for now + // XXX: remove this when bulk advisory endpoint pushed to production! 
+ const opts = { + host: 'registry.npmjs.org', + method: req.method, + path: req.url, + headers: { + ...req.headers, + accept: '*', + host: 'registry.npmjs.org', + connection: 'close', + 'if-none-match': '', + }, + } + const handleUpstream = upstream => { + res.statusCode = upstream.statusCode + if (upstream.statusCode >= 300 || upstream.statusCode < 200) { + console.error('UPSTREAM ERROR', upstream.statusCode) + return upstream.pipe(res) + } + res.setHeader('content-encoding', upstream.headers['content-encoding']) + const file = advisoryBulkResponse + console.error('PROXY', `${req.url} -> ${file} ${upstream.statusCode}`) + mkdirp.sync(dirname(file)) + const data = [] + upstream.on('end', () => { + const out = Buffer.concat(data) + const obj = JSON.parse(unzipSync(out).toString()) + writeFileSync(file, JSON.stringify(obj, 0, 2) + '\n') + res.end(out) + }) + upstream.on('data', c => data.push(c)) + } + return https.request(opts).on('response', upstream => { + if (upstream.statusCode !== 200) { + console.error('ATTEMPTING TO PROXY FROM STAGING') + console.error('NOTE: THIS WILL FAIL WHEN NOT ON VPN!') + opts.host = 'security-microservice-3-west.npm.red' + opts.headers.host = opts.host + opts.path = '/v1/advisories/bulk' + https.request(opts) + .on('response', upstream => handleUpstream(upstream)) + .end(Buffer.concat(body)) + } else + handleUpstream(upstream) + }).end(Buffer.concat(body)) + } else { + res.setHeader('content-encoding', 'gzip') + res.end(gzipSync(readFileSync(advisoryBulkResponse))) + } + }) + return + } else if (req.url === '/-/npm/v1/security/audits/quick') { + const body = [] + req.on('data', c => body.push(c)) + req.on('end', () => { + res.setHeader('connection', 'close') + if (failAudit) { + res.statusCode = 503 + return res.end('no audit for you') + } + if (!auditResponse) { + res.statusCode = 404 + return res.end('not found') + } + if (doProxy && !existsSync(auditResponse)) { + return https.request({ + host: 'registry.npmjs.org', + method: req.method, + path: req.url, + headers: { + ...req.headers, + accept: '*', + host: 'registry.npmjs.org', + connection: 'close', + 'if-none-match': '', + }, + }).on('response', upstream => { + res.statusCode = upstream.statusCode + if (upstream.statusCode >= 300 || upstream.statusCode < 200) { + console.error('UPSTREAM ERROR', upstream.statusCode) + // don't save if it's not a valid response + return upstream.pipe(res) + } + res.setHeader('content-encoding', upstream.headers['content-encoding']) + const file = auditResponse + console.error('PROXY', `${req.url} -> ${file} ${upstream.statusCode}`) + mkdirp.sync(dirname(file)) + const data = [] + upstream.on('end', () => { + const out = Buffer.concat(data) + // make it a bit prettier to read later + const obj = JSON.parse(unzipSync(out).toString()) + writeFileSync(file, JSON.stringify(obj, 0, 2) + '\n') + res.end(out) + }) + upstream.on('data', c => data.push(c)) + }).end(Buffer.concat(body)) + } else { + res.setHeader('content-encoding', 'gzip') + res.end(gzipSync(readFileSync(auditResponse))) + } + }) + return + } + + const f = join(__dirname, 'content', join('/', req.url.replace(/@/, '').replace(/%2f/i, '/'))) + const isCorgi = req.headers.accept.includes('application/vnd.npm.install-v1+json') + const file = f + ( + isCorgi && existsSync(`${f}.min.json`) ? '.min.json' + : existsSync(`${f}.json`) ? '.json' + : existsSync(`${f}/index.json`) ? 
'index.json' + : '' + ) + + try { + const body = readFileSync(file) + res.setHeader('content-length', body.length) + res.setHeader('content-type', /\.min\.json$/.test(file) ? corgiDoc + : /\.json$/.test(file) ? 'application/json' + : 'application/octet-stream') + res.end(body) + } catch (er) { + // testing things going missing from the registry somehow + if (missing.test(req.url)) { + res.statusCode = 404 + res.end('{"error": "not found"}') + return + } + + if (doProxy) { + return https.get({ + host: 'registry.npmjs.org', + path: req.url, + headers: { + ...req.headers, + accept: '*', + 'accept-encoding': 'identity', + host: 'registry.npmjs.org', + connection: 'close', + 'if-none-match': '', + }, + }).on('response', upstream => { + const errorStatus = + upstream.statusCode >= 300 || upstream.statusCode < 200 + + if (errorStatus) + console.error('UPSTREAM ERROR', upstream.statusCode) + + const ct = upstream.headers['content-type'] + const isJson = ct.includes('application/json') + const file = isJson ? f + '.json' : f + console.error('PROXY', `${req.url} -> ${file} ${ct}`) + mkdirp.sync(dirname(file)) + const data = [] + res.statusCode = upstream.statusCode + res.setHeader('content-type', ct) + upstream.on('end', () => { + console.error('ENDING', req.url) + const out = Buffer.concat(data) + if (!errorStatus) { + if (isJson) { + const obj = JSON.parse(out.toString()) + writeFileSync(file, JSON.stringify(obj, 0, 2) + '\n') + const mrm = require('minify-registry-metadata') + const minFile = file.replace(/\.json$/, '.min.json') + writeFileSync(minFile, JSON.stringify(mrm(obj), 0, 2) + '\n') + console.error('WROTE JSONS', [file, minFile]) + } else + writeFileSync(file, out) + } + res.end(out) + }) + upstream.on('data', c => data.push(c)) + }).end() + } + + res.statusCode = er.code === 'ENOENT' ? 404 : 500 + if (res.method === 'GET') + console.error(er) + res.setHeader('content-type', 'text/plain') + res.end(er.stack) + } + }) + server.listen(PORT, res) +}) + +exports.auditResponse = value => { + if (auditResponse && auditResponse !== value) { + throw new Error('setting audit response, but already set\n' + + '(did you forget to call the returned function on teardown?)') + } + auditResponse = value + return () => auditResponse = null +} +exports.failAudit = () => { + failAudit = true + return () => failAudit = false +} + +exports.advisoryBulkResponse = value => { + if (advisoryBulkResponse && advisoryBulkResponse !== value) { + throw new Error('setting advisory bulk response, but already set\n' + + '(did you forget to call the returned function on teardown?)') + } + advisoryBulkResponse = value + return () => advisoryBulkResponse = null +} +exports.failAdvisoryBulk = () => { + failAdvisoryBulk = true + return () => failAdvisoryBulk = false +} + +exports.registry = `http://localhost:${PORT}/` + +exports.start = startServer +exports.stop = () => exports.server.close() + +if (require.main === module) { + startServer().then(() => { + console.log(`Mock registry live at: + ${exports.registry} +Press ^D to close gracefully.`) + }) + process.openStdin() + process.stdin.on('end', () => exports.stop()) +} diff --git a/tap-snapshots/smoke-tests/index.js.test.cjs b/tap-snapshots/smoke-tests/index.js.test.cjs new file mode 100644 index 0000000000000..0a79e38cdfa03 --- /dev/null +++ b/tap-snapshots/smoke-tests/index.js.test.cjs @@ -0,0 +1,769 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. 
+ * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`smoke-tests/index.js TAP npm (no args) > should have expected no args output 1`] = ` +npm <command> + +Usage: + +npm install install all the dependencies in your project +npm install <foo> add the <foo> dependency to your project +npm test run this project's tests +npm run <foo> run the script named <foo> +npm <command> -h quick help on <command> +npm -l display usage info for all commands +npm help <term> search for help on <term> +npm help npm more involved overview + +All commands: + + access, adduser, audit, bin, bugs, cache, ci, completion, + config, dedupe, deprecate, diff, dist-tag, docs, doctor, + edit, exec, explain, explore, find-dupes, fund, get, help, + hook, init, install, install-ci-test, install-test, link, + ll, login, logout, ls, org, outdated, owner, pack, ping, + pkg, prefix, profile, prune, publish, rebuild, repo, + restart, root, run-script, search, set, set-script, + shrinkwrap, star, stars, start, stop, team, test, token, + uninstall, unpublish, unstar, update, version, view, whoami + +Specify configs in the ini-formatted file: + {CWD}/smoke-tests/tap-testdir-index/.npmrc +or on the command line via: npm <command> --key=value + +More configuration info: npm help config +Configuration fields: npm help 7 config + +npm {CWD} + +` + +exports[`smoke-tests/index.js TAP npm diff > should have expected diff output 1`] = ` +diff --git a/package.json b/package.json +index v1.0.4..v1.1.1 100644 +--- a/package.json ++++ b/package.json +@@ -1,15 +1,21 @@ + { + "name": "abbrev", +- "version": "1.0.4", ++ "version": "1.1.1", + "description": "Like ruby's abbrev module, but in js", + "author": "Isaac Z. Schlueter <i@izs.me>", +- "main": "./lib/abbrev.js", ++ "main": "abbrev.js", + "scripts": { +- "test": "node lib/abbrev.js" ++ "test": "tap test.js --100", ++ "preversion": "npm test", ++ "postversion": "npm publish", ++ "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": "http://github.com/isaacs/abbrev-js", +- "license": { +- "type": "MIT", +- "url": "https://github.com/isaacs/abbrev-js/raw/master/LICENSE" +- } ++ "license": "ISC", ++ "devDependencies": { ++ "tap": "^10.1" ++ }, ++ "files": [ ++ "abbrev.js" ++ ] + } +diff --git a/LICENSE b/LICENSE +index v1.0.4..v1.1.1 100644 +--- a/LICENSE ++++ b/LICENSE +@@ -1,4 +1,27 @@ +-Copyright 2009, 2010, 2011 Isaac Z. Schlueter. ++This software is dual-licensed under the ISC and MIT licenses. ++You may use this software under EITHER of the following licenses. ++ ++---------- ++ ++The ISC License ++ ++Copyright (c) Isaac Z. Schlueter and Contributors ++ ++Permission to use, copy, modify, and/or distribute this software for any ++purpose with or without fee is hereby granted, provided that the above ++copyright notice and this permission notice appear in all copies. ++ ++THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES ++WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF ++MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ++ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ++WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ++ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR ++IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. ++ ++---------- ++ ++Copyright Isaac Z. Schlueter and Contributors + All rights reserved. 
+ + Permission is hereby granted, free of charge, to any person +diff --git a/lib/abbrev.js b/lib/abbrev.js +deleted file mode 100644 +index v1.0.4..v1.1.1 +--- a/lib/abbrev.js ++++ b/lib/abbrev.js +@@ -1,111 +0,0 @@ +- +-module.exports = exports = abbrev.abbrev = abbrev +- +-abbrev.monkeyPatch = monkeyPatch +- +-function monkeyPatch () { +- Object.defineProperty(Array.prototype, 'abbrev', { +- value: function () { return abbrev(this) }, +- enumerable: false, configurable: true, writable: true +- }) +- +- Object.defineProperty(Object.prototype, 'abbrev', { +- value: function () { return abbrev(Object.keys(this)) }, +- enumerable: false, configurable: true, writable: true +- }) +-} +- +-function abbrev (list) { +- if (arguments.length !== 1 || !Array.isArray(list)) { +- list = Array.prototype.slice.call(arguments, 0) +- } +- for (var i = 0, l = list.length, args = [] ; i < l ; i ++) { +- args[i] = typeof list[i] === "string" ? list[i] : String(list[i]) +- } +- +- // sort them lexicographically, so that they're next to their nearest kin +- args = args.sort(lexSort) +- +- // walk through each, seeing how much it has in common with the next and previous +- var abbrevs = {} +- , prev = "" +- for (var i = 0, l = args.length ; i < l ; i ++) { +- var current = args[i] +- , next = args[i + 1] || "" +- , nextMatches = true +- , prevMatches = true +- if (current === next) continue +- for (var j = 0, cl = current.length ; j < cl ; j ++) { +- var curChar = current.charAt(j) +- nextMatches = nextMatches && curChar === next.charAt(j) +- prevMatches = prevMatches && curChar === prev.charAt(j) +- if (!nextMatches && !prevMatches) { +- j ++ +- break +- } +- } +- prev = current +- if (j === cl) { +- abbrevs[current] = current +- continue +- } +- for (var a = current.substr(0, j) ; j <= cl ; j ++) { +- abbrevs[a] = current +- a += current.charAt(j) +- } +- } +- return abbrevs +-} +- +-function lexSort (a, b) { +- return a === b ? 0 : a > b ? 
1 : -1 +-} +- +- +-// tests +-if (module === require.main) { +- +-var assert = require("assert") +-var util = require("util") +- +-console.log("running tests") +-function test (list, expect) { +- var actual = abbrev(list) +- assert.deepEqual(actual, expect, +- "abbrev("+util.inspect(list)+") === " + util.inspect(expect) + "/n"+ +- "actual: "+util.inspect(actual)) +- actual = abbrev.apply(exports, list) +- assert.deepEqual(abbrev.apply(exports, list), expect, +- "abbrev("+list.map(JSON.stringify).join(",")+") === " + util.inspect(expect) + "/n"+ +- "actual: "+util.inspect(actual)) +-} +- +-test([ "ruby", "ruby", "rules", "rules", "rules" ], +-{ rub: 'ruby' +-, ruby: 'ruby' +-, rul: 'rules' +-, rule: 'rules' +-, rules: 'rules' +-}) +-test(["fool", "foom", "pool", "pope"], +-{ fool: 'fool' +-, foom: 'foom' +-, poo: 'pool' +-, pool: 'pool' +-, pop: 'pope' +-, pope: 'pope' +-}) +-test(["a", "ab", "abc", "abcd", "abcde", "acde"], +-{ a: 'a' +-, ab: 'ab' +-, abc: 'abc' +-, abcd: 'abcd' +-, abcde: 'abcde' +-, ac: 'acde' +-, acd: 'acde' +-, acde: 'acde' +-}) +- +-console.log("pass") +- +-} +/ No newline at end of file +diff --git a/abbrev.js b/abbrev.js +new file mode 100644 +index v1.0.4..v1.1.1 +--- a/abbrev.js ++++ b/abbrev.js +@@ -0,0 +1,61 @@ ++module.exports = exports = abbrev.abbrev = abbrev ++ ++abbrev.monkeyPatch = monkeyPatch ++ ++function monkeyPatch () { ++ Object.defineProperty(Array.prototype, 'abbrev', { ++ value: function () { return abbrev(this) }, ++ enumerable: false, configurable: true, writable: true ++ }) ++ ++ Object.defineProperty(Object.prototype, 'abbrev', { ++ value: function () { return abbrev(Object.keys(this)) }, ++ enumerable: false, configurable: true, writable: true ++ }) ++} ++ ++function abbrev (list) { ++ if (arguments.length !== 1 || !Array.isArray(list)) { ++ list = Array.prototype.slice.call(arguments, 0) ++ } ++ for (var i = 0, l = list.length, args = [] ; i < l ; i ++) { ++ args[i] = typeof list[i] === "string" ? list[i] : String(list[i]) ++ } ++ ++ // sort them lexicographically, so that they're next to their nearest kin ++ args = args.sort(lexSort) ++ ++ // walk through each, seeing how much it has in common with the next and previous ++ var abbrevs = {} ++ , prev = "" ++ for (var i = 0, l = args.length ; i < l ; i ++) { ++ var current = args[i] ++ , next = args[i + 1] || "" ++ , nextMatches = true ++ , prevMatches = true ++ if (current === next) continue ++ for (var j = 0, cl = current.length ; j < cl ; j ++) { ++ var curChar = current.charAt(j) ++ nextMatches = nextMatches && curChar === next.charAt(j) ++ prevMatches = prevMatches && curChar === prev.charAt(j) ++ if (!nextMatches && !prevMatches) { ++ j ++ ++ break ++ } ++ } ++ prev = current ++ if (j === cl) { ++ abbrevs[current] = current ++ continue ++ } ++ for (var a = current.substr(0, j) ; j <= cl ; j ++) { ++ abbrevs[a] = current ++ a += current.charAt(j) ++ } ++ } ++ return abbrevs ++} ++ ++function lexSort (a, b) { ++ return a === b ? 0 : a > b ? 
1 : -1 ++} + +` + +exports[`smoke-tests/index.js TAP npm explain > should have expected explain output 1`] = ` +abbrev@1.0.4 +node_modules/abbrev + abbrev@"^1.0.4" from the root project + +` + +exports[`smoke-tests/index.js TAP npm fund > should have expected fund output 1`] = ` +project@1.0.0 +\`-- https://github.com/sponsors/isaacs + \`-- promise-all-reject-late@1.0.1 + + +` + +exports[`smoke-tests/index.js TAP npm init > should have successful npm init result 1`] = ` +Wrote to {CWD}/smoke-tests/tap-testdir-index/project/package.json: + +{ + "name": "project", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo /"Error: no test specified/" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC" +} + + + +` + +exports[`smoke-tests/index.js TAP npm install dev dep > should have expected dev dep added lockfile result 1`] = ` +{ + "name": "project", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + }, + "devDependencies": { + "promise-all-reject-late": "^1.0.1" + } + }, + "node_modules/abbrev": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.4.tgz", + "integrity": "sha1-vVWuXkE7oXIu5Mq6H26hBBSlns0=" + }, + "node_modules/promise-all-reject-late": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/promise-all-reject-late/-/promise-all-reject-late-1.0.1.tgz", + "integrity": "sha512-vuf0Lf0lOxyQREH7GDIOUMLS7kz+gs8i6B+Yi8dC68a2sychGrHTJYghMBD6k7eUcH0H5P73EckCA48xijWqXw==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + } + }, + "dependencies": { + "abbrev": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.4.tgz", + "integrity": "sha1-vVWuXkE7oXIu5Mq6H26hBBSlns0=" + }, + "promise-all-reject-late": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/promise-all-reject-late/-/promise-all-reject-late-1.0.1.tgz", + "integrity": "sha512-vuf0Lf0lOxyQREH7GDIOUMLS7kz+gs8i6B+Yi8dC68a2sychGrHTJYghMBD6k7eUcH0H5P73EckCA48xijWqXw==", + "dev": true + } + } +} + +` + +exports[`smoke-tests/index.js TAP npm install dev dep > should have expected dev dep added package.json result 1`] = ` +{ + "name": "project", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo /"Error: no test specified/" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + }, + "devDependencies": { + "promise-all-reject-late": "^1.0.1" + } +} + +` + +exports[`smoke-tests/index.js TAP npm install dev dep > should have expected dev dep added reify output 1`] = ` + +added 1 package + +1 package is looking for funding + run \`npm fund\` for details + +` + +exports[`smoke-tests/index.js TAP npm install prodDep@version > should have expected install reify output 1`] = ` + +added 1 package + +` + +exports[`smoke-tests/index.js TAP npm install prodDep@version > should have expected lockfile result 1`] = ` +{ + "name": "project", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + } + }, + "node_modules/abbrev": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.4.tgz", + "integrity": "sha1-vVWuXkE7oXIu5Mq6H26hBBSlns0=" + } + }, + "dependencies": { + "abbrev": { + "version": 
"1.0.4", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.4.tgz", + "integrity": "sha1-vVWuXkE7oXIu5Mq6H26hBBSlns0=" + } + } +} + +` + +exports[`smoke-tests/index.js TAP npm install prodDep@version > should have expected package.json result 1`] = ` +{ + "name": "project", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo /"Error: no test specified/" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + } +} + +` + +exports[`smoke-tests/index.js TAP npm ls > should have expected ls output 1`] = ` +project@1.0.0 {CWD}/smoke-tests/tap-testdir-index/project ++-- abbrev@1.0.4 +\`-- promise-all-reject-late@1.0.1 + + +` + +exports[`smoke-tests/index.js TAP npm outdated > should have expected outdated output 1`] = ` +Package Current Wanted Latest Location Depended by +abbrev 1.0.4 1.1.1 1.1.1 node_modules/abbrev project + +` + +exports[`smoke-tests/index.js TAP npm pkg > should have expected npm pkg delete modified package.json result 1`] = ` +{ + "name": "project", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo /"Error: no test specified/" && exit 1", + "hello": "echo Hello" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + } +} + +` + +exports[`smoke-tests/index.js TAP npm pkg > should have expected npm pkg set modified package.json result 1`] = ` +{ + "name": "project", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo /"Error: no test specified/" && exit 1", + "hello": "echo Hello" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + }, + "tap": { + "test-env": [ + "LC_ALL=sk" + ] + } +} + +` + +exports[`smoke-tests/index.js TAP npm pkg > should have expected pkg delete output 1`] = ` + +` + +exports[`smoke-tests/index.js TAP npm pkg > should have expected pkg get output 1`] = ` +"ISC" + +` + +exports[`smoke-tests/index.js TAP npm pkg > should have expected pkg set output 1`] = ` + +` + +exports[`smoke-tests/index.js TAP npm pkg > should print package.json contents 1`] = ` +{ + "name": "project", + "version": "1.0.0", + "description": "", + "ma", + "scripts": { + "test": "echo /"Error: no test specified/" && exit 1", + "hello": "echo Hello" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + }, + "tap": { + "test-env": [ + "LC_ALL=sk" + ] + } +} + +` + +exports[`smoke-tests/index.js TAP npm prefix > should have expected prefix output 1`] = ` +{CWD}/smoke-tests/tap-testdir-index/project + +` + +exports[`smoke-tests/index.js TAP npm run-script > should have expected run-script output 1`] = ` + +> project@1.0.0 hello +> echo Hello + +Hello + +` + +exports[`smoke-tests/index.js TAP npm set-script > should have expected script added package.json result 1`] = ` +{ + "name": "project", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo /"Error: no test specified/" && exit 1", + "hello": "echo Hello" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + }, + "devDependencies": { + "promise-all-reject-late": "^1.0.1" + } +} + +` + +exports[`smoke-tests/index.js TAP npm set-script > should have expected set-script output 1`] = ` + +` + +exports[`smoke-tests/index.js TAP npm uninstall > should have expected uninstall lockfile result 1`] = ` +{ + "name": 
"project", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + } + }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" + } + }, + "dependencies": { + "abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" + } + } +} + +` + +exports[`smoke-tests/index.js TAP npm uninstall > should have expected uninstall package.json result 1`] = ` +{ + "name": "project", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo /"Error: no test specified/" && exit 1", + "hello": "echo Hello" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + } +} + +` + +exports[`smoke-tests/index.js TAP npm uninstall > should have expected uninstall reify output 1`] = ` + +removed 1 package + +` + +exports[`smoke-tests/index.js TAP npm update dep > should have expected update lockfile result 1`] = ` +{ + "name": "project", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + }, + "devDependencies": { + "promise-all-reject-late": "^1.0.1" + } + }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" + }, + "node_modules/promise-all-reject-late": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/promise-all-reject-late/-/promise-all-reject-late-1.0.1.tgz", + "integrity": "sha512-vuf0Lf0lOxyQREH7GDIOUMLS7kz+gs8i6B+Yi8dC68a2sychGrHTJYghMBD6k7eUcH0H5P73EckCA48xijWqXw==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + } + }, + "dependencies": { + "abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" + }, + "promise-all-reject-late": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/promise-all-reject-late/-/promise-all-reject-late-1.0.1.tgz", + "integrity": "sha512-vuf0Lf0lOxyQREH7GDIOUMLS7kz+gs8i6B+Yi8dC68a2sychGrHTJYghMBD6k7eUcH0H5P73EckCA48xijWqXw==", + "dev": true + } + } +} + +` + +exports[`smoke-tests/index.js TAP npm update dep > should have expected update package.json result 1`] = ` +{ + "name": "project", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo /"Error: no test specified/" && exit 1", + "hello": "echo Hello" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + }, + "devDependencies": { + "promise-all-reject-late": "^1.0.1" + } +} + +` + +exports[`smoke-tests/index.js TAP npm update dep > should have expected update reify output 1`] = ` + +changed 1 package + +1 package is looking for funding + run \`npm fund\` for details + +` + +exports[`smoke-tests/index.js TAP npm view > should have expected view output 1`] = ` + +abbrev@1.0.4 | MIT | deps: none 
| versions: 8 +Like ruby's abbrev module, but in js +https://github.com/isaacs/abbrev-js#readme + +dist +.tarball: https://registry.npmjs.org/abbrev/-/abbrev-1.0.4.tgz +.shasum: bd55ae5e413ba1722ee4caba1f6ea10414a59ecd + +maintainers: +- nlf <quitlahok@gmail.com> +- ruyadorno <ruyadorno@hotmail.com> +- darcyclarke <darcy@darcyclarke.me> +- adam_baldwin <evilpacket@gmail.com> +- isaacs <i@izs.me> + +dist-tags: +latest: 1.1.1 + +published over a year ago by isaacs <i@izs.me> + +` diff --git a/tap-snapshots/test-lib-dist-tag.js-TAP.test.js b/tap-snapshots/test-lib-dist-tag.js-TAP.test.js deleted file mode 100644 index b135b1001bcec..0000000000000 --- a/tap-snapshots/test-lib-dist-tag.js-TAP.test.js +++ /dev/null @@ -1,109 +0,0 @@ -/* IMPORTANT - * This snapshot file is auto-generated, but designed for humans. - * It should be checked into source control and tracked carefully. - * Re-generate by setting TAP_SNAPSHOT=1 and running tests. - * Make sure to inspect the output below. Do not ignore changes! - */ -'use strict' -exports[`test/lib/dist-tag.js TAP add missing args > should exit usage error message 1`] = ` -npm dist-tag add <pkg>@<version> [<tag>] -npm dist-tag rm <pkg> <tag> -npm dist-tag ls [<pkg>] - -alias: dist-tags -` - -exports[`test/lib/dist-tag.js TAP add missing pkg name > should exit usage error message 1`] = ` -npm dist-tag add <pkg>@<version> [<tag>] -npm dist-tag rm <pkg> <tag> -npm dist-tag ls [<pkg>] - -alias: dist-tags -` - -exports[`test/lib/dist-tag.js TAP add new tag > should return success msg 1`] = ` -+c: @scoped/another@7.7.7 -` - -exports[`test/lib/dist-tag.js TAP add using valid semver range as name > should return success msg 1`] = ` -dist-tag add 1.0.0 to @scoped/another@7.7.7 - -` - -exports[`test/lib/dist-tag.js TAP borked cmd usage > should show usage error 1`] = ` -npm dist-tag add <pkg>@<version> [<tag>] -npm dist-tag rm <pkg> <tag> -npm dist-tag ls [<pkg>] - -alias: dist-tags -` - -exports[`test/lib/dist-tag.js TAP ls in current package > should list available tags for current package 1`] = ` -a: 0.0.1 -b: 0.5.0 -latest: 1.0.0 -` - -exports[`test/lib/dist-tag.js TAP ls on missing name in current package > should throw usage error message 1`] = ` -npm dist-tag add <pkg>@<version> [<tag>] -npm dist-tag rm <pkg> <tag> -npm dist-tag ls [<pkg>] - -alias: dist-tags -` - -exports[`test/lib/dist-tag.js TAP ls on missing package > should log no dist-tag found msg 1`] = ` -dist-tag ls Couldn't get dist-tag data for foo@latest - -` - -exports[`test/lib/dist-tag.js TAP ls on missing package > should throw error message 1`] = ` -Error: No dist-tags found for foo -` - -exports[`test/lib/dist-tag.js TAP ls on named package > should list tags for the specified package 1`] = ` -a: 0.0.2 -b: 0.6.0 -latest: 2.0.0 -` - -exports[`test/lib/dist-tag.js TAP no args in current package > should default to listing available tags for current package 1`] = ` -a: 0.0.1 -b: 0.5.0 -latest: 1.0.0 -` - -exports[`test/lib/dist-tag.js TAP only named package arg > should default to listing tags for the specified package 1`] = ` -a: 0.0.2 -b: 0.6.0 -latest: 2.0.0 -` - -exports[`test/lib/dist-tag.js TAP remove existing tag > should log remove info 1`] = ` -dist-tag del c from @scoped/another - -` - -exports[`test/lib/dist-tag.js TAP remove existing tag > should return success msg 1`] = ` --c: @scoped/another@7.7.7 -` - -exports[`test/lib/dist-tag.js TAP remove missing pkg name > should exit usage error message 1`] = ` -npm dist-tag add <pkg>@<version> [<tag>] -npm dist-tag rm <pkg> <tag> -npm 
dist-tag ls [<pkg>] - -alias: dist-tags -` - -exports[`test/lib/dist-tag.js TAP remove non-existing tag > should log error msg 1`] = ` -dist-tag del nonexistent from @scoped/another -dist-tag del nonexistent is not a dist-tag on @scoped/another - -` - -exports[`test/lib/dist-tag.js TAP set existing version > should log warn msg 1`] = ` -dist-tag add b to @scoped/another@0.6.0 -dist-tag add b is already set to version 0.6.0 - -` diff --git a/tap-snapshots/test-lib-init.js-TAP.test.js b/tap-snapshots/test-lib-init.js-TAP.test.js deleted file mode 100644 index 25015aab65cb6..0000000000000 --- a/tap-snapshots/test-lib-init.js-TAP.test.js +++ /dev/null @@ -1,19 +0,0 @@ -/* IMPORTANT - * This snapshot file is auto-generated, but designed for humans. - * It should be checked into source control and tracked carefully. - * Re-generate by setting TAP_SNAPSHOT=1 and running tests. - * Make sure to inspect the output below. Do not ignore changes! - */ -'use strict' -exports[`test/lib/init.js TAP classic npm init no args > should print helper info 1`] = ` -This utility will walk you through creating a package.json file. -It only covers the most common items, and tries to guess sensible defaults. - -See \`npm help init\` for definitive documentation on these fields -and exactly what they do. - -Use \`npm install <pkg>\` afterwards to install a package and -save it as a dependency in the package.json file. - -Press ^C at any time to quit. -` diff --git a/tap-snapshots/test-lib-link.js-TAP.test.js b/tap-snapshots/test-lib-link.js-TAP.test.js deleted file mode 100644 index ab1d5c6b830fb..0000000000000 --- a/tap-snapshots/test-lib-link.js-TAP.test.js +++ /dev/null @@ -1,30 +0,0 @@ -/* IMPORTANT - * This snapshot file is auto-generated, but designed for humans. - * It should be checked into source control and tracked carefully. - * Re-generate by setting TAP_SNAPSHOT=1 and running tests. - * Make sure to inspect the output below. Do not ignore changes! 
- */ -'use strict' -exports[`test/lib/link.js TAP link global linked pkg to local nm when using args > should create a local symlink to global pkg 1`] = ` -{CWD}/test/lib/link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/@myscope/bar -> {CWD}/test/lib/link-link-global-linked-pkg-to-local-nm-when-using-args/global-prefix/lib/node_modules/@myscope/bar -{CWD}/test/lib/link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/@myscope/linked -> {CWD}/test/lib/link-link-global-linked-pkg-to-local-nm-when-using-args/scoped-linked -{CWD}/test/lib/link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/a -> {CWD}/test/lib/link-link-global-linked-pkg-to-local-nm-when-using-args/global-prefix/lib/node_modules/a -{CWD}/test/lib/link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/link-me-too -> {CWD}/test/lib/link-link-global-linked-pkg-to-local-nm-when-using-args/link-me-too -{CWD}/test/lib/link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/test-pkg-link -> {CWD}/test/lib/link-link-global-linked-pkg-to-local-nm-when-using-args/test-pkg-link - -` - -exports[`test/lib/link.js TAP link pkg already in global space > should create a local symlink to global pkg 1`] = ` -{CWD}/test/lib/link-link-pkg-already-in-global-space/my-project/node_modules/@myscope/linked -> {CWD}/test/lib/link-link-pkg-already-in-global-space/scoped-linked - -` - -exports[`test/lib/link.js TAP link pkg already in global space when prefix is a symlink > should create a local symlink to global pkg 1`] = ` -{CWD}/test/lib/link-link-pkg-already-in-global-space-when-prefix-is-a-symlink/my-project/node_modules/@myscope/linked -> {CWD}/test/lib/link-link-pkg-already-in-global-space-when-prefix-is-a-symlink/scoped-linked - -` - -exports[`test/lib/link.js TAP link to globalDir when in current working dir of pkg and no args > should create a global link to current pkg 1`] = ` -{CWD}/test/lib/link-link-to-globalDir-when-in-current-working-dir-of-pkg-and-no-args/global-prefix/lib/node_modules/test-pkg-link -> {CWD}/test/lib/link-link-to-globalDir-when-in-current-working-dir-of-pkg-and-no-args/test-pkg-link - -` diff --git a/tap-snapshots/test-lib-ls.js-TAP.test.js b/tap-snapshots/test-lib-ls.js-TAP.test.js deleted file mode 100644 index 120a65baffae9..0000000000000 --- a/tap-snapshots/test-lib-ls.js-TAP.test.js +++ /dev/null @@ -1,602 +0,0 @@ -/* IMPORTANT - * This snapshot file is auto-generated, but designed for humans. - * It should be checked into source control and tracked carefully. - * Re-generate by setting TAP_SNAPSHOT=1 and running tests. - * Make sure to inspect the output below. Do not ignore changes! 
- */ -'use strict' -exports[`test/lib/ls.js TAP ls --depth=0 > should output tree containing only top-level dependencies 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls--depth-0 -+-- foo@1.0.0 -\`-- lorem@1.0.0 - -` - -exports[`test/lib/ls.js TAP ls --depth=1 > should output tree containing top-level deps and their deps only 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls--depth-1 -+-- a@1.0.0 -| \`-- b@1.0.0 -\`-- e@1.0.0 - -` - -exports[`test/lib/ls.js TAP ls --dev > should output tree containing dev deps 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls--dev -\`-- dev-dep@1.0.0 - \`-- foo@1.0.0 - \`-- bar@1.0.0 - -` - -exports[`test/lib/ls.js TAP ls --link > should output tree containing linked deps 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls--link -\`-- linked-dep@1.0.0 -> {CWD}/ls-ls--link/linked-dep - -` - -exports[`test/lib/ls.js TAP ls --long --depth=0 > should output tree containing top-level deps with descriptions 1`] = ` -test-npm-ls@1.0.0 -| {CWD}/ls-ls--long-depth-0 -| -+-- dev-dep@1.0.0 -| A DEV dep kind of dep -+-- lorem@1.0.0 -| -+-- optional-dep@1.0.0 -| Maybe a dep? -+-- peer-dep@1.0.0 -| Peer-dep description here -\`-- prod-dep@1.0.0 - A PROD dep kind of dep - -` - -exports[`test/lib/ls.js TAP ls --long > should output tree info with descriptions 1`] = ` -test-npm-ls@1.0.0 -| {CWD}/ls-ls--long -| -+-- dev-dep@1.0.0 -| | A DEV dep kind of dep -| \`-- foo@1.0.0 -| | -| \`-- bar@1.0.0 -| -+-- lorem@1.0.0 -| -+-- optional-dep@1.0.0 -| Maybe a dep? -+-- peer-dep@1.0.0 -| Peer-dep description here -\`-- prod-dep@1.0.0 - | A PROD dep kind of dep - \`-- bar@2.0.0 - A dep that bars - -` - -exports[`test/lib/ls.js TAP ls --only=development > should output tree containing only development deps 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls--only-development -\`-- dev-dep@1.0.0 - \`-- foo@1.0.0 - \`-- bar@1.0.0 - -` - -exports[`test/lib/ls.js TAP ls --only=prod > should output tree containing only prod deps 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls--only-prod -+-- lorem@1.0.0 -+-- optional-dep@1.0.0 -\`-- prod-dep@1.0.0 - \`-- bar@2.0.0 - -` - -exports[`test/lib/ls.js TAP ls --parseable --depth=0 > should output tree containing only top-level dependencies 1`] = ` -{CWD}/ls-ls-parseable--depth-0 -{CWD}/ls-ls-parseable--depth-0/node_modules/foo -{CWD}/ls-ls-parseable--depth-0/node_modules/lorem -` - -exports[`test/lib/ls.js TAP ls --parseable --depth=1 > should output parseable containing top-level deps and their deps only 1`] = ` -{CWD}/ls-ls-parseable--depth-1 -{CWD}/ls-ls-parseable--depth-1/node_modules/foo -{CWD}/ls-ls-parseable--depth-1/node_modules/lorem -{CWD}/ls-ls-parseable--depth-1/node_modules/bar -` - -exports[`test/lib/ls.js TAP ls --parseable --dev > should output tree containing dev deps 1`] = ` -{CWD}/ls-ls-parseable--dev -{CWD}/ls-ls-parseable--dev/node_modules/dev-dep -{CWD}/ls-ls-parseable--dev/node_modules/foo -{CWD}/ls-ls-parseable--dev/node_modules/bar -` - -exports[`test/lib/ls.js TAP ls --parseable --link > should output tree containing linked deps 1`] = ` -{CWD}/ls-ls-parseable--link -{CWD}/ls-ls-parseable--link/node_modules/linked-dep -` - -exports[`test/lib/ls.js TAP ls --parseable --long --depth=0 > should output tree containing top-level deps with descriptions 1`] = ` -{CWD}/ls-ls-parseable--long-depth-0:test-npm-ls@1.0.0 -{CWD}/ls-ls-parseable--long-depth-0/node_modules/dev-dep:dev-dep@1.0.0 -{CWD}/ls-ls-parseable--long-depth-0/node_modules/lorem:lorem@1.0.0 -{CWD}/ls-ls-parseable--long-depth-0/node_modules/optional-dep:optional-dep@1.0.0 
-{CWD}/ls-ls-parseable--long-depth-0/node_modules/peer-dep:peer-dep@1.0.0 -{CWD}/ls-ls-parseable--long-depth-0/node_modules/prod-dep:prod-dep@1.0.0 -` - -exports[`test/lib/ls.js TAP ls --parseable --long > should output tree info with descriptions 1`] = ` -{CWD}/ls-ls-parseable--long:test-npm-ls@1.0.0 -{CWD}/ls-ls-parseable--long/node_modules/dev-dep:dev-dep@1.0.0 -{CWD}/ls-ls-parseable--long/node_modules/lorem:lorem@1.0.0 -{CWD}/ls-ls-parseable--long/node_modules/optional-dep:optional-dep@1.0.0 -{CWD}/ls-ls-parseable--long/node_modules/peer-dep:peer-dep@1.0.0 -{CWD}/ls-ls-parseable--long/node_modules/prod-dep:prod-dep@1.0.0 -{CWD}/ls-ls-parseable--long/node_modules/foo:foo@1.0.0 -{CWD}/ls-ls-parseable--long/node_modules/prod-dep/node_modules/bar:bar@2.0.0 -{CWD}/ls-ls-parseable--long/node_modules/bar:bar@1.0.0 -` - -exports[`test/lib/ls.js TAP ls --parseable --long missing/invalid/extraneous > should output parseable result containing EXTRANEOUS/INVALID labels 1`] = ` -{CWD}/ls-ls-parseable--long-missing-invalid-extraneous:test-npm-ls@1.0.0 -{CWD}/ls-ls-parseable--long-missing-invalid-extraneous/node_modules/foo:foo@1.0.0:INVALID -{CWD}/ls-ls-parseable--long-missing-invalid-extraneous/node_modules/lorem:lorem@1.0.0:EXTRANEOUS -{CWD}/ls-ls-parseable--long-missing-invalid-extraneous/node_modules/bar:bar@1.0.0 -` - -exports[`test/lib/ls.js TAP ls --parseable --long print symlink target location > should output parseable results with symlink targets 1`] = ` -{CWD}/ls-ls-parseable--long-print-symlink-target-location:test-npm-ls@1.0.0 -{CWD}/ls-ls-parseable--long-print-symlink-target-location/node_modules/dev-dep:dev-dep@1.0.0 -{CWD}/ls-ls-parseable--long-print-symlink-target-location/node_modules/linked-dep:linked-dep@1.0.0:{CWD}/ls-ls-parseable--long-print-symlink-target-location/linked-dep -{CWD}/ls-ls-parseable--long-print-symlink-target-location/node_modules/lorem:lorem@1.0.0 -{CWD}/ls-ls-parseable--long-print-symlink-target-location/node_modules/optional-dep:optional-dep@1.0.0 -{CWD}/ls-ls-parseable--long-print-symlink-target-location/node_modules/peer-dep:peer-dep@1.0.0 -{CWD}/ls-ls-parseable--long-print-symlink-target-location/node_modules/prod-dep:prod-dep@1.0.0 -{CWD}/ls-ls-parseable--long-print-symlink-target-location/node_modules/foo:foo@1.0.0 -{CWD}/ls-ls-parseable--long-print-symlink-target-location/node_modules/prod-dep/node_modules/bar:bar@2.0.0 -{CWD}/ls-ls-parseable--long-print-symlink-target-location/node_modules/bar:bar@1.0.0 -` - -exports[`test/lib/ls.js TAP ls --parseable --long with extraneous deps > should output long parseable output with extraneous info 1`] = ` -{CWD}/ls-ls-parseable--long-with-extraneous-deps:test-npm-ls@1.0.0 -{CWD}/ls-ls-parseable--long-with-extraneous-deps/node_modules/foo:foo@1.0.0 -{CWD}/ls-ls-parseable--long-with-extraneous-deps/node_modules/lorem:lorem@1.0.0:EXTRANEOUS -{CWD}/ls-ls-parseable--long-with-extraneous-deps/node_modules/bar:bar@1.0.0 -` - -exports[`test/lib/ls.js TAP ls --parseable --only=development > should output tree containing only development deps 1`] = ` -{CWD}/ls-ls-parseable--only-development -{CWD}/ls-ls-parseable--only-development/node_modules/dev-dep -{CWD}/ls-ls-parseable--only-development/node_modules/foo -{CWD}/ls-ls-parseable--only-development/node_modules/bar -` - -exports[`test/lib/ls.js TAP ls --parseable --only=prod > should output tree containing only prod deps 1`] = ` -{CWD}/ls-ls-parseable--only-prod -{CWD}/ls-ls-parseable--only-prod/node_modules/lorem -{CWD}/ls-ls-parseable--only-prod/node_modules/optional-dep 
-{CWD}/ls-ls-parseable--only-prod/node_modules/prod-dep -{CWD}/ls-ls-parseable--only-prod/node_modules/prod-dep/node_modules/bar -` - -exports[`test/lib/ls.js TAP ls --parseable --production > should output tree containing production deps 1`] = ` -{CWD}/ls-ls-parseable--production -{CWD}/ls-ls-parseable--production/node_modules/lorem -{CWD}/ls-ls-parseable--production/node_modules/optional-dep -{CWD}/ls-ls-parseable--production/node_modules/prod-dep -{CWD}/ls-ls-parseable--production/node_modules/prod-dep/node_modules/bar -` - -exports[`test/lib/ls.js TAP ls --parseable cycle deps > should print tree output omitting deduped ref 1`] = ` -{CWD}/ls-ls-parseable-cycle-deps -{CWD}/ls-ls-parseable-cycle-deps/node_modules/a -{CWD}/ls-ls-parseable-cycle-deps/node_modules/b -` - -exports[`test/lib/ls.js TAP ls --parseable default --depth value should be 0 > should output parseable output containing only top-level dependencies 1`] = ` -{CWD}/ls-ls-parseable-default-depth-value-should-be-0 -{CWD}/ls-ls-parseable-default-depth-value-should-be-0/node_modules/foo -{CWD}/ls-ls-parseable-default-depth-value-should-be-0/node_modules/lorem -` - -exports[`test/lib/ls.js TAP ls --parseable empty location > should print empty result 1`] = ` -{CWD}/ls-ls-parseable-empty-location -` - -exports[`test/lib/ls.js TAP ls --parseable extraneous deps > should output containing problems info 1`] = ` -{CWD}/ls-ls-parseable-extraneous-deps -{CWD}/ls-ls-parseable-extraneous-deps/node_modules/foo -{CWD}/ls-ls-parseable-extraneous-deps/node_modules/lorem -{CWD}/ls-ls-parseable-extraneous-deps/node_modules/bar -` - -exports[`test/lib/ls.js TAP ls --parseable from and resolved properties > should not be printed in tree output 1`] = ` -{CWD}/ls-ls-parseable-from-and-resolved-properties -{CWD}/ls-ls-parseable-from-and-resolved-properties/node_modules/simple-output -` - -exports[`test/lib/ls.js TAP ls --parseable global > should print parseable output for global deps 1`] = ` -{CWD}/ls-ls-parseable-global -{CWD}/ls-ls-parseable-global/node_modules/a -{CWD}/ls-ls-parseable-global/node_modules/b -{CWD}/ls-ls-parseable-global/node_modules/b/node_modules/c -` - -exports[`test/lib/ls.js TAP ls --parseable json read problems > should print empty result 1`] = ` -{CWD}/ls-ls-parseable-json-read-problems -` - -exports[`test/lib/ls.js TAP ls --parseable missing package.json > should log all extraneous deps on error msg 1`] = ` -extraneous: bar@1.0.0 {CWD}/ls-ls-parseable-missing-package-json/node_modules/bar -extraneous: foo@1.0.0 {CWD}/ls-ls-parseable-missing-package-json/node_modules/foo -extraneous: lorem@1.0.0 {CWD}/ls-ls-parseable-missing-package-json/node_modules/lorem -` - -exports[`test/lib/ls.js TAP ls --parseable missing package.json > should output parseable missing name/version of top-level package 1`] = ` -{CWD}/ls-ls-parseable-missing-package-json -{CWD}/ls-ls-parseable-missing-package-json/node_modules/bar -{CWD}/ls-ls-parseable-missing-package-json/node_modules/foo -{CWD}/ls-ls-parseable-missing-package-json/node_modules/lorem -` - -exports[`test/lib/ls.js TAP ls --parseable missing/invalid/extraneous > should output parseable containing top-level deps and their deps only 1`] = ` -{CWD}/ls-ls-parseable-missing-invalid-extraneous -{CWD}/ls-ls-parseable-missing-invalid-extraneous/node_modules/foo -{CWD}/ls-ls-parseable-missing-invalid-extraneous/node_modules/lorem -{CWD}/ls-ls-parseable-missing-invalid-extraneous/node_modules/bar -` - -exports[`test/lib/ls.js TAP ls --parseable no args > should output parseable representation 
of dependencies structure 1`] = ` -{CWD}/ls-ls-parseable-no-args -{CWD}/ls-ls-parseable-no-args/node_modules/foo -{CWD}/ls-ls-parseable-no-args/node_modules/lorem -{CWD}/ls-ls-parseable-no-args/node_modules/bar -` - -exports[`test/lib/ls.js TAP ls --parseable resolved points to git ref > should output tree containing git refs 1`] = ` -{CWD}/ls-ls-parseable-resolved-points-to-git-ref -{CWD}/ls-ls-parseable-resolved-points-to-git-ref/node_modules/abbrev -` - -exports[`test/lib/ls.js TAP ls --parseable unmet optional dep > should output parseable with empty entry for missing optional deps 1`] = ` -{CWD}/ls-ls-parseable-unmet-optional-dep -{CWD}/ls-ls-parseable-unmet-optional-dep/node_modules/dev-dep -{CWD}/ls-ls-parseable-unmet-optional-dep/node_modules/lorem -{CWD}/ls-ls-parseable-unmet-optional-dep/node_modules/optional-dep -{CWD}/ls-ls-parseable-unmet-optional-dep/node_modules/peer-dep -{CWD}/ls-ls-parseable-unmet-optional-dep/node_modules/prod-dep -{CWD}/ls-ls-parseable-unmet-optional-dep/node_modules/foo -{CWD}/ls-ls-parseable-unmet-optional-dep/node_modules/prod-dep/node_modules/bar -{CWD}/ls-ls-parseable-unmet-optional-dep/node_modules/bar -` - -exports[`test/lib/ls.js TAP ls --parseable unmet peer dep > should output parseable signaling missing peer dep in problems 1`] = ` -{CWD}/ls-ls-parseable-unmet-peer-dep -{CWD}/ls-ls-parseable-unmet-peer-dep/node_modules/dev-dep -{CWD}/ls-ls-parseable-unmet-peer-dep/node_modules/lorem -{CWD}/ls-ls-parseable-unmet-peer-dep/node_modules/optional-dep -{CWD}/ls-ls-parseable-unmet-peer-dep/node_modules/peer-dep -{CWD}/ls-ls-parseable-unmet-peer-dep/node_modules/prod-dep -{CWD}/ls-ls-parseable-unmet-peer-dep/node_modules/foo -{CWD}/ls-ls-parseable-unmet-peer-dep/node_modules/prod-dep/node_modules/bar -{CWD}/ls-ls-parseable-unmet-peer-dep/node_modules/bar -` - -exports[`test/lib/ls.js TAP ls --parseable using aliases > should output tree containing aliases 1`] = ` -{CWD}/ls-ls-parseable-using-aliases -{CWD}/ls-ls-parseable-using-aliases/node_modules/a -` - -exports[`test/lib/ls.js TAP ls --parseable with filter arg > should output parseable contaning only occurrences of filtered by package 1`] = ` -{CWD}/ls-ls-parseable-with-filter-arg/node_modules/lorem -` - -exports[`test/lib/ls.js TAP ls --parseable with filter arg nested dep > should output parseable contaning only occurrences of filtered package 1`] = ` -{CWD}/ls-ls-parseable-with-filter-arg-nested-dep/node_modules/bar -` - -exports[`test/lib/ls.js TAP ls --parseable with missing filter arg > should output parseable output containing no dependencies info 1`] = ` - -` - -exports[`test/lib/ls.js TAP ls --parseable with multiple filter args > should output parseable contaning only occurrences of multiple filtered packages and their ancestors 1`] = ` -{CWD}/ls-ls-parseable-with-multiple-filter-args/node_modules/lorem -{CWD}/ls-ls-parseable-with-multiple-filter-args/node_modules/bar -` - -exports[`test/lib/ls.js TAP ls --production > should output tree containing production deps 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls--production -+-- lorem@1.0.0 -+-- optional-dep@1.0.0 -\`-- prod-dep@1.0.0 - \`-- bar@2.0.0 - -` - -exports[`test/lib/ls.js TAP ls broken resolved field > should NOT print git refs in output tree 1`] = ` -npm-broken-resolved-field-test@1.0.0 {CWD}/ls-ls-broken-resolved-field -\`-- a@1.0.1 - -` - -exports[`test/lib/ls.js TAP ls colored output > should output tree containing color info 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-colored-output -+-- foo@1.0.0 invalid -| \`-- bar@1.0.0 -+-- UNMET 
DEPENDENCY ipsum@^1.0.0 -\`-- lorem@1.0.0 extraneous - -` - -exports[`test/lib/ls.js TAP ls cycle deps > should print tree output containing deduped ref 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-cycle-deps -\`-- a@1.0.0 - \`-- b@1.0.0 - \`-- a@1.0.0 deduped - -` - -exports[`test/lib/ls.js TAP ls cycle deps with filter args > should print tree output containing deduped ref 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-cycle-deps-with-filter-args -\`-- a@1.0.0 - \`-- b@1.0.0 - \`-- a@1.0.0 deduped - -` - -exports[`test/lib/ls.js TAP ls deduped missing dep > should output parseable signaling missing peer dep in problems 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-deduped-missing-dep -+-- a@1.0.0 -| \`-- UNMET DEPENDENCY b@^1.0.0 deduped -\`-- UNMET DEPENDENCY b@^1.0.0 - -` - -exports[`test/lib/ls.js TAP ls default --depth value should be 0 > should output tree containing only top-level dependencies 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-default-depth-value-should-be-0 -+-- foo@1.0.0 -\`-- lorem@1.0.0 - -` - -exports[`test/lib/ls.js TAP ls empty location > should print empty result 1`] = ` -{CWD}/ls-ls-empty-location -\`-- (empty) - -` - -exports[`test/lib/ls.js TAP ls extraneous deps > should output containing problems info 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-extraneous-deps -+-- foo@1.0.0 -| \`-- bar@1.0.0 -\`-- lorem@1.0.0 extraneous - -` - -exports[`test/lib/ls.js TAP ls filter pkg arg using depth option > should list a in top-level only 1`] = ` -test-pkg-arg-filter-with-depth-opt@1.0.0 {CWD}/ls-ls-filter-pkg-arg-using-depth-option -\`-- a@1.0.0 - -` - -exports[`test/lib/ls.js TAP ls filter pkg arg using depth option > should print empty results msg 1`] = ` -test-pkg-arg-filter-with-depth-opt@1.0.0 {CWD}/ls-ls-filter-pkg-arg-using-depth-option -\`-- (empty) - -` - -exports[`test/lib/ls.js TAP ls filter pkg arg using depth option > should print expected result 1`] = ` -test-pkg-arg-filter-with-depth-opt@1.0.0 {CWD}/ls-ls-filter-pkg-arg-using-depth-option -\`-- b@1.0.0 - \`-- c@1.0.0 - \`-- d@1.0.0 - -` - -exports[`test/lib/ls.js TAP ls filtering by child of missing dep > should print tree and not duplicate child of missing items 1`] = ` -filter-by-child-of-missing-dep@1.0.0 {CWD}/ls-ls-filtering-by-child-of-missing-dep -+-- b@1.0.0 extraneous -| \`-- c@1.0.0 deduped -+-- c@1.0.0 extraneous -\`-- d@1.0.0 extraneous - \`-- c@2.0.0 extraneous - -` - -exports[`test/lib/ls.js TAP ls from and resolved properties > should not be printed in tree output 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-from-and-resolved-properties -\`-- simple-output@2.1.1 - -` - -exports[`test/lib/ls.js TAP ls global > should print tree and not mark top-level items extraneous 1`] = ` -{CWD}/ls-ls-global -+-- a@1.0.0 -\`-- b@1.0.0 - \`-- c@1.0.0 - -` - -exports[`test/lib/ls.js TAP ls invalid deduped dep > should output tree signaling mismatching peer dep in problems 1`] = ` -invalid-deduped-dep@1.0.0 {CWD}/ls-ls-invalid-deduped-dep -+-- a@1.0.0 -| \`-- b@1.0.0 deduped invalid -\`-- b@1.0.0 invalid - -` - -exports[`test/lib/ls.js TAP ls invalid peer dep > should output tree signaling mismatching peer dep in problems 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-invalid-peer-dep -+-- dev-dep@1.0.0 -| \`-- foo@1.0.0 -| \`-- bar@1.0.0 -+-- lorem@1.0.0 -+-- optional-dep@1.0.0 -+-- peer-dep@1.0.0 invalid -\`-- prod-dep@1.0.0 - \`-- bar@2.0.0 - -` - -exports[`test/lib/ls.js TAP ls json read problems > should print empty result 1`] = ` -{CWD}/ls-ls-json-read-problems -\`-- (empty) - -` - -exports[`test/lib/ls.js TAP ls loading a tree containing 
workspaces > should filter single workspace 1`] = ` -filter-by-child-of-missing-dep@1.0.0 {CWD}/ls-ls-loading-a-tree-containing-workspaces -\`-- a@1.0.0 -> {CWD}/ls-ls-loading-a-tree-containing-workspaces/a - -` - -exports[`test/lib/ls.js TAP ls loading a tree containing workspaces > should list workspaces properly 1`] = ` -filter-by-child-of-missing-dep@1.0.0 {CWD}/ls-ls-loading-a-tree-containing-workspaces -+-- a@1.0.0 -> {CWD}/ls-ls-loading-a-tree-containing-workspaces/a -| \`-- c@1.0.0 -\`-- b@1.0.0 -> {CWD}/ls-ls-loading-a-tree-containing-workspaces/b - -` - -exports[`test/lib/ls.js TAP ls missing package.json > should log all extraneous deps on error msg 1`] = ` -extraneous: bar@1.0.0 {CWD}/ls-ls-missing-package-json/node_modules/bar -extraneous: foo@1.0.0 {CWD}/ls-ls-missing-package-json/node_modules/foo -extraneous: lorem@1.0.0 {CWD}/ls-ls-missing-package-json/node_modules/lorem -` - -exports[`test/lib/ls.js TAP ls missing package.json > should output tree missing name/version of top-level package 1`] = ` -{CWD}/ls-ls-missing-package-json -+-- bar@1.0.0 extraneous -+-- foo@1.0.0 extraneous -| \`-- bar@1.0.0 deduped -\`-- lorem@1.0.0 extraneous - -` - -exports[`test/lib/ls.js TAP ls missing/invalid/extraneous > should output tree containing missing, invalid, extraneous labels 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-missing-invalid-extraneous -+-- foo@1.0.0 invalid -| \`-- bar@1.0.0 -+-- UNMET DEPENDENCY ipsum@^1.0.0 -\`-- lorem@1.0.0 extraneous - -` - -exports[`test/lib/ls.js TAP ls no args > should output tree representation of dependencies structure 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-no-args -+-- foo@1.0.0 -| \`-- bar@1.0.0 -\`-- lorem@1.0.0 - -` - -exports[`test/lib/ls.js TAP ls print deduped symlinks > should output tree containing linked deps 1`] = ` -print-deduped-symlinks@1.0.0 {CWD}/ls-ls-print-deduped-symlinks -+-- a@1.0.0 -| \`-- b@1.0.0 deduped -> {CWD}/ls-ls-print-deduped-symlinks/b -\`-- b@1.0.0 -> {CWD}/ls-ls-print-deduped-symlinks/b - -` - -exports[`test/lib/ls.js TAP ls resolved points to git ref > should output tree containing git refs 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-resolved-points-to-git-ref -\`-- abbrev@1.1.1 (git+ssh://git@github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c) - -` - -exports[`test/lib/ls.js TAP ls unmet optional dep > should output tree with empty entry for missing optional deps 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-unmet-optional-dep -+-- dev-dep@1.0.0 -| \`-- foo@1.0.0 -| \`-- bar@1.0.0 -+-- lorem@1.0.0 -+-- UNMET OPTIONAL DEPENDENCY missing-optional-dep@^1.0.0 -+-- optional-dep@1.0.0 invalid -+-- peer-dep@1.0.0 -\`-- prod-dep@1.0.0 - \`-- bar@2.0.0 - -` - -exports[`test/lib/ls.js TAP ls unmet peer dep > should output tree signaling missing peer dep in problems 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-unmet-peer-dep -\`-- UNMET DEPENDENCY peer-dep@* - -` - -exports[`test/lib/ls.js TAP ls using aliases > should output tree containing aliases 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-using-aliases -\`-- a@npm:b@1.0.0 - -` - -exports[`test/lib/ls.js TAP ls with args and dedupe entries > should print tree output containing deduped ref 1`] = ` -dedupe-entries@1.0.0 {CWD}/ls-ls-with-args-and-dedupe-entries -+-- @npmcli/a@1.0.0 -| \`-- @npmcli/b@1.1.2 deduped -+-- @npmcli/b@1.1.2 -\`-- @npmcli/c@1.0.0 - \`-- @npmcli/b@1.1.2 deduped - -` - -exports[`test/lib/ls.js TAP ls with args and different order of items > should print tree output containing deduped ref 1`] = ` -dedupe-entries@1.0.0 
{CWD}/ls-ls-with-args-and-different-order-of-items -+-- @npmcli/a@1.0.0 -| \`-- @npmcli/c@1.0.0 deduped -+-- @npmcli/b@1.1.2 -| \`-- @npmcli/c@1.0.0 deduped -\`-- @npmcli/c@1.0.0 - -` - -exports[`test/lib/ls.js TAP ls with dot filter arg > should output tree contaning only occurrences of filtered by package and colored output 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-with-dot-filter-arg -\`-- (empty) - -` - -exports[`test/lib/ls.js TAP ls with filter arg > should output tree contaning only occurrences of filtered by package and colored output 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-with-filter-arg -\`-- lorem@1.0.0 - -` - -exports[`test/lib/ls.js TAP ls with filter arg nested dep > should output tree contaning only occurrences of filtered package and its ancestors 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-with-filter-arg-nested-dep -\`-- foo@1.0.0 - \`-- bar@1.0.0 - -` - -exports[`test/lib/ls.js TAP ls with missing filter arg > should output tree containing no dependencies info 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-with-missing-filter-arg -\`-- (empty) - -` - -exports[`test/lib/ls.js TAP ls with multiple filter args > should output tree contaning only occurrences of multiple filtered packages and their ancestors 1`] = ` -test-npm-ls@1.0.0 {CWD}/ls-ls-with-multiple-filter-args -+-- foo@1.0.0 -| \`-- bar@1.0.0 -\`-- lorem@1.0.0 - -` - -exports[`test/lib/ls.js TAP ls with no args dedupe entries > should print tree output containing deduped ref 1`] = ` -dedupe-entries@1.0.0 {CWD}/ls-ls-with-no-args-dedupe-entries -+-- @npmcli/a@1.0.0 -| \`-- @npmcli/b@1.1.2 deduped -+-- @npmcli/b@1.1.2 -\`-- @npmcli/c@1.0.0 - \`-- @npmcli/b@1.1.2 deduped - -` - -exports[`test/lib/ls.js TAP ls with no args dedupe entries and not displaying all > should print tree output containing deduped ref 1`] = ` -dedupe-entries@1.0.0 {CWD}/ls-ls-with-no-args-dedupe-entries-and-not-displaying-all -+-- @npmcli/a@1.0.0 -+-- @npmcli/b@1.1.2 -\`-- @npmcli/c@1.0.0 - -` diff --git a/tap-snapshots/test-lib-outdated.js-TAP.test.js b/tap-snapshots/test-lib-outdated.js-TAP.test.js deleted file mode 100644 index 7f245b09ed920..0000000000000 --- a/tap-snapshots/test-lib-outdated.js-TAP.test.js +++ /dev/null @@ -1,154 +0,0 @@ -/* IMPORTANT - * This snapshot file is auto-generated, but designed for humans. - * It should be checked into source control and tracked carefully. - * Re-generate by setting TAP_SNAPSHOT=1 and running tests. - * Make sure to inspect the output below. Do not ignore changes! 
- */ -'use strict' -exports[`test/lib/outdated.js TAP should display outdated deps outdated --all > must match snapshot 1`] = ` - -Package Current Wanted Latest Location Depended by -alpha 1.0.0 1.0.1 1.0.1 node_modules/alpha outdated-should-display-outdated-deps -beta 1.0.0 1.0.1 1.0.1 node_modules/beta outdated-should-display-outdated-deps -gamma 1.0.1 1.0.1 2.0.0 node_modules/gamma outdated-should-display-outdated-deps -theta MISSING 1.0.1 1.0.1 - outdated-should-display-outdated-deps -` - -exports[`test/lib/outdated.js TAP should display outdated deps outdated --json --long > must match snapshot 1`] = ` - -{ - "alpha": { - "current": "1.0.0", - "wanted": "1.0.1", - "latest": "1.0.1", - "dependent": "outdated-should-display-outdated-deps", - "location": "{CWD}/test/lib/outdated-should-display-outdated-deps/node_modules/alpha", - "type": "dependencies" - }, - "beta": { - "current": "1.0.0", - "wanted": "1.0.1", - "latest": "1.0.1", - "dependent": "outdated-should-display-outdated-deps", - "location": "{CWD}/test/lib/outdated-should-display-outdated-deps/node_modules/beta", - "type": "peerDependencies" - }, - "gamma": { - "current": "1.0.1", - "wanted": "1.0.1", - "latest": "2.0.0", - "dependent": "outdated-should-display-outdated-deps", - "location": "{CWD}/test/lib/outdated-should-display-outdated-deps/node_modules/gamma", - "type": "dependencies" - }, - "theta": { - "wanted": "1.0.1", - "latest": "1.0.1", - "dependent": "outdated-should-display-outdated-deps", - "type": "dependencies" - } -} -` - -exports[`test/lib/outdated.js TAP should display outdated deps outdated --json > must match snapshot 1`] = ` - -{ - "alpha": { - "current": "1.0.0", - "wanted": "1.0.1", - "latest": "1.0.1", - "dependent": "outdated-should-display-outdated-deps", - "location": "{CWD}/test/lib/outdated-should-display-outdated-deps/node_modules/alpha" - }, - "beta": { - "current": "1.0.0", - "wanted": "1.0.1", - "latest": "1.0.1", - "dependent": "outdated-should-display-outdated-deps", - "location": "{CWD}/test/lib/outdated-should-display-outdated-deps/node_modules/beta" - }, - "gamma": { - "current": "1.0.1", - "wanted": "1.0.1", - "latest": "2.0.0", - "dependent": "outdated-should-display-outdated-deps", - "location": "{CWD}/test/lib/outdated-should-display-outdated-deps/node_modules/gamma" - }, - "theta": { - "wanted": "1.0.1", - "latest": "1.0.1", - "dependent": "outdated-should-display-outdated-deps" - } -} -` - -exports[`test/lib/outdated.js TAP should display outdated deps outdated --long > must match snapshot 1`] = ` - -Package Current Wanted Latest Location Depended by Package Type Homepage -alpha 1.0.0 1.0.1 1.0.1 node_modules/alpha outdated-should-display-outdated-deps dependencies -beta 1.0.0 1.0.1 1.0.1 node_modules/beta outdated-should-display-outdated-deps peerDependencies -gamma 1.0.1 1.0.1 2.0.0 node_modules/gamma outdated-should-display-outdated-deps dependencies -theta MISSING 1.0.1 1.0.1 - outdated-should-display-outdated-deps dependencies -` - -exports[`test/lib/outdated.js TAP should display outdated deps outdated --omit=dev --omit=peer > must match snapshot 1`] = ` - -Package Current Wanted Latest Location Depended by -alpha 1.0.0 1.0.1 1.0.1 node_modules/alpha outdated-should-display-outdated-deps -gamma 1.0.1 1.0.1 2.0.0 node_modules/gamma outdated-should-display-outdated-deps -theta MISSING 1.0.1 1.0.1 - outdated-should-display-outdated-deps -` - -exports[`test/lib/outdated.js TAP should display outdated deps outdated --omit=dev > must match snapshot 1`] = ` - -Package Current Wanted 
Latest Location Depended by -alpha 1.0.0 1.0.1 1.0.1 node_modules/alpha outdated-should-display-outdated-deps -beta 1.0.0 1.0.1 1.0.1 node_modules/beta outdated-should-display-outdated-deps -gamma 1.0.1 1.0.1 2.0.0 node_modules/gamma outdated-should-display-outdated-deps -theta MISSING 1.0.1 1.0.1 - outdated-should-display-outdated-deps -` - -exports[`test/lib/outdated.js TAP should display outdated deps outdated --omit=prod > must match snapshot 1`] = ` - -Package Current Wanted Latest Location Depended by -alpha 1.0.0 1.0.1 1.0.1 node_modules/alpha outdated-should-display-outdated-deps -beta 1.0.0 1.0.1 1.0.1 node_modules/beta outdated-should-display-outdated-deps -gamma 1.0.1 1.0.1 2.0.0 node_modules/gamma outdated-should-display-outdated-deps -` - -exports[`test/lib/outdated.js TAP should display outdated deps outdated --parseable --long > must match snapshot 1`] = ` - -{CWD}/test/lib/outdated-should-display-outdated-deps/node_modules/alpha:alpha@1.0.1:alpha@1.0.0:alpha@1.0.1:outdated-should-display-outdated-deps:dependencies: -{CWD}/test/lib/outdated-should-display-outdated-deps/node_modules/beta:beta@1.0.1:beta@1.0.0:beta@1.0.1:outdated-should-display-outdated-deps:peerDependencies: -{CWD}/test/lib/outdated-should-display-outdated-deps/node_modules/gamma:gamma@1.0.1:gamma@1.0.1:gamma@2.0.0:outdated-should-display-outdated-deps:dependencies: -:theta@1.0.1:MISSING:theta@1.0.1:outdated-should-display-outdated-deps:dependencies: -` - -exports[`test/lib/outdated.js TAP should display outdated deps outdated --parseable > must match snapshot 1`] = ` - -{CWD}/test/lib/outdated-should-display-outdated-deps/node_modules/alpha:alpha@1.0.1:alpha@1.0.0:alpha@1.0.1:outdated-should-display-outdated-deps -{CWD}/test/lib/outdated-should-display-outdated-deps/node_modules/beta:beta@1.0.1:beta@1.0.0:beta@1.0.1:outdated-should-display-outdated-deps -{CWD}/test/lib/outdated-should-display-outdated-deps/node_modules/gamma:gamma@1.0.1:gamma@1.0.1:gamma@2.0.0:outdated-should-display-outdated-deps -:theta@1.0.1:MISSING:theta@1.0.1:outdated-should-display-outdated-deps -` - -exports[`test/lib/outdated.js TAP should display outdated deps outdated > must match snapshot 1`] = ` - -Package Current Wanted Latest Location Depended by -alpha 1.0.0 1.0.1 1.0.1 node_modules/alpha outdated-should-display-outdated-deps -beta 1.0.0 1.0.1 1.0.1 node_modules/beta outdated-should-display-outdated-deps -gamma 1.0.1 1.0.1 2.0.0 node_modules/gamma outdated-should-display-outdated-deps -theta MISSING 1.0.1 1.0.1 - outdated-should-display-outdated-deps -` - -exports[`test/lib/outdated.js TAP should display outdated deps outdated global > must match snapshot 1`] = ` - -Package Current Wanted Latest Location Depended by -alpha 1.0.0 1.0.1 1.0.1 node_modules/alpha global -` - -exports[`test/lib/outdated.js TAP should display outdated deps outdated specific dep > must match snapshot 1`] = ` - -Package Current Wanted Latest Location Depended by -alpha 1.0.0 1.0.1 1.0.1 node_modules/alpha outdated-should-display-outdated-deps -` diff --git a/tap-snapshots/test-lib-utils-config.js-TAP.test.js b/tap-snapshots/test-lib-utils-config.js-TAP.test.js deleted file mode 100644 index 39927e600e123..0000000000000 --- a/tap-snapshots/test-lib-utils-config.js-TAP.test.js +++ /dev/null @@ -1,1110 +0,0 @@ -/* IMPORTANT - * This snapshot file is auto-generated, but designed for humans. - * It should be checked into source control and tracked carefully. - * Re-generate by setting TAP_SNAPSHOT=1 and running tests. 
- * Make sure to inspect the output below. Do not ignore changes! - */ -'use strict' -exports[`test/lib/utils/config.js TAP no working network interfaces, on windows > must match snapshot 1`] = ` -Object { - "defaults": Object { - "_auth": null, - "access": null, - "all": false, - "allow-same-version": false, - "also": null, - "always-auth": false, - "audit": true, - "audit-level": null, - "auth-type": "legacy", - "before": null, - "bin-links": true, - "browser": null, - "ca": null, - "cache": "{CACHE DIR} npm-cache", - "cache-lock-retries": 10, - "cache-lock-stale": 60000, - "cache-lock-wait": 10000, - "cache-max": null, - "cache-min": 10, - "cafile": null, - "call": "", - "cert": null, - "ci-name": null, - "cidr": null, - "color": true, - "commit-hooks": true, - "depth": null, - "description": true, - "dev": false, - "diff": Array [], - "diff-dst-prefix": "", - "diff-ignore-all-space": false, - "diff-name-only": false, - "diff-no-prefix": false, - "diff-src-prefix": "", - "diff-text": false, - "diff-unified": null, - "dry-run": false, - "editor": "vim", - "engine-strict": false, - "fetch-retries": 2, - "fetch-retry-factor": 10, - "fetch-retry-maxtimeout": 60000, - "fetch-retry-mintimeout": 10000, - "fetch-timeout": 300000, - "force": false, - "foreground-script": false, - "format-package-lock": true, - "fund": true, - "git": "git", - "git-tag-version": true, - "global": false, - "global-style": false, - "heading": "npm", - "https-proxy": null, - "if-present": false, - "ignore-prepublish": false, - "ignore-scripts": false, - "include": Array [], - "include-staged": false, - "init-author-email": "", - "init-author-name": "", - "init-author-url": "", - "init-license": "ISC", - "init-module": "~/.npm-init.js", - "init-version": "1.0.0", - "init.author.email": "", - "init.author.name": "", - "init.author.url": "", - "init.license": "ISC", - "init.module": "~/.npm-init.js", - "init.version": "1.0.0", - "json": false, - "key": null, - "legacy-bundling": false, - "legacy-peer-deps": false, - "link": false, - "local-address": undefined, - "loglevel": "notice", - "logs-max": 10, - "long": false, - "maxsockets": 50, - "message": "%s", - "node-options": null, - "node-version": "v14.8.0", - "noproxy": null, - "npm-version": "7.0.0", - "offline": false, - "omit": Array [], - "only": null, - "optional": true, - "otp": null, - "package": Array [], - "package-lock": true, - "package-lock-only": false, - "parseable": false, - "prefer-offline": false, - "prefer-online": false, - "preid": "", - "production": false, - "progress": true, - "proxy": null, - "read-only": false, - "rebuild-bundle": true, - "registry": "https://registry.npmjs.org/", - "rollback": true, - "save": true, - "save-bundle": false, - "save-dev": false, - "save-exact": false, - "save-optional": false, - "save-prefix": "^", - "save-prod": false, - "scope": "", - "script-shell": null, - "scripts-prepend-node-path": "warn-only", - "searchexclude": null, - "searchlimit": 20, - "searchopts": "", - "searchstaleness": 900, - "shell": "cmd.exe", - "shrinkwrap": true, - "sign-git-commit": false, - "sign-git-tag": false, - "sso-poll-frequency": 500, - "sso-type": "oauth", - "strict-peer-deps": false, - "strict-ssl": true, - "tag": "latest", - "tag-version-prefix": "v", - "timing": false, - "tmp": "/tmp", - "umask": 0, - "unicode": true, - "update-notifier": true, - "usage": false, - "user-agent": "npm/{npm-version} node/{node-version} {platform} {arch} {ci}", - "userconfig": "~/.npmrc", - "version": false, - "versions": false, - "viewer": 
"browser", - }, - "shorthands": Object { - "?": Array [ - "--usage", - ], - "a": Array [ - "--all", - ], - "B": Array [ - "--save-bundle", - ], - "c": Array [ - "--call", - ], - "C": Array [ - "--prefix", - ], - "d": Array [ - "--loglevel", - "info", - ], - "D": Array [ - "--save-dev", - ], - "dd": Array [ - "--loglevel", - "verbose", - ], - "ddd": Array [ - "--loglevel", - "silly", - ], - "desc": Array [ - "--description", - ], - "E": Array [ - "--save-exact", - ], - "enjoy-by": Array [ - "--before", - ], - "f": Array [ - "--force", - ], - "g": Array [ - "--global", - ], - "h": Array [ - "--usage", - ], - "H": Array [ - "--usage", - ], - "help": Array [ - "--usage", - ], - "l": Array [ - "--long", - ], - "local": Array [ - "--no-global", - ], - "m": Array [ - "--message", - ], - "n": Array [ - "--no-yes", - ], - "no-desc": Array [ - "--no-description", - ], - "no-reg": Array [ - "--no-registry", - ], - "noreg": Array [ - "--no-registry", - ], - "O": Array [ - "--save-optional", - ], - "p": Array [ - "--parseable", - ], - "P": Array [ - "--save-prod", - ], - "porcelain": Array [ - "--parseable", - ], - "q": Array [ - "--loglevel", - "warn", - ], - "quiet": Array [ - "--loglevel", - "warn", - ], - "readonly": Array [ - "--read-only", - ], - "reg": Array [ - "--registry", - ], - "s": Array [ - "--loglevel", - "silent", - ], - "S": Array [ - "--save", - ], - "silent": Array [ - "--loglevel", - "silent", - ], - "v": Array [ - "--version", - ], - "verbose": Array [ - "--loglevel", - "verbose", - ], - "y": Array [ - "--yes", - ], - }, - "types": Object { - "_auth": Array [ - null, - "{String TYPE}", - ], - "access": Array [ - null, - "restricted", - "public", - ], - "all": "{Boolean TYPE}", - "allow-same-version": "{Boolean TYPE}", - "also": Array [ - null, - "dev", - "development", - ], - "always-auth": "{Boolean TYPE}", - "audit": "{Boolean TYPE}", - "audit-level": Array [ - "low", - "moderate", - "high", - "critical", - "none", - null, - ], - "auth-type": Array [ - "legacy", - "sso", - "saml", - "oauth", - ], - "before": Array [ - null, - "{Date TYPE}", - ], - "bin-links": "{Boolean TYPE}", - "browser": Array [ - null, - "{Boolean TYPE}", - "{String TYPE}", - ], - "ca": Array [ - null, - "{String TYPE}", - "{Array TYPE}", - ], - "cache": "{PATH MODULE}", - "cache-lock-retries": "{Number TYPE}", - "cache-lock-stale": "{Number TYPE}", - "cache-lock-wait": "{Number TYPE}", - "cache-max": "{Number TYPE}", - "cache-min": "{Number TYPE}", - "cafile": "{PATH MODULE}", - "call": "{String TYPE}", - "cert": Array [ - null, - "{String TYPE}", - ], - "ci-name": Array [ - null, - "{String TYPE}", - ], - "cidr": Array [ - null, - "{String TYPE}", - "{Array TYPE}", - ], - "color": Array [ - "always", - "{Boolean TYPE}", - ], - "commit-hooks": "{Boolean TYPE}", - "depth": Array [ - null, - "{Number TYPE}", - ], - "description": "{Boolean TYPE}", - "dev": "{Boolean TYPE}", - "diff": Array [ - "{String TYPE}", - "{Array TYPE}", - ], - "diff-dst-prefix": "{String TYPE}", - "diff-ignore-all-space": "{Boolean TYPE}", - "diff-name-only": "{Boolean TYPE}", - "diff-no-prefix": "{Boolean TYPE}", - "diff-src-prefix": "{String TYPE}", - "diff-text": "{Boolean TYPE}", - "diff-unified": Array [ - null, - "{Number TYPE}", - ], - "dry-run": "{Boolean TYPE}", - "editor": "{String TYPE}", - "engine-strict": "{Boolean TYPE}", - "fetch-retries": "{Number TYPE}", - "fetch-retry-factor": "{Number TYPE}", - "fetch-retry-maxtimeout": "{Number TYPE}", - "fetch-retry-mintimeout": "{Number TYPE}", - "fetch-timeout": "{Number TYPE}", 
- "force": "{Boolean TYPE}", - "foreground-script": "{Boolean TYPE}", - "format-package-lock": "{Boolean TYPE}", - "fund": "{Boolean TYPE}", - "git": "{String TYPE}", - "git-tag-version": "{Boolean TYPE}", - "global": "{Boolean TYPE}", - "global-style": "{Boolean TYPE}", - "globalconfig": "{PATH MODULE}", - "heading": "{String TYPE}", - "https-proxy": Array [ - null, - "{URL MODULE}", - ], - "if-present": "{Boolean TYPE}", - "ignore-prepublish": "{Boolean TYPE}", - "ignore-scripts": "{Boolean TYPE}", - "include": Array [ - "{Array TYPE}", - "prod", - "dev", - "optional", - "peer", - ], - "include-staged": "{Boolean TYPE}", - "init-author-email": "{String TYPE}", - "init-author-name": "{String TYPE}", - "init-author-url": Array [ - "", - "{URL MODULE}", - ], - "init-license": "{String TYPE}", - "init-module": "{PATH MODULE}", - "init-version": "{SEMVER MODULE}", - "init.author.email": "{String TYPE}", - "init.author.name": "{String TYPE}", - "init.author.url": Array [ - "", - "{URL MODULE}", - ], - "init.license": "{String TYPE}", - "init.module": "{PATH MODULE}", - "init.version": "{SEMVER MODULE}", - "json": "{Boolean TYPE}", - "key": Array [ - null, - "{String TYPE}", - ], - "legacy-bundling": "{Boolean TYPE}", - "legacy-peer-deps": "{Boolean TYPE}", - "link": "{Boolean TYPE}", - "local-address": Array [ - undefined, - ], - "loglevel": Array [ - "silent", - "error", - "warn", - "notice", - "http", - "timing", - "info", - "verbose", - "silly", - ], - "logs-max": "{Number TYPE}", - "long": "{Boolean TYPE}", - "maxsockets": "{Number TYPE}", - "message": "{String TYPE}", - "node-options": Array [ - null, - "{String TYPE}", - ], - "node-version": Array [ - null, - "{SEMVER MODULE}", - ], - "noproxy": Array [ - null, - "{String TYPE}", - "{Array TYPE}", - ], - "npm-version": "{SEMVER MODULE}", - "offline": "{Boolean TYPE}", - "omit": Array [ - "{Array TYPE}", - "dev", - "optional", - "peer", - ], - "only": Array [ - null, - "dev", - "development", - "prod", - "production", - ], - "optional": "{Boolean TYPE}", - "otp": Array [ - null, - "{String TYPE}", - ], - "package": Array [ - "{String TYPE}", - "{Array TYPE}", - ], - "package-lock": "{Boolean TYPE}", - "package-lock-only": "{Boolean TYPE}", - "parseable": "{Boolean TYPE}", - "prefer-offline": "{Boolean TYPE}", - "prefer-online": "{Boolean TYPE}", - "prefix": "{PATH MODULE}", - "preid": "{String TYPE}", - "production": "{Boolean TYPE}", - "progress": "{Boolean TYPE}", - "proxy": Array [ - null, - false, - "{URL MODULE}", - ], - "read-only": "{Boolean TYPE}", - "rebuild-bundle": "{Boolean TYPE}", - "registry": Array [ - null, - "{URL MODULE}", - ], - "rollback": "{Boolean TYPE}", - "save": "{Boolean TYPE}", - "save-bundle": "{Boolean TYPE}", - "save-dev": "{Boolean TYPE}", - "save-exact": "{Boolean TYPE}", - "save-optional": "{Boolean TYPE}", - "save-prefix": "{String TYPE}", - "save-prod": "{Boolean TYPE}", - "scope": "{String TYPE}", - "script-shell": Array [ - null, - "{String TYPE}", - ], - "scripts-prepend-node-path": Array [ - "{Boolean TYPE}", - "auto", - "warn-only", - ], - "searchexclude": Array [ - null, - "{String TYPE}", - ], - "searchlimit": "{Number TYPE}", - "searchopts": "{String TYPE}", - "searchstaleness": "{Number TYPE}", - "shell": "{String TYPE}", - "shrinkwrap": "{Boolean TYPE}", - "sign-git-commit": "{Boolean TYPE}", - "sign-git-tag": "{Boolean TYPE}", - "sso-poll-frequency": "{Number TYPE}", - "sso-type": Array [ - null, - "oauth", - "saml", - ], - "strict-peer-deps": "{Boolean TYPE}", - "strict-ssl": "{Boolean 
TYPE}", - "tag": "{String TYPE}", - "tag-version-prefix": "{String TYPE}", - "timing": "{Boolean TYPE}", - "tmp": "{PATH MODULE}", - "umask": "{Umask TYPE}", - "unicode": "{Boolean TYPE}", - "update-notifier": "{Boolean TYPE}", - "usage": "{Boolean TYPE}", - "user-agent": "{String TYPE}", - "userconfig": "{PATH MODULE}", - "version": "{Boolean TYPE}", - "versions": "{Boolean TYPE}", - "viewer": "{String TYPE}", - }, -} -` - -exports[`test/lib/utils/config.js TAP working network interfaces, not windows > must match snapshot 1`] = ` -Object { - "defaults": Object { - "_auth": null, - "access": null, - "all": false, - "allow-same-version": false, - "also": null, - "always-auth": false, - "audit": true, - "audit-level": null, - "auth-type": "legacy", - "before": null, - "bin-links": true, - "browser": null, - "ca": null, - "cache": "{CACHE DIR} .npm", - "cache-lock-retries": 10, - "cache-lock-stale": 60000, - "cache-lock-wait": 10000, - "cache-max": null, - "cache-min": 10, - "cafile": null, - "call": "", - "cert": null, - "ci-name": null, - "cidr": null, - "color": true, - "commit-hooks": true, - "depth": null, - "description": true, - "dev": false, - "diff": Array [], - "diff-dst-prefix": "", - "diff-ignore-all-space": false, - "diff-name-only": false, - "diff-no-prefix": false, - "diff-src-prefix": "", - "diff-text": false, - "diff-unified": null, - "dry-run": false, - "editor": "vim", - "engine-strict": false, - "fetch-retries": 2, - "fetch-retry-factor": 10, - "fetch-retry-maxtimeout": 60000, - "fetch-retry-mintimeout": 10000, - "fetch-timeout": 300000, - "force": false, - "foreground-script": false, - "format-package-lock": true, - "fund": true, - "git": "git", - "git-tag-version": true, - "global": false, - "global-style": false, - "heading": "npm", - "https-proxy": null, - "if-present": false, - "ignore-prepublish": false, - "ignore-scripts": false, - "include": Array [], - "include-staged": false, - "init-author-email": "", - "init-author-name": "", - "init-author-url": "", - "init-license": "ISC", - "init-module": "~/.npm-init.js", - "init-version": "1.0.0", - "init.author.email": "", - "init.author.name": "", - "init.author.url": "", - "init.license": "ISC", - "init.module": "~/.npm-init.js", - "init.version": "1.0.0", - "json": false, - "key": null, - "legacy-bundling": false, - "legacy-peer-deps": false, - "link": false, - "local-address": undefined, - "loglevel": "notice", - "logs-max": 10, - "long": false, - "maxsockets": 50, - "message": "%s", - "node-options": null, - "node-version": "v14.8.0", - "noproxy": null, - "npm-version": "7.0.0", - "offline": false, - "omit": Array [], - "only": null, - "optional": true, - "otp": null, - "package": Array [], - "package-lock": true, - "package-lock-only": false, - "parseable": false, - "prefer-offline": false, - "prefer-online": false, - "preid": "", - "production": false, - "progress": true, - "proxy": null, - "read-only": false, - "rebuild-bundle": true, - "registry": "https://registry.npmjs.org/", - "rollback": true, - "save": true, - "save-bundle": false, - "save-dev": false, - "save-exact": false, - "save-optional": false, - "save-prefix": "^", - "save-prod": false, - "scope": "", - "script-shell": null, - "scripts-prepend-node-path": "warn-only", - "searchexclude": null, - "searchlimit": 20, - "searchopts": "", - "searchstaleness": 900, - "shell": "/usr/local/bin/bash", - "shrinkwrap": true, - "sign-git-commit": false, - "sign-git-tag": false, - "sso-poll-frequency": 500, - "sso-type": "oauth", - "strict-peer-deps": false, - 
"strict-ssl": true, - "tag": "latest", - "tag-version-prefix": "v", - "timing": false, - "tmp": "/tmp", - "umask": 0, - "unicode": true, - "update-notifier": true, - "usage": false, - "user-agent": "npm/{npm-version} node/{node-version} {platform} {arch} {ci}", - "userconfig": "~/.npmrc", - "version": false, - "versions": false, - "viewer": "man", - }, - "shorthands": Object { - "?": Array [ - "--usage", - ], - "a": Array [ - "--all", - ], - "B": Array [ - "--save-bundle", - ], - "c": Array [ - "--call", - ], - "C": Array [ - "--prefix", - ], - "d": Array [ - "--loglevel", - "info", - ], - "D": Array [ - "--save-dev", - ], - "dd": Array [ - "--loglevel", - "verbose", - ], - "ddd": Array [ - "--loglevel", - "silly", - ], - "desc": Array [ - "--description", - ], - "E": Array [ - "--save-exact", - ], - "enjoy-by": Array [ - "--before", - ], - "f": Array [ - "--force", - ], - "g": Array [ - "--global", - ], - "h": Array [ - "--usage", - ], - "H": Array [ - "--usage", - ], - "help": Array [ - "--usage", - ], - "l": Array [ - "--long", - ], - "local": Array [ - "--no-global", - ], - "m": Array [ - "--message", - ], - "n": Array [ - "--no-yes", - ], - "no-desc": Array [ - "--no-description", - ], - "no-reg": Array [ - "--no-registry", - ], - "noreg": Array [ - "--no-registry", - ], - "O": Array [ - "--save-optional", - ], - "p": Array [ - "--parseable", - ], - "P": Array [ - "--save-prod", - ], - "porcelain": Array [ - "--parseable", - ], - "q": Array [ - "--loglevel", - "warn", - ], - "quiet": Array [ - "--loglevel", - "warn", - ], - "readonly": Array [ - "--read-only", - ], - "reg": Array [ - "--registry", - ], - "s": Array [ - "--loglevel", - "silent", - ], - "S": Array [ - "--save", - ], - "silent": Array [ - "--loglevel", - "silent", - ], - "v": Array [ - "--version", - ], - "verbose": Array [ - "--loglevel", - "verbose", - ], - "y": Array [ - "--yes", - ], - }, - "types": Object { - "_auth": Array [ - null, - "{String TYPE}", - ], - "access": Array [ - null, - "restricted", - "public", - ], - "all": "{Boolean TYPE}", - "allow-same-version": "{Boolean TYPE}", - "also": Array [ - null, - "dev", - "development", - ], - "always-auth": "{Boolean TYPE}", - "audit": "{Boolean TYPE}", - "audit-level": Array [ - "low", - "moderate", - "high", - "critical", - "none", - null, - ], - "auth-type": Array [ - "legacy", - "sso", - "saml", - "oauth", - ], - "before": Array [ - null, - "{Date TYPE}", - ], - "bin-links": "{Boolean TYPE}", - "browser": Array [ - null, - "{Boolean TYPE}", - "{String TYPE}", - ], - "ca": Array [ - null, - "{String TYPE}", - "{Array TYPE}", - ], - "cache": "{PATH MODULE}", - "cache-lock-retries": "{Number TYPE}", - "cache-lock-stale": "{Number TYPE}", - "cache-lock-wait": "{Number TYPE}", - "cache-max": "{Number TYPE}", - "cache-min": "{Number TYPE}", - "cafile": "{PATH MODULE}", - "call": "{String TYPE}", - "cert": Array [ - null, - "{String TYPE}", - ], - "ci-name": Array [ - null, - "{String TYPE}", - ], - "cidr": Array [ - null, - "{String TYPE}", - "{Array TYPE}", - ], - "color": Array [ - "always", - "{Boolean TYPE}", - ], - "commit-hooks": "{Boolean TYPE}", - "depth": Array [ - null, - "{Number TYPE}", - ], - "description": "{Boolean TYPE}", - "dev": "{Boolean TYPE}", - "diff": Array [ - "{String TYPE}", - "{Array TYPE}", - ], - "diff-dst-prefix": "{String TYPE}", - "diff-ignore-all-space": "{Boolean TYPE}", - "diff-name-only": "{Boolean TYPE}", - "diff-no-prefix": "{Boolean TYPE}", - "diff-src-prefix": "{String TYPE}", - "diff-text": "{Boolean TYPE}", - "diff-unified": 
Array [ - null, - "{Number TYPE}", - ], - "dry-run": "{Boolean TYPE}", - "editor": "{String TYPE}", - "engine-strict": "{Boolean TYPE}", - "fetch-retries": "{Number TYPE}", - "fetch-retry-factor": "{Number TYPE}", - "fetch-retry-maxtimeout": "{Number TYPE}", - "fetch-retry-mintimeout": "{Number TYPE}", - "fetch-timeout": "{Number TYPE}", - "force": "{Boolean TYPE}", - "foreground-script": "{Boolean TYPE}", - "format-package-lock": "{Boolean TYPE}", - "fund": "{Boolean TYPE}", - "git": "{String TYPE}", - "git-tag-version": "{Boolean TYPE}", - "global": "{Boolean TYPE}", - "global-style": "{Boolean TYPE}", - "globalconfig": "{PATH MODULE}", - "heading": "{String TYPE}", - "https-proxy": Array [ - null, - "{URL MODULE}", - ], - "if-present": "{Boolean TYPE}", - "ignore-prepublish": "{Boolean TYPE}", - "ignore-scripts": "{Boolean TYPE}", - "include": Array [ - "{Array TYPE}", - "prod", - "dev", - "optional", - "peer", - ], - "include-staged": "{Boolean TYPE}", - "init-author-email": "{String TYPE}", - "init-author-name": "{String TYPE}", - "init-author-url": Array [ - "", - "{URL MODULE}", - ], - "init-license": "{String TYPE}", - "init-module": "{PATH MODULE}", - "init-version": "{SEMVER MODULE}", - "init.author.email": "{String TYPE}", - "init.author.name": "{String TYPE}", - "init.author.url": Array [ - "", - "{URL MODULE}", - ], - "init.license": "{String TYPE}", - "init.module": "{PATH MODULE}", - "init.version": "{SEMVER MODULE}", - "json": "{Boolean TYPE}", - "key": Array [ - null, - "{String TYPE}", - ], - "legacy-bundling": "{Boolean TYPE}", - "legacy-peer-deps": "{Boolean TYPE}", - "link": "{Boolean TYPE}", - "local-address": Array [ - undefined, - "127.0.0.1", - "no place like home", - ], - "loglevel": Array [ - "silent", - "error", - "warn", - "notice", - "http", - "timing", - "info", - "verbose", - "silly", - ], - "logs-max": "{Number TYPE}", - "long": "{Boolean TYPE}", - "maxsockets": "{Number TYPE}", - "message": "{String TYPE}", - "node-options": Array [ - null, - "{String TYPE}", - ], - "node-version": Array [ - null, - "{SEMVER MODULE}", - ], - "noproxy": Array [ - null, - "{String TYPE}", - "{Array TYPE}", - ], - "npm-version": "{SEMVER MODULE}", - "offline": "{Boolean TYPE}", - "omit": Array [ - "{Array TYPE}", - "dev", - "optional", - "peer", - ], - "only": Array [ - null, - "dev", - "development", - "prod", - "production", - ], - "optional": "{Boolean TYPE}", - "otp": Array [ - null, - "{String TYPE}", - ], - "package": Array [ - "{String TYPE}", - "{Array TYPE}", - ], - "package-lock": "{Boolean TYPE}", - "package-lock-only": "{Boolean TYPE}", - "parseable": "{Boolean TYPE}", - "prefer-offline": "{Boolean TYPE}", - "prefer-online": "{Boolean TYPE}", - "prefix": "{PATH MODULE}", - "preid": "{String TYPE}", - "production": "{Boolean TYPE}", - "progress": "{Boolean TYPE}", - "proxy": Array [ - null, - false, - "{URL MODULE}", - ], - "read-only": "{Boolean TYPE}", - "rebuild-bundle": "{Boolean TYPE}", - "registry": Array [ - null, - "{URL MODULE}", - ], - "rollback": "{Boolean TYPE}", - "save": "{Boolean TYPE}", - "save-bundle": "{Boolean TYPE}", - "save-dev": "{Boolean TYPE}", - "save-exact": "{Boolean TYPE}", - "save-optional": "{Boolean TYPE}", - "save-prefix": "{String TYPE}", - "save-prod": "{Boolean TYPE}", - "scope": "{String TYPE}", - "script-shell": Array [ - null, - "{String TYPE}", - ], - "scripts-prepend-node-path": Array [ - "{Boolean TYPE}", - "auto", - "warn-only", - ], - "searchexclude": Array [ - null, - "{String TYPE}", - ], - "searchlimit": "{Number 
TYPE}", - "searchopts": "{String TYPE}", - "searchstaleness": "{Number TYPE}", - "shell": "{String TYPE}", - "shrinkwrap": "{Boolean TYPE}", - "sign-git-commit": "{Boolean TYPE}", - "sign-git-tag": "{Boolean TYPE}", - "sso-poll-frequency": "{Number TYPE}", - "sso-type": Array [ - null, - "oauth", - "saml", - ], - "strict-peer-deps": "{Boolean TYPE}", - "strict-ssl": "{Boolean TYPE}", - "tag": "{String TYPE}", - "tag-version-prefix": "{String TYPE}", - "timing": "{Boolean TYPE}", - "tmp": "{PATH MODULE}", - "umask": "{Umask TYPE}", - "unicode": "{Boolean TYPE}", - "update-notifier": "{Boolean TYPE}", - "usage": "{Boolean TYPE}", - "user-agent": "{String TYPE}", - "userconfig": "{PATH MODULE}", - "version": "{Boolean TYPE}", - "versions": "{Boolean TYPE}", - "viewer": "{String TYPE}", - }, -} -` diff --git a/tap-snapshots/test-lib-utils-error-handler.js-TAP.test.js b/tap-snapshots/test-lib-utils-error-handler.js-TAP.test.js deleted file mode 100644 index 909051cdab506..0000000000000 --- a/tap-snapshots/test-lib-utils-error-handler.js-TAP.test.js +++ /dev/null @@ -1,23 +0,0 @@ -/* IMPORTANT - * This snapshot file is auto-generated, but designed for humans. - * It should be checked into source control and tracked carefully. - * Re-generate by setting TAP_SNAPSHOT=1 and running tests. - * Make sure to inspect the output below. Do not ignore changes! - */ -'use strict' -exports[`test/lib/utils/error-handler.js TAP handles unknown error > should have expected log contents for unknown error 1`] = ` -0 verbose code 1 -1 error foo A complete log of this run can be found in: -1 error foo {CWD}/cachefolder/_logs/expecteddate-debug.log -2 verbose stack Error: ERROR -3 verbose cwd {CWD} -4 verbose Foo 1.0.0 -5 verbose argv "/node" "{CWD}/test/lib/utils/error-handler.js" -6 verbose node v1.0.0 -7 verbose npm v1.0.0 -8 error foo code ERROR -9 error foo ERR ERROR -10 error foo ERR ERROR -11 verbose exit 1 - -` diff --git a/tap-snapshots/test-lib-utils-flat-options.js-TAP.test.js b/tap-snapshots/test-lib-utils-flat-options.js-TAP.test.js deleted file mode 100644 index 47de89e976148..0000000000000 --- a/tap-snapshots/test-lib-utils-flat-options.js-TAP.test.js +++ /dev/null @@ -1,129 +0,0 @@ -/* IMPORTANT - * This snapshot file is auto-generated, but designed for humans. - * It should be checked into source control and tracked carefully. - * Re-generate by setting TAP_SNAPSHOT=1 and running tests. - * Make sure to inspect the output below. Do not ignore changes! 
- */ -'use strict' -exports[`test/lib/utils/flat-options.js TAP basic > flat options 1`] = ` -Object { - "_auth": undefined, - "@scope:registry": "@scope:registry", - "//nerf.dart:_authToken": "//nerf.dart:_authToken", - "access": "access", - "all": undefined, - "allowSameVersion": "allow-same-version", - "alwaysAuth": "always-auth", - "audit": "audit", - "auditLevel": "audit-level", - "authType": "auth-type", - "before": "before", - "binLinks": "bin-links", - "browser": "browser", - "ca": "ca", - "cache": "cache/_cacache", - "cafile": "cafile", - "call": "call", - "cert": "cert", - "cidr": "cidr", - "color": true, - "commitHooks": "commit-hooks", - "defaultTag": "tag", - "depth": "depth", - "diff": undefined, - "diffDstPrefix": undefined, - "diffIgnoreAllSpace": undefined, - "diffNameOnly": undefined, - "diffNoPrefix": undefined, - "diffSrcPrefix": undefined, - "diffText": undefined, - "diffUnified": undefined, - "dmode": 511, - "dryRun": "dry-run", - "editor": "editor", - "engineStrict": "engine-strict", - "fmode": 438, - "force": "force", - "foregroundScripts": false, - "formatPackageLock": "format-package-lock", - "fund": "fund", - "git": "git", - "gitTagVersion": "git-tag-version", - "global": "global", - "globalPrefix": "/usr/local", - "globalStyle": "global-style", - "hashAlgorithm": "sha1", - "ignoreScripts": undefined, - "includeStaged": undefined, - "json": undefined, - "key": "key", - "legacyBundling": "legacy-bundling", - "legacyPeerDeps": undefined, - "localPrefix": "/path/to/npm/cli", - "log": Object {}, - "long": undefined, - "message": "message", - "nodeBin": "/path/to/some/node", - "nodeVersion": "1.2.3", - "noProxy": "noproxy", - "npmBin": "/path/to/npm/bin.js", - "npmCommand": null, - "npmSession": "12345", - "npmVersion": "7.6.5", - "offline": "offline", - "omit": Array [], - "otp": "otp", - "package": "package", - "packageLock": true, - "packageLockOnly": "package-lock-only", - "parseable": undefined, - "preferDedupe": undefined, - "preferOffline": "prefer-offline", - "preferOnline": "prefer-online", - "prefix": "/path/to/npm/cli", - "preid": "preid", - "projectScope": "@npmcli", - "proxy": "proxy", - "readOnly": "read-only", - "rebuildBundle": "rebuild-bundle", - "registry": "registry", - "retry": Object { - "factor": "fetch-retry-factor", - "maxTimeout": "fetch-retry-maxtimeout", - "minTimeout": "fetch-retry-mintimeout", - "retries": "fetch-retries", - }, - "save": "save", - "saveBundle": false, - "savePrefix": "", - "saveType": "peerOptional", - "scope": "", - "scriptShell": "script-shell", - "search": Object { - "description": "description", - "exclude": "searchexclude", - "limit": "searchlimit", - "opts": Null Object { - "from": "1", - }, - "staleness": "searchstaleness", - }, - "shell": undefined, - "signGitCommit": "sign-git-commit", - "signGitTag": "sign-git-tag", - "ssoPollFrequency": undefined, - "ssoType": undefined, - "strictPeerDeps": undefined, - "strictSSL": "strict-ssl", - "tag": "tag", - "tagVersionPrefix": "tag-version-prefix", - "timeout": "fetch-timeout", - "tmp": "/tmp", - "umask": 18, - "unicode": undefined, - "userAgent": "user-agent", - "viewer": "viewer", - "which": undefined, - "yes": undefined, -} -` diff --git a/tap-snapshots/test-lib-utils-npm-usage.js-TAP.test.js b/tap-snapshots/test-lib-utils-npm-usage.js-TAP.test.js deleted file mode 100644 index 8e1603eb4c881..0000000000000 --- a/tap-snapshots/test-lib-utils-npm-usage.js-TAP.test.js +++ /dev/null @@ -1,498 +0,0 @@ -/* IMPORTANT - * This snapshot file is auto-generated, but designed for 
humans. - * It should be checked into source control and tracked carefully. - * Re-generate by setting TAP_SNAPSHOT=1 and running tests. - * Make sure to inspect the output below. Do not ignore changes! - */ -'use strict' -exports[`test/lib/utils/npm-usage.js TAP usage basic usage > must match snapshot 1`] = ` - -Usage: npm <command> - -npm install install all the dependencies in your project -npm install <foo> add the <foo> dependency to your project -npm test run this project's tests -npm run <foo> run the script named <foo> -npm <command> -h quick help on <command> -npm -l display usage info for all commands -npm help <term> search for help on <term> -npm help npm more involved overview - -All commands: - - access, adduser, audit, bin, bugs, cache, ci, completion, - config, dedupe, deprecate, diff, dist-tag, docs, doctor, - edit, exec, explain, explore, find-dupes, fund, get, help, - hook, init, install, install-ci-test, install-test, link, - ll, login, logout, ls, org, outdated, owner, pack, ping, - prefix, profile, prune, publish, rebuild, repo, restart, - root, run-script, search, set, set-script, shrinkwrap, star, - stars, start, stop, team, test, token, uninstall, unpublish, - unstar, update, version, view, whoami - -Specify configs in the ini-formatted file: - /some/config/file/.npmrc -or on the command line via: npm <command> --key=value - -More configuration info: npm help config -Configuration fields: npm help 7 config - -npm@{VERSION} {BASEDIR} - -` - -exports[`test/lib/utils/npm-usage.js TAP usage did you mean? > must match snapshot 1`] = ` - -Usage: npm <command> - -npm install install all the dependencies in your project -npm install <foo> add the <foo> dependency to your project -npm test run this project's tests -npm run <foo> run the script named <foo> -npm <command> -h quick help on <command> -npm -l display usage info for all commands -npm help <term> search for help on <term> -npm help npm more involved overview - -All commands: - - access, adduser, audit, bin, bugs, cache, ci, completion, - config, dedupe, deprecate, diff, dist-tag, docs, doctor, - edit, exec, explain, explore, find-dupes, fund, get, help, - hook, init, install, install-ci-test, install-test, link, - ll, login, logout, ls, org, outdated, owner, pack, ping, - prefix, profile, prune, publish, rebuild, repo, restart, - root, run-script, search, set, set-script, shrinkwrap, star, - stars, start, stop, team, test, token, uninstall, unpublish, - unstar, update, version, view, whoami - -Specify configs in the ini-formatted file: - /some/config/file/.npmrc -or on the command line via: npm <command> --key=value - -More configuration info: npm help config -Configuration fields: npm help 7 config - -npm@{VERSION} {BASEDIR} - -` - -exports[`test/lib/utils/npm-usage.js TAP usage did you mean? > must match snapshot 2`] = ` - -Did you mean one of these? 
- install - uninstall -` - -exports[`test/lib/utils/npm-usage.js TAP usage set process.stdout.columns columns=0 > must match snapshot 1`] = ` - -Usage: npm <command> - -npm install install all the dependencies in your project -npm install <foo> add the <foo> dependency to your project -npm test run this project's tests -npm run <foo> run the script named <foo> -npm <command> -h quick help on <command> -npm -l display usage info for all commands -npm help <term> search for help on <term> -npm help npm more involved overview - -All commands: - - access, adduser, audit, bin, bugs, cache, ci, completion, - config, dedupe, deprecate, diff, dist-tag, docs, doctor, - edit, exec, explain, explore, find-dupes, fund, get, help, - hook, init, install, install-ci-test, install-test, link, - ll, login, logout, ls, org, outdated, owner, pack, ping, - prefix, profile, prune, publish, rebuild, repo, restart, - root, run-script, search, set, set-script, shrinkwrap, star, - stars, start, stop, team, test, token, uninstall, unpublish, - unstar, update, version, view, whoami - -Specify configs in the ini-formatted file: - /some/config/file/.npmrc -or on the command line via: npm <command> --key=value - -More configuration info: npm help config -Configuration fields: npm help 7 config - -npm@{VERSION} {BASEDIR} - -` - -exports[`test/lib/utils/npm-usage.js TAP usage set process.stdout.columns columns=90 > must match snapshot 1`] = ` - -Usage: npm <command> - -npm install install all the dependencies in your project -npm install <foo> add the <foo> dependency to your project -npm test run this project's tests -npm run <foo> run the script named <foo> -npm <command> -h quick help on <command> -npm -l display usage info for all commands -npm help <term> search for help on <term> -npm help npm more involved overview - -All commands: - - access, adduser, audit, bin, bugs, cache, ci, completion, - config, dedupe, deprecate, diff, dist-tag, docs, doctor, - edit, exec, explain, explore, find-dupes, fund, get, help, - hook, init, install, install-ci-test, install-test, link, - ll, login, logout, ls, org, outdated, owner, pack, ping, - prefix, profile, prune, publish, rebuild, repo, restart, - root, run-script, search, set, set-script, shrinkwrap, star, - stars, start, stop, team, test, token, uninstall, unpublish, - unstar, update, version, view, whoami - -Specify configs in the ini-formatted file: - /some/config/file/.npmrc -or on the command line via: npm <command> --key=value - -More configuration info: npm help config -Configuration fields: npm help 7 config - -npm@{VERSION} {BASEDIR} - -` - -exports[`test/lib/utils/npm-usage.js TAP usage with browser > must match snapshot 1`] = ` - -Usage: npm <command> - -npm install install all the dependencies in your project -npm install <foo> add the <foo> dependency to your project -npm test run this project's tests -npm run <foo> run the script named <foo> -npm <command> -h quick help on <command> -npm -l display usage info for all commands -npm help <term> search for help on <term> (in a browser) -npm help npm more involved overview (in a browser) - -All commands: - - access, adduser, audit, bin, bugs, cache, ci, completion, - config, dedupe, deprecate, diff, dist-tag, docs, doctor, - edit, exec, explain, explore, find-dupes, fund, get, help, - hook, init, install, install-ci-test, install-test, link, - ll, login, logout, ls, org, outdated, owner, pack, ping, - prefix, profile, prune, publish, rebuild, repo, restart, - root, run-script, search, set, set-script, shrinkwrap, 
star, - stars, start, stop, team, test, token, uninstall, unpublish, - unstar, update, version, view, whoami - -Specify configs in the ini-formatted file: - /some/config/file/.npmrc -or on the command line via: npm <command> --key=value - -More configuration info: npm help config -Configuration fields: npm help 7 config - -npm@{VERSION} {BASEDIR} - -` - -exports[`test/lib/utils/npm-usage.js TAP usage with long > must match snapshot 1`] = ` - -Usage: npm <command> - -npm install install all the dependencies in your project -npm install <foo> add the <foo> dependency to your project -npm test run this project's tests -npm run <foo> run the script named <foo> -npm <command> -h quick help on <command> -npm -l display usage info for all commands -npm help <term> search for help on <term> -npm help npm more involved overview - -All commands: - - access npm access public [<package>] - npm access restricted [<package>] - npm access grant <read-only|read-write> <scope:team> [<package>] - npm access revoke <scope:team> [<package>] - npm access 2fa-required [<package>] - npm access 2fa-not-required [<package>] - npm access ls-packages [<user>|<scope>|<scope:team>] - npm access ls-collaborators [<package> [<user>]] - npm access edit [<package>] - - adduser npm adduser [--registry=url] [--scope=@orgname] [--always-auth] - - aliases: login, add-user - - audit npm audit [--json] [--production] - npm audit fix [--force|--package-lock-only|--dry-run|--production|--only=(dev|prod)] - - bin npm bin [-g] - - bugs npm bugs [<pkgname>] - - alias: issues - - cache npm cache add <tarball file> - npm cache add <folder> - npm cache add <tarball url> - npm cache add <git url> - npm cache add <name>@<version> - npm cache clean - npm cache verify - - ci npm ci - - aliases: clean-install, ic, install-clean, isntall-clean - - completion source <(npm completion) - - config npm config set <key>=<value> [<key>=<value> ...] - npm config get [<key> [<key> ...]] - npm config delete <key> [<key> ...] - npm config list [--json] - npm config edit - npm set <key>=<value> [<key>=<value> ...] - npm get [<key> [<key> ...]] - - alias: c - - dedupe npm dedupe - - alias: ddp - - deprecate npm deprecate <pkg>[@<version>] <message> - - diff npm diff [...<paths>] - npm diff --diff=<pkg-name> [...<paths>] - npm diff --diff=<version-a> [--diff=<version-b>] [...<paths>] - npm diff --diff=<spec-a> [--diff=<spec-b>] [...<paths>] - npm diff [--diff-ignore-all-space] [--diff-name-only] [...<paths>] [...<paths>] - - dist-tag npm dist-tag add <pkg>@<version> [<tag>] - npm dist-tag rm <pkg> <tag> - npm dist-tag ls [<pkg>] - - alias: dist-tags - - docs npm docs [<pkgname> [<pkgname> ...]] - - alias: home - - doctor npm doctor - - edit npm edit <pkg>[/<subpkg>...] - - exec Run a command from a local or remote npm package. - - npm exec -- <pkg>[@<version>] [args...] - npm exec --package=<pkg>[@<version>] -- <cmd> [args...] - npm exec -c '<cmd> [args...]' - npm exec --package=foo -c '<cmd> [args...]' - - npx <pkg>[@<specifier>] [args...] - npx -p <pkg>[@<specifier>] <cmd> [args...] 
- npx -c '<cmd> [args...]' - npx -p <pkg>[@<specifier>] -c '<cmd> [args...]' - Run without --call or positional args to open interactive subshell - - - alias: x - common options: - --package=<pkg> (may be specified multiple times) - -p is a shorthand for --package only when using npx executable - -c <cmd> --call=<cmd> (may not be mixed with positional arguments) - - explain npm explain <folder | specifier> - - alias: why - - explore npm explore <pkg> [ -- <command>] - - find-dupes npm find-dupes - - fund npm fund - - common options: npm fund [--json] [--browser] [--unicode] [[<@scope>/]<pkg> [--which=<fundingSourceNumber>] - - get npm get [<key> ...] (See \`npm config\`) - - help npm help <term> [<terms..>] - - alias: hlep - - hook npm hook add <pkg> <url> <secret> [--type=<type>] - npm hook ls [pkg] - npm hook rm <id> - npm hook update <id> <url> <secret> - - init - npm init [--force|-f|--yes|-y|--scope] - npm init <@scope> (same as \`npx <@scope>/create\`) - npm init [<@scope>/]<name> (same as \`npx [<@scope>/]create-<name>\`) - - aliases: create, innit - - install npm install (with no args, in package dir) - npm install [<@scope>/]<pkg> - npm install [<@scope>/]<pkg>@<tag> - npm install [<@scope>/]<pkg>@<version> - npm install [<@scope>/]<pkg>@<version range> - npm install <alias>@npm:<name> - npm install <folder> - npm install <tarball file> - npm install <tarball url> - npm install <git:// url> - npm install <github username>/<github project> - - aliases: i, in, ins, inst, insta, instal, isnt, isnta, isntal, add - common options: [--save-prod|--save-dev|--save-optional|--save-peer] [--save-exact] [--no-save] - - install-ci-test npm install-ci-test [args] - Same args as \`npm ci\` - - alias: cit - - install-test npm install-test [args] - Same args as \`npm install\` - - alias: it - - link npm link (in package dir) - npm link [<@scope>/]<pkg>[@<version>] - - alias: ln - - ll npm ll [[<@scope>/]<pkg> ...] - - alias: la - - login npm adduser [--registry=url] [--scope=@orgname] [--always-auth] - - aliases: login, add-user - - logout npm logout [--registry=<url>] [--scope=<@scope>] - - ls npm ls [[<@scope>/]<pkg> ...] - - alias: list - - org npm org set orgname username [developer | admin | owner] - npm org rm orgname username - npm org ls orgname [<username>] - - alias: ogr - - outdated npm outdated [[<@scope>/]<pkg> ...] - - owner npm owner add <user> [<@scope>/]<pkg> - npm owner rm <user> [<@scope>/]<pkg> - npm owner ls [<@scope>/]<pkg> - - alias: author - - pack npm pack [[<@scope>/]<pkg>...] [--dry-run] - - ping npm ping - ping registry - - prefix npm prefix [-g] - - profile npm profile enable-2fa [auth-only|auth-and-writes] - - - common options: npm profile disable-2fa - - - prune npm prune [[<@scope>/]<pkg>...] [--production] - - publish npm publish [<folder>] [--tag <tag>] [--access <public|restricted>] [--dry-run] - - Publishes '.' if no argument supplied - Sets tag \`latest\` if no --tag specified - - rebuild npm rebuild [[<@scope>/]<name>[@<version>] ...] - - alias: rb - - repo npm repo [<pkgname> [<pkgname> ...]] - - restart npm restart [-- <args>] - - root npm root [-g] - - run-script npm run-script <command> [-- <args>] - - aliases: run, rum, urn - - search npm search [-l|--long] [--json] [--parseable] [--no-description] [search terms ...] - - aliases: s, se, find - - set npm set <key>=<value> [<key>=<value> ...] (See \`npm config\`) - - set-script npm set-script [<script>] [<command>] - - shrinkwrap npm shrinkwrap - - star npm star [<pkg>...] - npm unstar [<pkg>...] 
- - stars npm stars [<user>] - - start npm start [-- <args>] - - stop npm stop [-- <args>] - - team npm team create <scope:team> [--otp <otpcode>] - npm team destroy <scope:team> [--otp <otpcode>] - npm team add <scope:team> <user> [--otp <otpcode>] - npm team rm <scope:team> <user> [--otp <otpcode>] - npm team ls <scope>|<scope:team> - - - test npm test [-- <args>] - - aliases: tst, t - - token npm token list - npm token revoke <id|token> - npm token create [--read-only] [--cidr=list] - - uninstall npm uninstall [<@scope>/]<pkg>[@<version>]... [-S|--save|--no-save] - - aliases: un, unlink, remove, rm, r - - unpublish npm unpublish [<@scope>/]<pkg>[@<version>] - - unstar npm star [<pkg>...] - npm unstar [<pkg>...] - - update npm update [-g] [<pkg>...] - - aliases: up, upgrade, udpate - - version npm version [<newversion> | major | minor | patch | premajor | preminor | prepatch | prerelease [--preid=<prerelease-id>] | from-git] - (run in package dir) - - 'npm -v' or 'npm --version' to print npm version ({VERSION}) - 'npm view <pkg> version' to view a package's published version - 'npm ls' to inspect current package/dependency versions - - - alias: verison - - view npm view [<@scope>/]<pkg>[@<version>] [<field>[.subfield]...] - - aliases: v, info, show - - whoami npm whoami [--registry <registry>] - (just prints username according to given registry) - -Specify configs in the ini-formatted file: - /some/config/file/.npmrc -or on the command line via: npm <command> --key=value - -More configuration info: npm help config -Configuration fields: npm help 7 config - -npm@{VERSION} {BASEDIR} - -` diff --git a/tap-snapshots/test-tap-repo.js-TAP.test.js b/tap-snapshots/test-tap-repo.js-TAP.test.js deleted file mode 100644 index 3fba79edb8d42..0000000000000 --- a/tap-snapshots/test-tap-repo.js-TAP.test.js +++ /dev/null @@ -1,21 +0,0 @@ -/* IMPORTANT - * This snapshot file is auto-generated, but designed for humans. - * It should be checked into source control and tracked carefully. - * Re-generate by setting TAP_SNAPSHOT=1 and running tests. - * Make sure to inspect the output below. Do not ignore changes! - */ -'use strict' -exports[`test/tap/repo.js TAP npm repo underscore --json > should print json result 1`] = ` -{ - "title": "repository available at the following URL", - "url": "https://github.com/jashkenas/underscore" -} - -` - -exports[`test/tap/repo.js TAP npm repo underscore --no-browser > should print alternative msg 1`] = ` -repository available at the following URL: - -https://github.com/jashkenas/underscore - -` diff --git a/tap-snapshots/test-lib-config.js-TAP.test.js b/tap-snapshots/test/lib/config.js.test.cjs similarity index 91% rename from tap-snapshots/test-lib-config.js-TAP.test.js rename to tap-snapshots/test/lib/config.js.test.cjs index d62969145d1d9..b5acbb0af94c5 100644 --- a/tap-snapshots/test-lib-config.js-TAP.test.js +++ b/tap-snapshots/test/lib/config.js.test.cjs @@ -5,7 +5,7 @@ * Make sure to inspect the output below. Do not ignore changes! 
*/ 'use strict' -exports[`test/lib/config.js TAP config edit --global > should write global config file 1`] = ` +exports[`test/lib/config.js TAP config edit --location=global > should write global config file 1`] = ` ;;;; ; npm globalconfig file: /etc/npmrc ; this is a simple ini-formatted file @@ -88,9 +88,12 @@ exports[`test/lib/config.js TAP config edit > should write config file 2`] = ` exports[`test/lib/config.js TAP config get no args > should list configs on config get no args 1`] = ` ; "cli" config from command line options +cat = true +chai = true +dog = true editor = "vi" -global = false json = false +location = "user" long = false ; node bin location = /path/to/node @@ -109,18 +112,24 @@ init.version = "1.0.0" ; "cli" config from command line options +cat = true +chai = true +dog = true editor = "vi" -global = false json = false +location = "user" long = true ` exports[`test/lib/config.js TAP config list > should list configs 1`] = ` ; "cli" config from command line options +cat = true +chai = true +dog = true editor = "vi" -global = false json = false +location = "user" long = false ; node bin location = /path/to/node @@ -132,10 +141,13 @@ long = false exports[`test/lib/config.js TAP config list overrides > should list overridden configs 1`] = ` ; "cli" config from command line options +cat = true +chai = true +dog = true editor = "vi" -global = false init.author.name = "Bar" json = false +location = "user" long = false ; "user" config from ~/.npmrc diff --git a/tap-snapshots/test/lib/dist-tag.js.test.cjs b/tap-snapshots/test/lib/dist-tag.js.test.cjs new file mode 100644 index 0000000000000..21d9331db1299 --- /dev/null +++ b/tap-snapshots/test/lib/dist-tag.js.test.cjs @@ -0,0 +1,298 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! 
+ */ +'use strict' +exports[`test/lib/dist-tag.js TAP add missing args > should exit usage error message 1`] = ` +Error: +Usage: npm dist-tag + +Modify package distribution tags + +Usage: +npm dist-tag add <pkg>@<version> [<tag>] +npm dist-tag rm <pkg> <tag> +npm dist-tag ls [<pkg>] + +Options: +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: dist-tags + +Run "npm help dist-tag" for more info { + "code": "EUSAGE", +} +` + +exports[`test/lib/dist-tag.js TAP add missing pkg name > should exit usage error message 1`] = ` +Error: +Usage: npm dist-tag + +Modify package distribution tags + +Usage: +npm dist-tag add <pkg>@<version> [<tag>] +npm dist-tag rm <pkg> <tag> +npm dist-tag ls [<pkg>] + +Options: +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: dist-tags + +Run "npm help dist-tag" for more info { + "code": "EUSAGE", +} +` + +exports[`test/lib/dist-tag.js TAP add new tag > should return success msg 1`] = ` ++c: @scoped/another@7.7.7 +` + +exports[`test/lib/dist-tag.js TAP add using valid semver range as name > should return success msg 1`] = ` +dist-tag add 1.0.0 to @scoped/another@7.7.7 + +` + +exports[`test/lib/dist-tag.js TAP borked cmd usage > should show usage error 1`] = ` +Error: +Usage: npm dist-tag + +Modify package distribution tags + +Usage: +npm dist-tag add <pkg>@<version> [<tag>] +npm dist-tag rm <pkg> <tag> +npm dist-tag ls [<pkg>] + +Options: +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: dist-tags + +Run "npm help dist-tag" for more info { + "code": "EUSAGE", +} +` + +exports[`test/lib/dist-tag.js TAP ls global > should throw basic usage 1`] = ` +Error: +Usage: npm dist-tag + +Modify package distribution tags + +Usage: +npm dist-tag add <pkg>@<version> [<tag>] +npm dist-tag rm <pkg> <tag> +npm dist-tag ls [<pkg>] + +Options: +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: dist-tags + +Run "npm help dist-tag" for more info { + "code": "EUSAGE", +} +` + +exports[`test/lib/dist-tag.js TAP ls in current package > should list available tags for current package 1`] = ` +a: 0.0.1 +b: 0.5.0 +latest: 1.0.0 +` + +exports[`test/lib/dist-tag.js TAP ls on missing name in current package > should throw usage error message 1`] = ` +Error: +Usage: npm dist-tag + +Modify package distribution tags + +Usage: +npm dist-tag add <pkg>@<version> [<tag>] +npm dist-tag rm <pkg> <tag> +npm dist-tag ls [<pkg>] + +Options: +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: dist-tags + +Run "npm help dist-tag" for more info { + "code": "EUSAGE", +} +` + +exports[`test/lib/dist-tag.js TAP ls on missing package > should log no dist-tag found msg 1`] = ` +dist-tag ls Couldn't get dist-tag data for foo@latest + +` + +exports[`test/lib/dist-tag.js TAP ls on missing package > should throw error message 1`] = ` +Error: No dist-tags found for foo +` + +exports[`test/lib/dist-tag.js TAP ls on named package > should list tags for the specified package 1`] = ` +a: 0.0.2 +b: 0.6.0 +latest: 2.0.0 +` + +exports[`test/lib/dist-tag.js TAP no args in current package > should default to listing available tags for current package 1`] = ` +a: 0.0.1 +b: 0.5.0 +latest: 1.0.0 +` + +exports[`test/lib/dist-tag.js TAP only named package arg > should default to listing tags for the specified package 1`] = ` +a: 0.0.2 +b: 0.6.0 +latest: 2.0.0 +` + 
+exports[`test/lib/dist-tag.js TAP remove existing tag > should log remove info 1`] = ` +dist-tag del c from @scoped/another + +` + +exports[`test/lib/dist-tag.js TAP remove existing tag > should return success msg 1`] = ` +-c: @scoped/another@7.7.7 +` + +exports[`test/lib/dist-tag.js TAP remove missing pkg name > should exit usage error message 1`] = ` +Error: +Usage: npm dist-tag + +Modify package distribution tags + +Usage: +npm dist-tag add <pkg>@<version> [<tag>] +npm dist-tag rm <pkg> <tag> +npm dist-tag ls [<pkg>] + +Options: +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: dist-tags + +Run "npm help dist-tag" for more info { + "code": "EUSAGE", +} +` + +exports[`test/lib/dist-tag.js TAP remove non-existing tag > should log error msg 1`] = ` +dist-tag del nonexistent from @scoped/another +dist-tag del nonexistent is not a dist-tag on @scoped/another + +` + +exports[`test/lib/dist-tag.js TAP set existing version > should log warn msg 1`] = ` +dist-tag add b to @scoped/another@0.6.0 +dist-tag add b is already set to version 0.6.0 + +` + +exports[`test/lib/dist-tag.js TAP workspaces no args > printed the expected output 1`] = ` +workspace-a: +latest-a: 1.0.0 +latest: 1.0.0 +workspace-b: +latest-b: 2.0.0 +latest: 2.0.0 +workspace-c: +latest-c: 3.0.0 +latest: 3.0.0 +` + +exports[`test/lib/dist-tag.js TAP workspaces no args, one failing workspace sets exitCode to 1 > printed the expected output 1`] = ` +workspace-a: +latest-a: 1.0.0 +latest: 1.0.0 +workspace-b: +latest-b: 2.0.0 +latest: 2.0.0 +workspace-c: +latest-c: 3.0.0 +latest: 3.0.0 +workspace-d: +` + +exports[`test/lib/dist-tag.js TAP workspaces no args, one workspace > printed the expected output 1`] = ` +workspace-a: +latest-a: 1.0.0 +latest: 1.0.0 +` + +exports[`test/lib/dist-tag.js TAP workspaces one arg -- . > printed the expected output 1`] = ` +workspace-a: +latest-a: 1.0.0 +latest: 1.0.0 +workspace-b: +latest-b: 2.0.0 +latest: 2.0.0 +workspace-c: +latest-c: 3.0.0 +latest: 3.0.0 +` + +exports[`test/lib/dist-tag.js TAP workspaces one arg -- .@1, ignores version spec > printed the expected output 1`] = ` +workspace-a: +latest-a: 1.0.0 +latest: 1.0.0 +workspace-b: +latest-b: 2.0.0 +latest: 2.0.0 +workspace-c: +latest-c: 3.0.0 +latest: 3.0.0 +` + +exports[`test/lib/dist-tag.js TAP workspaces one arg -- list > printed the expected output 1`] = ` +workspace-a: +latest-a: 1.0.0 +latest: 1.0.0 +workspace-b: +latest-b: 2.0.0 +latest: 2.0.0 +workspace-c: +latest-c: 3.0.0 +latest: 3.0.0 +` + +exports[`test/lib/dist-tag.js TAP workspaces two args -- list, . 
> printed the expected output 1`] = ` +workspace-a: +latest-a: 1.0.0 +latest: 1.0.0 +workspace-b: +latest-b: 2.0.0 +latest: 2.0.0 +workspace-c: +latest-c: 3.0.0 +latest: 3.0.0 +` + +exports[`test/lib/dist-tag.js TAP workspaces two args -- list, .@1, ignores version spec > printed the expected output 1`] = ` +workspace-a: +latest-a: 1.0.0 +latest: 1.0.0 +workspace-b: +latest-b: 2.0.0 +latest: 2.0.0 +workspace-c: +latest-c: 3.0.0 +latest: 3.0.0 +` + +exports[`test/lib/dist-tag.js TAP workspaces two args -- list, @scoped/pkg, logs a warning and ignores workspaces > printed the expected output 1`] = ` +a: 0.0.1 +b: 0.5.0 +latest: 1.0.0 +` diff --git a/tap-snapshots/test-lib-fund.js-TAP.test.js b/tap-snapshots/test/lib/fund.js.test.cjs similarity index 85% rename from tap-snapshots/test-lib-fund.js-TAP.test.js rename to tap-snapshots/test/lib/fund.js.test.cjs index 7ad86ebeea7e9..c078beb7d9866 100644 --- a/tap-snapshots/test-lib-fund.js-TAP.test.js +++ b/tap-snapshots/test/lib/fund.js.test.cjs @@ -92,3 +92,23 @@ test-multiple-funding-sources@1.0.0 ` + +exports[`test/lib/fund.js TAP workspaces filter funding info by a specific workspace > should display only filtered workspace name and its deps 1`] = ` +workspaces-support@1.0.0 +\`-- https://example.com/a + | \`-- a@1.0.0 + \`-- http://example.com/c + \`-- c@1.0.0 + + +` + +exports[`test/lib/fund.js TAP workspaces filter funding info by a specific workspace > should display only filtered workspace path and its deps 1`] = ` +workspaces-support@1.0.0 +\`-- https://example.com/a + | \`-- a@1.0.0 + \`-- http://example.com/c + \`-- c@1.0.0 + + +` diff --git a/tap-snapshots/test/lib/init.js.test.cjs b/tap-snapshots/test/lib/init.js.test.cjs new file mode 100644 index 0000000000000..95abbe6c1d830 --- /dev/null +++ b/tap-snapshots/test/lib/init.js.test.cjs @@ -0,0 +1,33 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/lib/init.js TAP workspaces no args > should print helper info 1`] = ` +Array [ + Array [ + String( + This utility will walk you through creating a package.json file. + It only covers the most common items, and tries to guess sensible defaults. + + See \`npm help init\` for definitive documentation on these fields + and exactly what they do. + + Use \`npm install <pkg>\` afterwards to install a package and + save it as a dependency in the package.json file. + + Press ^C at any time to quit. + ), + ], +] +` + +exports[`test/lib/init.js TAP workspaces no args, existing folder > should print helper info 1`] = ` +Array [] +` + +exports[`test/lib/init.js TAP workspaces with arg but missing workspace folder > should print helper info 1`] = ` +Array [] +` diff --git a/tap-snapshots/test/lib/link.js.test.cjs b/tap-snapshots/test/lib/link.js.test.cjs new file mode 100644 index 0000000000000..0e20bcd994e3a --- /dev/null +++ b/tap-snapshots/test/lib/link.js.test.cjs @@ -0,0 +1,45 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! 
+ */ +'use strict' +exports[`test/lib/link.js TAP link global linked pkg to local nm when using args > should create a local symlink to global pkg 1`] = ` +{CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/@myscope/bar -> {CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/global-prefix/lib/node_modules/@myscope/bar +{CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/@myscope/linked -> {CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/scoped-linked +{CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/a -> {CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/global-prefix/lib/node_modules/a +{CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/link-me-too -> {CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/link-me-too +{CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/test-pkg-link -> {CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/test-pkg-link + +` + +exports[`test/lib/link.js TAP link global linked pkg to local workspace using args > should create a local symlink to global pkg 1`] = ` +{CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/my-project/node_modules/@myscope/bar -> {CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/global-prefix/lib/node_modules/@myscope/bar +{CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/my-project/node_modules/@myscope/linked -> {CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/scoped-linked +{CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/my-project/node_modules/a -> {CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/global-prefix/lib/node_modules/a +{CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/my-project/node_modules/link-me-too -> {CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/link-me-too +{CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/my-project/node_modules/test-pkg-link -> {CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/test-pkg-link +{CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/my-project/node_modules/x -> {CWD}/test/lib/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/my-project/packages/x + +` + +exports[`test/lib/link.js TAP link pkg already in global space > should create a local symlink to global pkg 1`] = ` +{CWD}/test/lib/tap-testdir-link-link-pkg-already-in-global-space/my-project/node_modules/@myscope/linked -> {CWD}/test/lib/tap-testdir-link-link-pkg-already-in-global-space/scoped-linked + +` + +exports[`test/lib/link.js TAP link pkg already in global space when prefix is a symlink > should create a local symlink to global pkg 1`] = ` +{CWD}/test/lib/tap-testdir-link-link-pkg-already-in-global-space-when-prefix-is-a-symlink/my-project/node_modules/@myscope/linked -> {CWD}/test/lib/tap-testdir-link-link-pkg-already-in-global-space-when-prefix-is-a-symlink/scoped-linked + +` + 
+exports[`test/lib/link.js TAP link to globalDir when in current working dir of pkg and no args > should create a global link to current pkg 1`] = ` +{CWD}/test/lib/tap-testdir-link-link-to-globalDir-when-in-current-working-dir-of-pkg-and-no-args/global-prefix/lib/node_modules/test-pkg-link -> {CWD}/test/lib/tap-testdir-link-link-to-globalDir-when-in-current-working-dir-of-pkg-and-no-args/test-pkg-link + +` + +exports[`test/lib/link.js TAP link ws to globalDir when workspace specified and no args > should create a global link to current pkg 1`] = ` +{CWD}/test/lib/tap-testdir-link-link-ws-to-globalDir-when-workspace-specified-and-no-args/global-prefix/lib/node_modules/a -> {CWD}/test/lib/tap-testdir-link-link-ws-to-globalDir-when-workspace-specified-and-no-args/test-pkg-link/packages/a + +` diff --git a/tap-snapshots/test/lib/load-all-commands.js.test.cjs b/tap-snapshots/test/lib/load-all-commands.js.test.cjs new file mode 100644 index 0000000000000..8cf2e2837e295 --- /dev/null +++ b/tap-snapshots/test/lib/load-all-commands.js.test.cjs @@ -0,0 +1,1121 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/lib/load-all-commands.js TAP load each command access > must match snapshot 1`] = ` +npm access + +Set access level on published packages + +Usage: +npm access public [<package>] +npm access restricted [<package>] +npm access grant <read-only|read-write> <scope:team> [<package>] +npm access revoke <scope:team> [<package>] +npm access 2fa-required [<package>] +npm access 2fa-not-required [<package>] +npm access ls-packages [<user>|<scope>|<scope:team>] +npm access ls-collaborators [<package> [<user>]] +npm access edit [<package>] + +Options: +[--registry <registry>] [--otp <otp>] + +Run "npm help access" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command adduser > must match snapshot 1`] = ` +npm adduser + +Add a registry user account + +Usage: +npm adduser + +Options: +[--registry <registry>] [--scope <@scope>] + +aliases: login, add-user + +Run "npm help adduser" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command audit > must match snapshot 1`] = ` +npm audit + +Run a security audit + +Usage: +npm audit [fix] + +Options: +[--audit-level <info|low|moderate|high|critical|none>] [--dry-run] [-f|--force] +[--json] [--package-lock-only] +[--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +Run "npm help audit" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command bin > must match snapshot 1`] = ` +npm bin + +Display npm bin folder + +Usage: +npm bin + +Options: +[-g|--global] + +Run "npm help bin" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command bugs > must match snapshot 1`] = ` +npm bugs + +Report bugs for a package in a web browser + +Usage: +npm bugs [<pkgname>] + +Options: +[--no-browser|--browser <browser>] [--registry <registry>] + +alias: issues + +Run "npm help bugs" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command cache > must match snapshot 1`] = ` +npm cache + +Manipulates packages cache + +Usage: +npm cache add <tarball file> +npm cache add <folder> +npm cache add <tarball 
url> +npm cache add <git url> +npm cache add <name>@<version> +npm cache clean +npm cache verify + +Options: +[--cache <cache>] + +Run "npm help cache" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command ci > must match snapshot 1`] = ` +npm ci + +Install a project with a clean slate + +Usage: +npm ci + +Options: +[--no-audit] [--ignore-scripts] [--script-shell <script-shell>] + +aliases: clean-install, ic, install-clean, isntall-clean + +Run "npm help ci" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command completion > must match snapshot 1`] = ` +npm completion + +Tab Completion for npm + +Usage: +npm completion + +Run "npm help completion" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command config > must match snapshot 1`] = ` +npm config + +Manage the npm configuration files + +Usage: +npm config set <key>=<value> [<key>=<value> ...] +npm config get [<key> [<key> ...]] +npm config delete <key> [<key> ...] +npm config list [--json] +npm config edit + +Options: +[--json] [-g|--global] [--editor <editor>] [-L|--location <global|user|project>] +[-l|--long] + +alias: c + +Run "npm help config" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command dedupe > must match snapshot 1`] = ` +npm dedupe + +Reduce duplication in the package tree + +Usage: +npm dedupe + +Options: +[--global-style] [--legacy-bundling] [--strict-peer-deps] [--no-package-lock] +[--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--ignore-scripts] +[--no-audit] [--no-bin-links] [--no-fund] [--dry-run] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: ddp + +Run "npm help dedupe" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command deprecate > must match snapshot 1`] = ` +npm deprecate + +Deprecate a version of a package + +Usage: +npm deprecate <pkg>[@<version>] <message> + +Options: +[--registry <registry>] [--otp <otp>] + +Run "npm help deprecate" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command diff > must match snapshot 1`] = ` +npm diff + +The registry diff command + +Usage: +npm diff [...<paths>] + +Options: +[--diff <pkg-name|spec|version> [--diff <pkg-name|spec|version> ...]] +[--diff-name-only] [--diff-unified <number>] [--diff-ignore-all-space] +[--diff-no-prefix] [--diff-src-prefix <path>] [--diff-dst-prefix <path>] +[--diff-text] [-g|--global] [--tag <tag>] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +Run "npm help diff" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command dist-tag > must match snapshot 1`] = ` +npm dist-tag + +Modify package distribution tags + +Usage: +npm dist-tag add <pkg>@<version> [<tag>] +npm dist-tag rm <pkg> <tag> +npm dist-tag ls [<pkg>] + +Options: +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: dist-tags + +Run "npm help dist-tag" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command docs > must match snapshot 1`] = ` +npm docs + +Open documentation for a package in a web browser + +Usage: +npm docs [<pkgname> [<pkgname> ...]] + +Options: +[--no-browser|--browser <browser>] [--registry <registry>] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: home + +Run "npm help docs" for more info +` + +exports[`test/lib/load-all-commands.js TAP 
load each command doctor > must match snapshot 1`] = ` +npm doctor + +Check your npm environment + +Usage: +npm doctor + +Options: +[--registry <registry>] + +Run "npm help doctor" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command edit > must match snapshot 1`] = ` +npm edit + +Edit an installed package + +Usage: +npm edit <pkg>[/<subpkg>...] + +Options: +[--editor <editor>] + +Run "npm help edit" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command exec > must match snapshot 1`] = ` +npm exec + +Run a command from a local or remote npm package + +Usage: +npm exec -- <pkg>[@<version>] [args...] +npm exec --package=<pkg>[@<version>] -- <cmd> [args...] +npm exec -c '<cmd> [args...]' +npm exec --package=foo -c '<cmd> [args...]' + +Options: +[--package <pkg>[@<version>] [--package <pkg>[@<version>] ...]] +[-c|--call <call>] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: x + +Run "npm help exec" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command explain > must match snapshot 1`] = ` +npm explain + +Explain installed packages + +Usage: +npm explain <folder | specifier> + +Options: +[--json] [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + +alias: why + +Run "npm help explain" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command explore > must match snapshot 1`] = ` +npm explore + +Browse an installed package + +Usage: +npm explore <pkg> [ -- <command>] + +Options: +[--shell <shell>] + +Run "npm help explore" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command find-dupes > must match snapshot 1`] = ` +npm find-dupes + +Find duplication in the package tree + +Usage: +npm find-dupes + +Options: +[--global-style] [--legacy-bundling] [--strict-peer-deps] [--no-package-lock] +[--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--ignore-scripts] +[--no-audit] [--no-bin-links] [--no-fund] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +Run "npm help find-dupes" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command fund > must match snapshot 1`] = ` +npm fund + +Retrieve funding information + +Usage: +npm fund [[<@scope>/]<pkg>] + +Options: +[--json] [--no-browser|--browser <browser>] [--unicode] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[--which <fundingSourceNumber>] + +Run "npm help fund" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command get > must match snapshot 1`] = ` +npm get + +Get a value from the npm configuration + +Usage: +npm get [<key> ...] 
(See \`npm config\`) + +Run "npm help get" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command help > must match snapshot 1`] = ` +npm help + +Get help on npm + +Usage: +npm help <term> [<terms..>] + +Options: +[--viewer <viewer>] + +alias: hlep + +Run "npm help help" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command hook > must match snapshot 1`] = ` +npm hook + +Manage registry hooks + +Usage: +npm hook add <pkg> <url> <secret> [--type=<type>] +npm hook ls [pkg] +npm hook rm <id> +npm hook update <id> <url> <secret> + +Options: +[--registry <registry>] [--otp <otp>] + +Run "npm help hook" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command init > must match snapshot 1`] = ` +npm init + +Create a package.json file + +Usage: +npm init [--force|-f|--yes|-y|--scope] +npm init <@scope> (same as \`npx <@scope>/create\`) +npm init [<@scope>/]<name> (same as \`npx [<@scope>/]create-<name>\`) + +Options: +[-y|--yes] [-f|--force] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +aliases: create, innit + +Run "npm help init" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command install > must match snapshot 1`] = ` +npm install + +Install a package + +Usage: +npm install [<@scope>/]<pkg> +npm install [<@scope>/]<pkg>@<tag> +npm install [<@scope>/]<pkg>@<version> +npm install [<@scope>/]<pkg>@<version range> +npm install <alias>@npm:<name> +npm install <folder> +npm install <tarball file> +npm install <tarball url> +npm install <git:// url> +npm install <github username>/<github project> + +Options: +[-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer] +[-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] +[--strict-peer-deps] [--no-package-lock] +[--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--ignore-scripts] +[--no-audit] [--no-bin-links] [--no-fund] [--dry-run] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +aliases: i, in, ins, inst, insta, instal, isnt, isnta, isntal, add + +Run "npm help install" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command install-ci-test > must match snapshot 1`] = ` +npm install-ci-test + +Install a project with a clean slate and run tests + +Usage: +npm install-ci-test + +Options: +[--no-audit] [--ignore-scripts] [--script-shell <script-shell>] + +alias: cit + +Run "npm help install-ci-test" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command install-test > must match snapshot 1`] = ` +npm install-test + +Install package(s) and run tests + +Usage: +npm install-test [<@scope>/]<pkg> +npm install-test [<@scope>/]<pkg>@<tag> +npm install-test [<@scope>/]<pkg>@<version> +npm install-test [<@scope>/]<pkg>@<version range> +npm install-test <alias>@npm:<name> +npm install-test <folder> +npm install-test <tarball file> +npm install-test <tarball url> +npm install-test <git:// url> +npm install-test <github username>/<github project> + +Options: +[-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer] +[-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] +[--strict-peer-deps] [--no-package-lock] +[--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--ignore-scripts] +[--no-audit] [--no-bin-links] [--no-fund] [--dry-run] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + 
+alias: it + +Run "npm help install-test" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command link > must match snapshot 1`] = ` +npm link + +Symlink a package folder + +Usage: +npm link (in package dir) +npm link [<@scope>/]<pkg>[@<version>] + +Options: +[-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer] +[-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] +[--strict-peer-deps] [--no-package-lock] +[--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--ignore-scripts] +[--no-audit] [--no-bin-links] [--no-fund] [--dry-run] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: ln + +Run "npm help link" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command ll > must match snapshot 1`] = ` +npm ll + +List installed packages + +Usage: +npm ll [[<@scope>/]<pkg> ...] + +Options: +[-a|--all] [--json] [-l|--long] [-p|--parseable] [-g|--global] [--depth <depth>] +[--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--link] +[--package-lock-only] [--unicode] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: la + +Run "npm help ll" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command login > must match snapshot 1`] = ` +npm adduser + +Add a registry user account + +Usage: +npm adduser + +Options: +[--registry <registry>] [--scope <@scope>] + +aliases: login, add-user + +Run "npm help adduser" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command logout > must match snapshot 1`] = ` +npm logout + +Log out of the registry + +Usage: +npm logout + +Options: +[--registry <registry>] [--scope <@scope>] + +Run "npm help logout" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command ls > must match snapshot 1`] = ` +npm ls + +List installed packages + +Usage: +npm ls [[<@scope>/]<pkg> ...] + +Options: +[-a|--all] [--json] [-l|--long] [-p|--parseable] [-g|--global] [--depth <depth>] +[--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--link] +[--package-lock-only] [--unicode] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: list + +Run "npm help ls" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command org > must match snapshot 1`] = ` +npm org + +Manage orgs + +Usage: +npm org set orgname username [developer | admin | owner] +npm org rm orgname username +npm org ls orgname [<username>] + +Options: +[--registry <registry>] [--otp <otp>] [--json] [-p|--parseable] + +alias: ogr + +Run "npm help org" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command outdated > must match snapshot 1`] = ` +npm outdated + +Check for outdated packages + +Usage: +npm outdated [[<@scope>/]<pkg> ...] 
+ +Options: +[-a|--all] [--json] [-l|--long] [-p|--parseable] [-g|--global] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + +Run "npm help outdated" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command owner > must match snapshot 1`] = ` +npm owner + +Manage package owners + +Usage: +npm owner add <user> [<@scope>/]<pkg> +npm owner rm <user> [<@scope>/]<pkg> +npm owner ls [<@scope>/]<pkg> + +Options: +[--registry <registry>] [--otp <otp>] + +alias: author + +Run "npm help owner" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command pack > must match snapshot 1`] = ` +npm pack + +Create a tarball from a package + +Usage: +npm pack [[<@scope>/]<pkg>...] + +Options: +[--dry-run] [--json] [--pack-destination <pack-destination>] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +Run "npm help pack" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command ping > must match snapshot 1`] = ` +npm ping + +Ping npm registry + +Usage: +npm ping + +Options: +[--registry <registry>] + +Run "npm help ping" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command pkg > must match snapshot 1`] = ` +npm pkg + +Manages your package.json + +Usage: +npm pkg set <key>=<value> [<key>=<value> ...] +npm pkg get [<key> [<key> ...]] +npm pkg delete <key> [<key> ...] + +Options: +[-f|--force] [--json] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +Run "npm help pkg" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command prefix > must match snapshot 1`] = ` +npm prefix + +Display prefix + +Usage: +npm prefix [-g] + +Options: +[-g|--global] + +Run "npm help prefix" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command profile > must match snapshot 1`] = ` +npm profile + +Change settings on your registry profile + +Usage: +npm profile enable-2fa [auth-only|auth-and-writes] +npm profile disable-2fa +npm profile get [<key>] +npm profile set <key> <value> + +Options: +[--registry <registry>] [--json] [-p|--parseable] [--otp <otp>] + +Run "npm help profile" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command prune > must match snapshot 1`] = ` +npm prune + +Remove extraneous packages + +Usage: +npm prune [[<@scope>/]<pkg>...] + +Options: +[--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--dry-run] +[--json] [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +Run "npm help prune" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command publish > must match snapshot 1`] = ` +npm publish + +Publish a package + +Usage: +npm publish [<folder>] + +Options: +[--tag <tag>] [--access <restricted|public>] [--dry-run] [--otp <otp>] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +Run "npm help publish" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command rebuild > must match snapshot 1`] = ` +npm rebuild + +Rebuild a package + +Usage: +npm rebuild [[<@scope>/]<name>[@<version>] ...] 
+ +Options: +[-g|--global] [--no-bin-links] [--ignore-scripts] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: rb + +Run "npm help rebuild" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command repo > must match snapshot 1`] = ` +npm repo + +Open package repository page in the browser + +Usage: +npm repo [<pkgname> [<pkgname> ...]] + +Options: +[--no-browser|--browser <browser>] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +Run "npm help repo" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command restart > must match snapshot 1`] = ` +npm restart + +Restart a package + +Usage: +npm restart [-- <args>] + +Options: +[--ignore-scripts] [--script-shell <script-shell>] + +Run "npm help restart" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command root > must match snapshot 1`] = ` +npm root + +Display npm root + +Usage: +npm root + +Options: +[-g|--global] + +Run "npm help root" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command run-script > must match snapshot 1`] = ` +npm run-script + +Run arbitrary package scripts + +Usage: +npm run-script <command> [-- <args>] + +Options: +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] [--if-present] [--ignore-scripts] +[--script-shell <script-shell>] + +aliases: run, rum, urn + +Run "npm help run-script" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command search > must match snapshot 1`] = ` +npm search + +Search for packages + +Usage: +npm search [search terms ...] + +Options: +[-l|--long] [--json] [--color|--no-color|--color always] [-p|--parseable] +[--no-description] [--searchopts <searchopts>] [--searchexclude <searchexclude>] +[--registry <registry>] [--prefer-online] [--prefer-offline] [--offline] + +aliases: s, se, find + +Run "npm help search" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command set > must match snapshot 1`] = ` +npm set + +Set a value in the npm configuration + +Usage: +npm set <key>=<value> [<key>=<value> ...] (See \`npm config\`) + +Run "npm help set" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command set-script > must match snapshot 1`] = ` +npm set-script + +Set tasks in the scripts section of package.json + +Usage: +npm set-script [<script>] [<command>] + +Options: +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +Run "npm help set-script" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command shrinkwrap > must match snapshot 1`] = ` +npm shrinkwrap + +Lock down dependency versions for publication + +Usage: +npm shrinkwrap + +Run "npm help shrinkwrap" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command star > must match snapshot 1`] = ` +npm star + +Mark your favorite packages + +Usage: +npm star [<pkg>...] 
+ +Options: +[--registry <registry>] [--unicode] + +Run "npm help star" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command stars > must match snapshot 1`] = ` +npm stars + +View packages marked as favorites + +Usage: +npm stars [<user>] + +Options: +[--registry <registry>] + +Run "npm help stars" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command start > must match snapshot 1`] = ` +npm start + +Start a package + +Usage: +npm start [-- <args>] + +Options: +[--ignore-scripts] [--script-shell <script-shell>] + +Run "npm help start" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command stop > must match snapshot 1`] = ` +npm stop + +Stop a package + +Usage: +npm stop [-- <args>] + +Options: +[--ignore-scripts] [--script-shell <script-shell>] + +Run "npm help stop" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command team > must match snapshot 1`] = ` +npm team + +Manage organization teams and team memberships + +Usage: +npm team create <scope:team> [--otp <otpcode>] +npm team destroy <scope:team> [--otp <otpcode>] +npm team add <scope:team> <user> [--otp <otpcode>] +npm team rm <scope:team> <user> [--otp <otpcode>] +npm team ls <scope>|<scope:team> + +Options: +[--registry <registry>] [--otp <otp>] [-p|--parseable] [--json] + +Run "npm help team" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command test > must match snapshot 1`] = ` +npm test + +Test a package + +Usage: +npm test [-- <args>] + +Options: +[--ignore-scripts] [--script-shell <script-shell>] + +aliases: tst, t + +Run "npm help test" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command token > must match snapshot 1`] = ` +npm token + +Manage your authentication tokens + +Usage: +npm token list +npm token revoke <id|token> +npm token create [--read-only] [--cidr=list] + +Options: +[--read-only] [--cidr <cidr> [--cidr <cidr> ...]] [--registry <registry>] +[--otp <otp>] + +Run "npm help token" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command uninstall > must match snapshot 1`] = ` +npm uninstall + +Remove a package + +Usage: +npm uninstall [<@scope>/]<pkg>... + +Options: +[-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +aliases: un, unlink, remove, rm, r + +Run "npm help uninstall" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command unpublish > must match snapshot 1`] = ` +npm unpublish + +Remove a package from the registry + +Usage: +npm unpublish [<@scope>/]<pkg>[@<version>] + +Options: +[--dry-run] [-f|--force] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +Run "npm help unpublish" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command unstar > must match snapshot 1`] = ` +npm unstar + +Remove an item from your favorite packages + +Usage: +npm unstar [<pkg>...] + +Options: +[--registry <registry>] [--unicode] [--otp <otp>] + +Run "npm help unstar" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command update > must match snapshot 1`] = ` +npm update + +Update packages + +Usage: +npm update [<pkg>...] 
+ +Options: +[-g|--global] [--global-style] [--legacy-bundling] [--strict-peer-deps] +[--no-package-lock] [--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] +[--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +aliases: up, upgrade, udpate + +Run "npm help update" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command version > must match snapshot 1`] = ` +npm version + +Bump a package version + +Usage: +npm version [<newversion> | major | minor | patch | premajor | preminor | prepatch | prerelease | from-git] + +Options: +[--allow-same-version] [--no-commit-hooks] [--no-git-tag-version] [--json] +[--preid prerelease-id] [--sign-git-tag] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +alias: verison + +Run "npm help version" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command view > must match snapshot 1`] = ` +npm view + +View registry info + +Usage: +npm view [<@scope>/]<pkg>[@<version>] [<field>[.subfield]...] + +Options: +[--json] [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +aliases: v, info, show + +Run "npm help view" for more info +` + +exports[`test/lib/load-all-commands.js TAP load each command whoami > must match snapshot 1`] = ` +npm whoami + +Display npm username + +Usage: +npm whoami + +Options: +[--registry <registry>] + +Run "npm help whoami" for more info +` diff --git a/tap-snapshots/test/lib/ls.js.test.cjs b/tap-snapshots/test/lib/ls.js.test.cjs new file mode 100644 index 0000000000000..c3d0a87648edb --- /dev/null +++ b/tap-snapshots/test/lib/ls.js.test.cjs @@ -0,0 +1,704 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! 
+ */ +'use strict' +exports[`test/lib/ls.js TAP ignore missing optional deps --json > ls --json problems 1`] = ` +Array [ + "invalid: optional-wrong@3.2.1 {project}/node_modules/optional-wrong", + "missing: peer-missing@1, required by test-npm-ls-ignore-missing-optional@1.2.3", + "invalid: peer-optional-wrong@3.2.1 {project}/node_modules/peer-optional-wrong", + "invalid: peer-wrong@3.2.1 {project}/node_modules/peer-wrong", + "missing: prod-missing@1, required by test-npm-ls-ignore-missing-optional@1.2.3", + "invalid: prod-wrong@3.2.1 {project}/node_modules/prod-wrong", +] +` + +exports[`test/lib/ls.js TAP ignore missing optional deps --parseable > ls --parseable result 1`] = ` +{project} +{project}/node_modules/optional-ok +{project}/node_modules/optional-wrong +{project}/node_modules/peer-ok +{project}/node_modules/peer-optional-ok +{project}/node_modules/peer-optional-wrong +{project}/node_modules/peer-wrong +{project}/node_modules/prod-ok +{project}/node_modules/prod-wrong +` + +exports[`test/lib/ls.js TAP ignore missing optional deps human output > ls result 1`] = ` +test-npm-ls-ignore-missing-optional@1.2.3 {project} ++-- unmet optional dependency optional-missing@1 ++-- optional-ok@1.2.3 ++-- optional-wrong@3.2.1 invalid: "1" from the root project ++-- unmet dependency peer-missing@1 ++-- peer-ok@1.2.3 ++-- unmet optional dependency peer-optional-missing@1 ++-- peer-optional-ok@1.2.3 ++-- peer-optional-wrong@3.2.1 invalid: "1" from the root project ++-- peer-wrong@3.2.1 invalid: "1" from the root project ++-- unmet dependency prod-missing@1 ++-- prod-ok@1.2.3 +\`-- prod-wrong@3.2.1 invalid: "1" from the root project + +` + +exports[`test/lib/ls.js TAP ls --depth=0 > should output tree containing only top-level dependencies 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---depth-0 ++-- chai@1.0.0 +\`-- foo@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls --depth=1 > should output tree containing top-level deps and their deps only 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---depth-1 ++-- a@1.0.0 +| \`-- b@1.0.0 +\`-- e@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls --dev > should output tree containing dev deps 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---dev +\`-- dev-dep@1.0.0 + \`-- foo@1.0.0 + \`-- dog@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls --link > should output tree containing linked deps 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---link +\`-- linked-dep@1.0.0 -> ./linked-dep + +` + +exports[`test/lib/ls.js TAP ls --long --depth=0 > should output tree containing top-level deps with descriptions 1`] = ` +test-npm-ls@1.0.0 +| {CWD}/tap-testdir-ls-ls---long---depth-0 +| ++-- chai@1.0.0 +| ++-- dev-dep@1.0.0 +| A DEV dep kind of dep ++-- optional-dep@1.0.0 +| Maybe a dep? ++-- peer-dep@1.0.0 +| Peer-dep description here +\`-- prod-dep@1.0.0 + A PROD dep kind of dep + +` + +exports[`test/lib/ls.js TAP ls --long > should output tree info with descriptions 1`] = ` +test-npm-ls@1.0.0 +| {CWD}/tap-testdir-ls-ls---long +| ++-- chai@1.0.0 +| ++-- dev-dep@1.0.0 +| | A DEV dep kind of dep +| \`-- foo@1.0.0 +| | +| \`-- dog@1.0.0 +| ++-- optional-dep@1.0.0 +| Maybe a dep? 
++-- peer-dep@1.0.0 +| Peer-dep description here +\`-- prod-dep@1.0.0 + | A PROD dep kind of dep + \`-- dog@2.0.0 + A dep that bars + +` + +exports[`test/lib/ls.js TAP ls --only=development > should output tree containing only development deps 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---only-development +\`-- dev-dep@1.0.0 + \`-- foo@1.0.0 + \`-- dog@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls --only=prod > should output tree containing only prod deps 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---only-prod ++-- chai@1.0.0 ++-- optional-dep@1.0.0 +\`-- prod-dep@1.0.0 + \`-- dog@2.0.0 + +` + +exports[`test/lib/ls.js TAP ls --parseable --depth=0 > should output tree containing only top-level dependencies 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable---depth-0 +{CWD}/tap-testdir-ls-ls---parseable---depth-0/node_modules/chai +{CWD}/tap-testdir-ls-ls---parseable---depth-0/node_modules/foo +` + +exports[`test/lib/ls.js TAP ls --parseable --depth=1 > should output parseable containing top-level deps and their deps only 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable---depth-1 +{CWD}/tap-testdir-ls-ls---parseable---depth-1/node_modules/chai +{CWD}/tap-testdir-ls-ls---parseable---depth-1/node_modules/foo +{CWD}/tap-testdir-ls-ls---parseable---depth-1/node_modules/dog +` + +exports[`test/lib/ls.js TAP ls --parseable --dev > should output tree containing dev deps 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable---dev +{CWD}/tap-testdir-ls-ls---parseable---dev/node_modules/dev-dep +{CWD}/tap-testdir-ls-ls---parseable---dev/node_modules/foo +{CWD}/tap-testdir-ls-ls---parseable---dev/node_modules/dog +` + +exports[`test/lib/ls.js TAP ls --parseable --link > should output tree containing linked deps 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable---link +{CWD}/tap-testdir-ls-ls---parseable---link/node_modules/linked-dep +` + +exports[`test/lib/ls.js TAP ls --parseable --long --depth=0 > should output tree containing top-level deps with descriptions 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable---long---depth-0:test-npm-ls@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long---depth-0/node_modules/chai:chai@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long---depth-0/node_modules/dev-dep:dev-dep@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long---depth-0/node_modules/optional-dep:optional-dep@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long---depth-0/node_modules/peer-dep:peer-dep@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long---depth-0/node_modules/prod-dep:prod-dep@1.0.0 +` + +exports[`test/lib/ls.js TAP ls --parseable --long > should output tree info with descriptions 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable---long:test-npm-ls@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/chai:chai@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/dev-dep:dev-dep@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/optional-dep:optional-dep@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/peer-dep:peer-dep@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/prod-dep:prod-dep@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/foo:foo@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/prod-dep/node_modules/dog:dog@2.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/dog:dog@1.0.0 +` + +exports[`test/lib/ls.js TAP ls --parseable --long missing/invalid/extraneous > should output parseable result containing EXTRANEOUS/INVALID labels 1`] = ` 
+{CWD}/tap-testdir-ls-ls---parseable---long-missing-invalid-extraneous:test-npm-ls@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long-missing-invalid-extraneous/node_modules/chai:chai@1.0.0:EXTRANEOUS +{CWD}/tap-testdir-ls-ls---parseable---long-missing-invalid-extraneous/node_modules/foo:foo@1.0.0:INVALID +{CWD}/tap-testdir-ls-ls---parseable---long-missing-invalid-extraneous/node_modules/dog:dog@1.0.0 +` + +exports[`test/lib/ls.js TAP ls --parseable --long print symlink target location > should output parseable results with symlink targets 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location:test-npm-ls@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/chai:chai@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/dev-dep:dev-dep@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/linked-dep:linked-dep@1.0.0:{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/linked-dep +{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/optional-dep:optional-dep@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/peer-dep:peer-dep@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/prod-dep:prod-dep@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/foo:foo@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/prod-dep/node_modules/dog:dog@2.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/dog:dog@1.0.0 +` + +exports[`test/lib/ls.js TAP ls --parseable --long with extraneous deps > should output long parseable output with extraneous info 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable---long-with-extraneous-deps:test-npm-ls@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long-with-extraneous-deps/node_modules/chai:chai@1.0.0:EXTRANEOUS +{CWD}/tap-testdir-ls-ls---parseable---long-with-extraneous-deps/node_modules/foo:foo@1.0.0 +{CWD}/tap-testdir-ls-ls---parseable---long-with-extraneous-deps/node_modules/dog:dog@1.0.0 +` + +exports[`test/lib/ls.js TAP ls --parseable --only=development > should output tree containing only development deps 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable---only-development +{CWD}/tap-testdir-ls-ls---parseable---only-development/node_modules/dev-dep +{CWD}/tap-testdir-ls-ls---parseable---only-development/node_modules/foo +{CWD}/tap-testdir-ls-ls---parseable---only-development/node_modules/dog +` + +exports[`test/lib/ls.js TAP ls --parseable --only=prod > should output tree containing only prod deps 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable---only-prod +{CWD}/tap-testdir-ls-ls---parseable---only-prod/node_modules/chai +{CWD}/tap-testdir-ls-ls---parseable---only-prod/node_modules/optional-dep +{CWD}/tap-testdir-ls-ls---parseable---only-prod/node_modules/prod-dep +{CWD}/tap-testdir-ls-ls---parseable---only-prod/node_modules/prod-dep/node_modules/dog +` + +exports[`test/lib/ls.js TAP ls --parseable --production > should output tree containing production deps 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable---production +{CWD}/tap-testdir-ls-ls---parseable---production/node_modules/chai +{CWD}/tap-testdir-ls-ls---parseable---production/node_modules/optional-dep +{CWD}/tap-testdir-ls-ls---parseable---production/node_modules/prod-dep 
+{CWD}/tap-testdir-ls-ls---parseable---production/node_modules/prod-dep/node_modules/dog +` + +exports[`test/lib/ls.js TAP ls --parseable cycle deps > should print tree output omitting deduped ref 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-cycle-deps +{CWD}/tap-testdir-ls-ls---parseable-cycle-deps/node_modules/a +{CWD}/tap-testdir-ls-ls---parseable-cycle-deps/node_modules/b +` + +exports[`test/lib/ls.js TAP ls --parseable default --depth value should be 0 > should output parseable output containing only top-level dependencies 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-default---depth-value-should-be-0 +{CWD}/tap-testdir-ls-ls---parseable-default---depth-value-should-be-0/node_modules/chai +{CWD}/tap-testdir-ls-ls---parseable-default---depth-value-should-be-0/node_modules/foo +` + +exports[`test/lib/ls.js TAP ls --parseable empty location > should print empty result 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-empty-location +` + +exports[`test/lib/ls.js TAP ls --parseable extraneous deps > should output containing problems info 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-extraneous-deps +{CWD}/tap-testdir-ls-ls---parseable-extraneous-deps/node_modules/chai +{CWD}/tap-testdir-ls-ls---parseable-extraneous-deps/node_modules/foo +{CWD}/tap-testdir-ls-ls---parseable-extraneous-deps/node_modules/dog +` + +exports[`test/lib/ls.js TAP ls --parseable from and resolved properties > should not be printed in tree output 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-from-and-resolved-properties +{CWD}/tap-testdir-ls-ls---parseable-from-and-resolved-properties/node_modules/simple-output +` + +exports[`test/lib/ls.js TAP ls --parseable global > should print parseable output for global deps 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-global +{CWD}/tap-testdir-ls-ls---parseable-global/node_modules/a +{CWD}/tap-testdir-ls-ls---parseable-global/node_modules/b +{CWD}/tap-testdir-ls-ls---parseable-global/node_modules/b/node_modules/c +` + +exports[`test/lib/ls.js TAP ls --parseable json read problems > should print empty result 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-json-read-problems +` + +exports[`test/lib/ls.js TAP ls --parseable missing package.json > should output parseable missing name/version of top-level package 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-missing-package.json +{CWD}/tap-testdir-ls-ls---parseable-missing-package.json/node_modules/chai +{CWD}/tap-testdir-ls-ls---parseable-missing-package.json/node_modules/dog +{CWD}/tap-testdir-ls-ls---parseable-missing-package.json/node_modules/foo +` + +exports[`test/lib/ls.js TAP ls --parseable missing/invalid/extraneous > should output parseable containing top-level deps and their deps only 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-missing-invalid-extraneous +{CWD}/tap-testdir-ls-ls---parseable-missing-invalid-extraneous/node_modules/chai +{CWD}/tap-testdir-ls-ls---parseable-missing-invalid-extraneous/node_modules/foo +{CWD}/tap-testdir-ls-ls---parseable-missing-invalid-extraneous/node_modules/dog +` + +exports[`test/lib/ls.js TAP ls --parseable no args > should output parseable representation of dependencies structure 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-no-args +{CWD}/tap-testdir-ls-ls---parseable-no-args/node_modules/chai +{CWD}/tap-testdir-ls-ls---parseable-no-args/node_modules/foo +{CWD}/tap-testdir-ls-ls---parseable-no-args/node_modules/dog +` + +exports[`test/lib/ls.js TAP ls --parseable resolved points to git ref > should output tree containing git refs 1`] = ` 
+{CWD}/tap-testdir-ls-ls---parseable-resolved-points-to-git-ref +{CWD}/tap-testdir-ls-ls---parseable-resolved-points-to-git-ref/node_modules/abbrev +` + +exports[`test/lib/ls.js TAP ls --parseable unmet optional dep > should output parseable with empty entry for missing optional deps 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep +{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/chai +{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/dev-dep +{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/optional-dep +{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/peer-dep +{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/prod-dep +{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/foo +{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/prod-dep/node_modules/dog +{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/dog +` + +exports[`test/lib/ls.js TAP ls --parseable unmet peer dep > should output parseable signaling missing peer dep in problems 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep +{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/chai +{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/dev-dep +{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/optional-dep +{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/peer-dep +{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/prod-dep +{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/foo +{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/prod-dep/node_modules/dog +{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/dog +` + +exports[`test/lib/ls.js TAP ls --parseable using aliases > should output tree containing aliases 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-using-aliases +{CWD}/tap-testdir-ls-ls---parseable-using-aliases/node_modules/a +` + +exports[`test/lib/ls.js TAP ls --parseable with filter arg > should output parseable contaning only occurrences of filtered by package 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-with-filter-arg/node_modules/chai +` + +exports[`test/lib/ls.js TAP ls --parseable with filter arg nested dep > should output parseable contaning only occurrences of filtered package 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-with-filter-arg-nested-dep/node_modules/dog +` + +exports[`test/lib/ls.js TAP ls --parseable with missing filter arg > should output parseable output containing no dependencies info 1`] = ` + +` + +exports[`test/lib/ls.js TAP ls --parseable with multiple filter args > should output parseable contaning only occurrences of multiple filtered packages and their ancestors 1`] = ` +{CWD}/tap-testdir-ls-ls---parseable-with-multiple-filter-args/node_modules/chai +{CWD}/tap-testdir-ls-ls---parseable-with-multiple-filter-args/node_modules/dog +` + +exports[`test/lib/ls.js TAP ls --production > should output tree containing production deps 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---production ++-- chai@1.0.0 ++-- optional-dep@1.0.0 +\`-- prod-dep@1.0.0 + \`-- dog@2.0.0 + +` + +exports[`test/lib/ls.js TAP ls broken resolved field > should NOT print git refs in output tree 1`] = ` +npm-broken-resolved-field-test@1.0.0 {CWD}/tap-testdir-ls-ls-broken-resolved-field +\`-- a@1.0.1 + +` + +exports[`test/lib/ls.js TAP ls colored output > should output tree containing color info 1`] = ` +test-npm-ls@1.0.0 
{CWD}/tap-testdir-ls-ls-colored-output ++-- chai@1.0.0 extraneous ++-- foo@1.0.0 invalid: "^2.0.0" from the root project +| \`-- dog@1.0.0 +\`-- UNMET DEPENDENCY ipsum@^1.0.0 + +` + +exports[`test/lib/ls.js TAP ls cycle deps > should print tree output containing deduped ref 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-cycle-deps +\`-- a@1.0.0 + \`-- b@1.0.0 + \`-- a@1.0.0 deduped + +` + +exports[`test/lib/ls.js TAP ls cycle deps with filter args > should print tree output containing deduped ref 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-cycle-deps-with-filter-args +\`-- a@1.0.0 + \`-- b@1.0.0 + \`-- a@1.0.0 deduped + +` + +exports[`test/lib/ls.js TAP ls deduped missing dep > should output parseable signaling missing peer dep in problems 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-deduped-missing-dep ++-- a@1.0.0 +| \`-- UNMET DEPENDENCY b@^1.0.0 +\`-- UNMET DEPENDENCY b@^1.0.0 + +` + +exports[`test/lib/ls.js TAP ls default --depth value should be 0 > should output tree containing only top-level dependencies 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-default---depth-value-should-be-0 ++-- chai@1.0.0 +\`-- foo@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls empty location > should print empty result 1`] = ` +{CWD}/tap-testdir-ls-ls-empty-location +\`-- (empty) + +` + +exports[`test/lib/ls.js TAP ls extraneous deps > should output containing problems info 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-extraneous-deps ++-- chai@1.0.0 extraneous +\`-- foo@1.0.0 + \`-- dog@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls filter pkg arg using depth option > should list a in top-level only 1`] = ` +test-pkg-arg-filter-with-depth-opt@1.0.0 {CWD}/tap-testdir-ls-ls-filter-pkg-arg-using-depth-option +\`-- a@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls filter pkg arg using depth option > should print empty results msg 1`] = ` +test-pkg-arg-filter-with-depth-opt@1.0.0 {CWD}/tap-testdir-ls-ls-filter-pkg-arg-using-depth-option +\`-- (empty) + +` + +exports[`test/lib/ls.js TAP ls filter pkg arg using depth option > should print expected result 1`] = ` +test-pkg-arg-filter-with-depth-opt@1.0.0 {CWD}/tap-testdir-ls-ls-filter-pkg-arg-using-depth-option +\`-- b@1.0.0 + \`-- c@1.0.0 + \`-- d@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls filtering by child of missing dep > should print tree and not duplicate child of missing items 1`] = ` +filter-by-child-of-missing-dep@1.0.0 {CWD}/tap-testdir-ls-ls-filtering-by-child-of-missing-dep ++-- b@1.0.0 extraneous +| \`-- c@1.0.0 deduped ++-- c@1.0.0 extraneous +\`-- d@1.0.0 extraneous + \`-- c@2.0.0 extraneous + +` + +exports[`test/lib/ls.js TAP ls from and resolved properties > should not be printed in tree output 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-from-and-resolved-properties +\`-- simple-output@2.1.1 + +` + +exports[`test/lib/ls.js TAP ls global > should print tree and not mark top-level items extraneous 1`] = ` +{CWD}/tap-testdir-ls-ls-global ++-- a@1.0.0 +\`-- b@1.0.0 + \`-- c@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls invalid deduped dep > should output tree signaling mismatching peer dep in problems 1`] = ` +invalid-deduped-dep@1.0.0 {CWD}/tap-testdir-ls-ls-invalid-deduped-dep ++-- a@1.0.0 +| \`-- b@1.0.0 deduped invalid: "^2.0.0" from the root project, "^2.0.0" from node_modules/a +\`-- b@1.0.0 invalid: "^2.0.0" from the root project, "^2.0.0" from node_modules/a + +` + +exports[`test/lib/ls.js TAP ls invalid peer dep > should output tree signaling mismatching peer dep in problems 1`] = ` +test-npm-ls@1.0.0 
{CWD}/tap-testdir-ls-ls-invalid-peer-dep ++-- chai@1.0.0 ++-- dev-dep@1.0.0 +| \`-- foo@1.0.0 +| \`-- dog@1.0.0 ++-- optional-dep@1.0.0 ++-- peer-dep@1.0.0 invalid: "^2.0.0" from the root project +\`-- prod-dep@1.0.0 + \`-- dog@2.0.0 + +` + +exports[`test/lib/ls.js TAP ls json read problems > should print empty result 1`] = ` +{CWD}/tap-testdir-ls-ls-json-read-problems +\`-- (empty) + +` + +exports[`test/lib/ls.js TAP ls loading a tree containing workspaces > should filter by parent folder workspace config 1`] = ` +workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces ++-- e@1.0.0 -> ./group/e +\`-- f@1.0.0 -> ./group/f + +` + +exports[`test/lib/ls.js TAP ls loading a tree containing workspaces > should filter single workspace 1`] = ` +workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces ++-- a@1.0.0 -> ./a +| \`-- d@1.0.0 deduped -> ./d +\`-- d@1.0.0 -> ./d + +` + +exports[`test/lib/ls.js TAP ls loading a tree containing workspaces > should filter using workspace config 1`] = ` +workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces +\`-- a@1.0.0 -> ./a + +-- baz@1.0.0 + +-- c@1.0.0 + \`-- d@1.0.0 -> ./d + \`-- foo@1.1.1 + \`-- bar@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls loading a tree containing workspaces > should list --all workspaces properly 1`] = ` +workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces ++-- a@1.0.0 -> ./a +| +-- baz@1.0.0 +| +-- c@1.0.0 +| \`-- d@1.0.0 deduped -> ./d ++-- b@1.0.0 -> ./b ++-- d@1.0.0 -> ./d +| \`-- foo@1.1.1 +| \`-- bar@1.0.0 ++-- e@1.0.0 -> ./group/e +\`-- f@1.0.0 -> ./group/f + +` + +exports[`test/lib/ls.js TAP ls loading a tree containing workspaces > should list only prod deps of workspaces 1`] = ` +workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces ++-- a@1.0.0 -> ./a +| +-- c@1.0.0 +| \`-- d@1.0.0 deduped -> ./d ++-- b@1.0.0 -> ./b ++-- d@1.0.0 -> ./d +| \`-- foo@1.1.1 +| \`-- bar@1.0.0 ++-- e@1.0.0 -> ./group/e +\`-- f@1.0.0 -> ./group/f + +` + +exports[`test/lib/ls.js TAP ls loading a tree containing workspaces > should list workspaces properly with default configs 1`] = ` +workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces ++-- a@1.0.0 -> ./a +| +-- baz@1.0.0 +| +-- c@1.0.0 +| \`-- d@1.0.0 deduped -> ./d ++-- b@1.0.0 -> ./b ++-- d@1.0.0 -> ./d +| \`-- foo@1.1.1 ++-- e@1.0.0 -> ./group/e +\`-- f@1.0.0 -> ./group/f + +` + +exports[`test/lib/ls.js TAP ls loading a tree containing workspaces > should print all tree and filter by dep within only the ws subtree 1`] = ` +workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces +\`-- d@1.0.0 -> ./d + \`-- foo@1.1.1 + \`-- bar@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls missing package.json > should output tree missing name/version of top-level package 1`] = ` +{CWD}/tap-testdir-ls-ls-missing-package.json ++-- chai@1.0.0 extraneous ++-- dog@1.0.0 extraneous +\`-- foo@1.0.0 extraneous + \`-- dog@1.0.0 deduped + +` + +exports[`test/lib/ls.js TAP ls missing/invalid/extraneous > should output tree containing missing, invalid, extraneous labels 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-missing-invalid-extraneous ++-- chai@1.0.0 extraneous ++-- foo@1.0.0 invalid: "^2.0.0" from the root project +| \`-- dog@1.0.0 +\`-- UNMET DEPENDENCY ipsum@^1.0.0 + +` + +exports[`test/lib/ls.js TAP ls no args > should output tree representation of dependencies structure 1`] = ` +test-npm-ls@1.0.0 
{CWD}/tap-testdir-ls-ls-no-args ++-- chai@1.0.0 +\`-- foo@1.0.0 + \`-- dog@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls print deduped symlinks > should output tree containing linked deps 1`] = ` +print-deduped-symlinks@1.0.0 {CWD}/tap-testdir-ls-ls-print-deduped-symlinks ++-- a@1.0.0 +| \`-- b@1.0.0 deduped -> ./b +\`-- b@1.0.0 -> ./b + +` + +exports[`test/lib/ls.js TAP ls resolved points to git ref > should output tree containing git refs 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-resolved-points-to-git-ref +\`-- abbrev@1.1.1 (git+ssh://git@github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c) + +` + +exports[`test/lib/ls.js TAP ls unmet optional dep > should output tree with empty entry for missing optional deps 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-unmet-optional-dep ++-- chai@1.0.0 ++-- dev-dep@1.0.0 +| \`-- foo@1.0.0 +| \`-- dog@1.0.0 ++-- UNMET OPTIONAL DEPENDENCY missing-optional-dep@^1.0.0 ++-- optional-dep@1.0.0 invalid: "^2.0.0" from the root project ++-- peer-dep@1.0.0 +\`-- prod-dep@1.0.0 + \`-- dog@2.0.0 + +` + +exports[`test/lib/ls.js TAP ls unmet peer dep > should output tree signaling missing peer dep in problems 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-unmet-peer-dep +\`-- UNMET DEPENDENCY peer-dep@* + +` + +exports[`test/lib/ls.js TAP ls using aliases > should output tree containing aliases 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-using-aliases +\`-- a@npm:b@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls with args and dedupe entries > should print tree output containing deduped ref 1`] = ` +dedupe-entries@1.0.0 {CWD}/tap-testdir-ls-ls-with-args-and-dedupe-entries ++-- @npmcli/a@1.0.0 +| \`-- @npmcli/b@1.1.2 deduped ++-- @npmcli/b@1.1.2 +\`-- @npmcli/c@1.0.0 + \`-- @npmcli/b@1.1.2 deduped + +` + +exports[`test/lib/ls.js TAP ls with args and different order of items > should print tree output containing deduped ref 1`] = ` +dedupe-entries@1.0.0 {CWD}/tap-testdir-ls-ls-with-args-and-different-order-of-items ++-- @npmcli/a@1.0.0 +| \`-- @npmcli/c@1.0.0 deduped ++-- @npmcli/b@1.1.2 +| \`-- @npmcli/c@1.0.0 deduped +\`-- @npmcli/c@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls with dot filter arg > should output tree contaning only occurrences of filtered by package and colored output 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-with-dot-filter-arg +\`-- (empty) + +` + +exports[`test/lib/ls.js TAP ls with filter arg > should output tree contaning only occurrences of filtered by package and colored output 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-with-filter-arg +\`-- chai@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls with filter arg nested dep > should output tree contaning only occurrences of filtered package and its ancestors 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-with-filter-arg-nested-dep +\`-- foo@1.0.0 + \`-- dog@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls with missing filter arg > should output tree containing no dependencies info 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-with-missing-filter-arg +\`-- (empty) + +` + +exports[`test/lib/ls.js TAP ls with multiple filter args > should output tree contaning only occurrences of multiple filtered packages and their ancestors 1`] = ` +test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-with-multiple-filter-args ++-- chai@1.0.0 +\`-- foo@1.0.0 + \`-- dog@1.0.0 + +` + +exports[`test/lib/ls.js TAP ls with no args dedupe entries > should print tree output containing deduped ref 1`] = ` +dedupe-entries@1.0.0 
{CWD}/tap-testdir-ls-ls-with-no-args-dedupe-entries ++-- @npmcli/a@1.0.0 +| \`-- @npmcli/b@1.1.2 deduped ++-- @npmcli/b@1.1.2 +\`-- @npmcli/c@1.0.0 + \`-- @npmcli/b@1.1.2 deduped + +` + +exports[`test/lib/ls.js TAP ls with no args dedupe entries and not displaying all > should print tree output containing deduped ref 1`] = ` +dedupe-entries@1.0.0 {CWD}/tap-testdir-ls-ls-with-no-args-dedupe-entries-and-not-displaying-all ++-- @npmcli/a@1.0.0 ++-- @npmcli/b@1.1.2 +\`-- @npmcli/c@1.0.0 + +` + +exports[`test/lib/ls.js TAP show multiple invalid reasons > ls result 1`] = ` +test-npm-ls@1.0.0 {cwd}/tap-testdir-ls-show-multiple-invalid-reasons ++-- cat@1.0.0 invalid: "^2.0.0" from the root project +| \`-- dog@1.0.0 deduped invalid: "^1.2.3" from the root project, "^2.0.0" from node_modules/cat ++-- chai@1.0.0 extraneous +| \`-- dog@1.0.0 deduped invalid: "^1.2.3" from the root project, "^2.0.0" from node_modules/cat, "2.x" from node_modules/chai +\`-- dog@1.0.0 invalid: "^1.2.3" from the root project, "^2.0.0" from node_modules/cat, "2.x" from node_modules/chai + \`-- cat@1.0.0 deduped invalid: "^2.0.0" from the root project + +` diff --git a/tap-snapshots/test/lib/outdated.js.test.cjs b/tap-snapshots/test/lib/outdated.js.test.cjs new file mode 100644 index 0000000000000..9f589d0134c03 --- /dev/null +++ b/tap-snapshots/test/lib/outdated.js.test.cjs @@ -0,0 +1,252 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/lib/outdated.js TAP should display outdated deps outdated --all > must match snapshot 1`] = ` + +Package Current Wanted Latest Location Depended by +cat 1.0.0 1.0.1 1.0.1 node_modules/cat tap-testdir-outdated-should-display-outdated-deps +chai 1.0.0 1.0.1 1.0.1 node_modules/chai tap-testdir-outdated-should-display-outdated-deps +dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-should-display-outdated-deps +theta MISSING 1.0.1 1.0.1 - tap-testdir-outdated-should-display-outdated-deps +` + +exports[`test/lib/outdated.js TAP should display outdated deps outdated --json --long > must match snapshot 1`] = ` + +{ + "cat": { + "current": "1.0.0", + "wanted": "1.0.1", + "latest": "1.0.1", + "dependent": "tap-testdir-outdated-should-display-outdated-deps", + "location": "{CWD}/test/lib/tap-testdir-outdated-should-display-outdated-deps/node_modules/cat", + "type": "dependencies" + }, + "chai": { + "current": "1.0.0", + "wanted": "1.0.1", + "latest": "1.0.1", + "dependent": "tap-testdir-outdated-should-display-outdated-deps", + "location": "{CWD}/test/lib/tap-testdir-outdated-should-display-outdated-deps/node_modules/chai", + "type": "peerDependencies" + }, + "dog": { + "current": "1.0.1", + "wanted": "1.0.1", + "latest": "2.0.0", + "dependent": "tap-testdir-outdated-should-display-outdated-deps", + "location": "{CWD}/test/lib/tap-testdir-outdated-should-display-outdated-deps/node_modules/dog", + "type": "dependencies" + }, + "theta": { + "wanted": "1.0.1", + "latest": "1.0.1", + "dependent": "tap-testdir-outdated-should-display-outdated-deps", + "type": "dependencies" + } +} +` + +exports[`test/lib/outdated.js TAP should display outdated deps outdated --json > must match snapshot 1`] = ` + +{ + "cat": { + "current": "1.0.0", + "wanted": "1.0.1", + "latest": "1.0.1", + "dependent": 
"tap-testdir-outdated-should-display-outdated-deps", + "location": "{CWD}/test/lib/tap-testdir-outdated-should-display-outdated-deps/node_modules/cat" + }, + "chai": { + "current": "1.0.0", + "wanted": "1.0.1", + "latest": "1.0.1", + "dependent": "tap-testdir-outdated-should-display-outdated-deps", + "location": "{CWD}/test/lib/tap-testdir-outdated-should-display-outdated-deps/node_modules/chai" + }, + "dog": { + "current": "1.0.1", + "wanted": "1.0.1", + "latest": "2.0.0", + "dependent": "tap-testdir-outdated-should-display-outdated-deps", + "location": "{CWD}/test/lib/tap-testdir-outdated-should-display-outdated-deps/node_modules/dog" + }, + "theta": { + "wanted": "1.0.1", + "latest": "1.0.1", + "dependent": "tap-testdir-outdated-should-display-outdated-deps" + } +} +` + +exports[`test/lib/outdated.js TAP should display outdated deps outdated --long > must match snapshot 1`] = ` + +Package Current Wanted Latest Location Depended by Package Type Homepage +cat 1.0.0 1.0.1 1.0.1 node_modules/cat tap-testdir-outdated-should-display-outdated-deps dependencies +chai 1.0.0 1.0.1 1.0.1 node_modules/chai tap-testdir-outdated-should-display-outdated-deps peerDependencies +dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-should-display-outdated-deps dependencies +theta MISSING 1.0.1 1.0.1 - tap-testdir-outdated-should-display-outdated-deps dependencies +` + +exports[`test/lib/outdated.js TAP should display outdated deps outdated --omit=dev --omit=peer > must match snapshot 1`] = ` + +Package Current Wanted Latest Location Depended by +cat 1.0.0 1.0.1 1.0.1 node_modules/cat tap-testdir-outdated-should-display-outdated-deps +dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-should-display-outdated-deps +theta MISSING 1.0.1 1.0.1 - tap-testdir-outdated-should-display-outdated-deps +` + +exports[`test/lib/outdated.js TAP should display outdated deps outdated --omit=dev > must match snapshot 1`] = ` + +Package Current Wanted Latest Location Depended by +cat 1.0.0 1.0.1 1.0.1 node_modules/cat tap-testdir-outdated-should-display-outdated-deps +chai 1.0.0 1.0.1 1.0.1 node_modules/chai tap-testdir-outdated-should-display-outdated-deps +dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-should-display-outdated-deps +theta MISSING 1.0.1 1.0.1 - tap-testdir-outdated-should-display-outdated-deps +` + +exports[`test/lib/outdated.js TAP should display outdated deps outdated --omit=prod > must match snapshot 1`] = ` + +Package Current Wanted Latest Location Depended by +cat 1.0.0 1.0.1 1.0.1 node_modules/cat tap-testdir-outdated-should-display-outdated-deps +chai 1.0.0 1.0.1 1.0.1 node_modules/chai tap-testdir-outdated-should-display-outdated-deps +dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-should-display-outdated-deps +` + +exports[`test/lib/outdated.js TAP should display outdated deps outdated --parseable --long > must match snapshot 1`] = ` + +{CWD}/test/lib/tap-testdir-outdated-should-display-outdated-deps/node_modules/cat:cat@1.0.1:cat@1.0.0:cat@1.0.1:tap-testdir-outdated-should-display-outdated-deps:dependencies: +{CWD}/test/lib/tap-testdir-outdated-should-display-outdated-deps/node_modules/chai:chai@1.0.1:chai@1.0.0:chai@1.0.1:tap-testdir-outdated-should-display-outdated-deps:peerDependencies: +{CWD}/test/lib/tap-testdir-outdated-should-display-outdated-deps/node_modules/dog:dog@1.0.1:dog@1.0.1:dog@2.0.0:tap-testdir-outdated-should-display-outdated-deps:dependencies: 
+:theta@1.0.1:MISSING:theta@1.0.1:tap-testdir-outdated-should-display-outdated-deps:dependencies: +` + +exports[`test/lib/outdated.js TAP should display outdated deps outdated --parseable > must match snapshot 1`] = ` + +{CWD}/test/lib/tap-testdir-outdated-should-display-outdated-deps/node_modules/cat:cat@1.0.1:cat@1.0.0:cat@1.0.1:tap-testdir-outdated-should-display-outdated-deps +{CWD}/test/lib/tap-testdir-outdated-should-display-outdated-deps/node_modules/chai:chai@1.0.1:chai@1.0.0:chai@1.0.1:tap-testdir-outdated-should-display-outdated-deps +{CWD}/test/lib/tap-testdir-outdated-should-display-outdated-deps/node_modules/dog:dog@1.0.1:dog@1.0.1:dog@2.0.0:tap-testdir-outdated-should-display-outdated-deps +:theta@1.0.1:MISSING:theta@1.0.1:tap-testdir-outdated-should-display-outdated-deps +` + +exports[`test/lib/outdated.js TAP should display outdated deps outdated > must match snapshot 1`] = ` + +Package Current Wanted Latest Location Depended by +cat 1.0.0 1.0.1 1.0.1 node_modules/cat tap-testdir-outdated-should-display-outdated-deps +chai 1.0.0 1.0.1 1.0.1 node_modules/chai tap-testdir-outdated-should-display-outdated-deps +dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-should-display-outdated-deps +theta MISSING 1.0.1 1.0.1 - tap-testdir-outdated-should-display-outdated-deps +` + +exports[`test/lib/outdated.js TAP should display outdated deps outdated global > must match snapshot 1`] = ` + +Package Current Wanted Latest Location Depended by +cat 1.0.0 1.0.1 1.0.1 node_modules/cat global +` + +exports[`test/lib/outdated.js TAP should display outdated deps outdated specific dep > must match snapshot 1`] = ` + +Package Current Wanted Latest Location Depended by +cat 1.0.0 1.0.1 1.0.1 node_modules/cat tap-testdir-outdated-should-display-outdated-deps +` + +exports[`test/lib/outdated.js TAP workspaces > should display all dependencies 1`] = ` + +Package Current Wanted Latest Location Depended by +cat 1.0.0 1.0.1 1.0.1 node_modules/cat a@1.0.0 +chai 1.0.0 1.0.1 1.0.1 node_modules/chai foo +dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-workspaces +theta MISSING 1.0.1 1.0.1 - c@1.0.0 +` + +exports[`test/lib/outdated.js TAP workspaces > should display json results filtered by ws 1`] = ` + +{ + "cat": { + "current": "1.0.0", + "wanted": "1.0.1", + "latest": "1.0.1", + "dependent": "a", + "location": "{CWD}/test/lib/tap-testdir-outdated-workspaces/node_modules/cat" + } +} +` + +exports[`test/lib/outdated.js TAP workspaces > should display missing deps when filtering by ws 1`] = ` + +Package Current Wanted Latest Location Depended by +theta MISSING 1.0.1 1.0.1 - c@1.0.0 +` + +exports[`test/lib/outdated.js TAP workspaces > should display nested deps when filtering by ws and using --all 1`] = ` + +Package Current Wanted Latest Location Depended by +cat 1.0.0 1.0.1 1.0.1 node_modules/cat a@1.0.0 +chai 1.0.0 1.0.1 1.0.1 node_modules/chai foo +` + +exports[`test/lib/outdated.js TAP workspaces > should display no results if ws has no deps to display 1`] = ` + +` + +exports[`test/lib/outdated.js TAP workspaces > should display parseable results filtered by ws 1`] = ` + +{CWD}/test/lib/tap-testdir-outdated-workspaces/node_modules/cat:cat@1.0.1:cat@1.0.0:cat@1.0.1:a +` + +exports[`test/lib/outdated.js TAP workspaces > should display results filtered by ws 1`] = ` + +Package Current Wanted Latest Location Depended by +cat 1.0.0 1.0.1 1.0.1 node_modules/cat a@1.0.0 +` + +exports[`test/lib/outdated.js TAP workspaces > should display ws outdated deps human output 1`] = ` + +Package 
Current Wanted Latest Location Depended by +cat 1.0.0 1.0.1 1.0.1 node_modules/cat a@1.0.0 +dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-workspaces +theta MISSING 1.0.1 1.0.1 - c@1.0.0 +` + +exports[`test/lib/outdated.js TAP workspaces > should display ws outdated deps json output 1`] = ` + +{ + "cat": { + "current": "1.0.0", + "wanted": "1.0.1", + "latest": "1.0.1", + "dependent": "a", + "location": "{CWD}/test/lib/tap-testdir-outdated-workspaces/node_modules/cat" + }, + "dog": { + "current": "1.0.1", + "wanted": "1.0.1", + "latest": "2.0.0", + "dependent": "tap-testdir-outdated-workspaces", + "location": "{CWD}/test/lib/tap-testdir-outdated-workspaces/node_modules/dog" + }, + "theta": { + "wanted": "1.0.1", + "latest": "1.0.1", + "dependent": "c" + } +} +` + +exports[`test/lib/outdated.js TAP workspaces > should display ws outdated deps parseable output 1`] = ` + +{CWD}/test/lib/tap-testdir-outdated-workspaces/node_modules/cat:cat@1.0.1:cat@1.0.0:cat@1.0.1:a +{CWD}/test/lib/tap-testdir-outdated-workspaces/node_modules/dog:dog@1.0.1:dog@1.0.1:dog@2.0.0:tap-testdir-outdated-workspaces +:theta@1.0.1:MISSING:theta@1.0.1:c +` + +exports[`test/lib/outdated.js TAP workspaces > should highlight ws in dependend by section 1`] = ` + +Package Current Wanted Latest Location Depended by +cat 1.0.0 1.0.1 1.0.1 node_modules/cat a@1.0.0 +dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-workspaces +theta MISSING 1.0.1 1.0.1 - c@1.0.0 +` diff --git a/tap-snapshots/test-lib-owner.js-TAP.test.js b/tap-snapshots/test/lib/owner.js.test.cjs similarity index 100% rename from tap-snapshots/test-lib-owner.js-TAP.test.js rename to tap-snapshots/test/lib/owner.js.test.cjs diff --git a/tap-snapshots/test-lib-profile.js-TAP.test.js b/tap-snapshots/test/lib/profile.js.test.cjs similarity index 86% rename from tap-snapshots/test-lib-profile.js-TAP.test.js rename to tap-snapshots/test/lib/profile.js.test.cjs index bb838ad92c97d..58975515162f6 100644 --- a/tap-snapshots/test-lib-profile.js-TAP.test.js +++ b/tap-snapshots/test/lib/profile.js.test.cjs @@ -9,7 +9,11 @@ exports[`test/lib/profile.js TAP enable-2fa from token and set otp, retries on p Scan into your authenticator app: qrcode Or enter code: -12342FA successfully enabled. Below are your recovery codes, please print these out.You will need these to recover access to your account if you lose your authentication device. 123456 789101 +1234 +2FA successfully enabled. Below are your recovery codes, please print these out. +You will need these to recover access to your account if you lose your authentication device. 
+ 123456 + 789101 ` exports[`test/lib/profile.js TAP profile get <key> --parseable > should output parseable result value 1`] = ` @@ -29,7 +33,17 @@ foo foo@github.com (verified) https://github.com/npm ` exports[`test/lib/profile.js TAP profile get no args --parseable > should output all profile info as parseable result 1`] = ` -tfa auth-and-writesname fooemail foo@github.comemail_verified truecreated 2015-02-26T01:26:37.384Zupdated 2020-08-12T16:19:35.326Zfullname Foo Barhomepage https://github.comfreenode foobartwitter https://twitter.com/npmjsgithub https://github.com/npm +tfa auth-and-writes +name foo +email foo@github.com +email_verified true +created 2015-02-26T01:26:37.384Z +updated 2020-08-12T16:19:35.326Z +fullname Foo Bar +homepage https://github.com +freenode foobar +twitter https://twitter.com/npmjs +github https://github.com/npm ` exports[`test/lib/profile.js TAP profile get no args default output > should output table with contents 1`] = ` diff --git a/tap-snapshots/test/lib/publish.js.test.cjs b/tap-snapshots/test/lib/publish.js.test.cjs new file mode 100644 index 0000000000000..7a7502e02e338 --- /dev/null +++ b/tap-snapshots/test/lib/publish.js.test.cjs @@ -0,0 +1,176 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/lib/publish.js TAP private workspaces colorless > should output all publishes 1`] = ` +Array [ + "+ @npmcli/b@1.0.0", +] +` + +exports[`test/lib/publish.js TAP private workspaces colorless > should publish all non-private workspaces 1`] = ` +Array [ + Object { + "_id": "@npmcli/b@1.0.0", + "name": "@npmcli/b", + "readme": "ERROR: No README data found!", + "version": "1.0.0", + }, +] +` + +exports[`test/lib/publish.js TAP private workspaces with color > should output all publishes 1`] = ` +Array [ + "+ @npmcli/b@1.0.0", +] +` + +exports[`test/lib/publish.js TAP private workspaces with color > should publish all non-private workspaces 1`] = ` +Array [ + Object { + "_id": "@npmcli/b@1.0.0", + "name": "@npmcli/b", + "readme": "ERROR: No README data found!", + "version": "1.0.0", + }, +] +` + +exports[`test/lib/publish.js TAP shows usage with wrong set of arguments > should print usage 1`] = ` +Error: +Usage: npm publish + +Publish a package + +Usage: +npm publish [<folder>] + +Options: +[--tag <tag>] [--access <restricted|public>] [--dry-run] [--otp <otp>] +[-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] +[-ws|--workspaces] + +Run "npm help publish" for more info { + "code": "EUSAGE", +} +` + +exports[`test/lib/publish.js TAP workspaces all workspaces > should output all publishes 1`] = ` +Array [ + "+ workspace-a@1.2.3-a", + "+ workspace-b@1.2.3-n", + "+ workspace-n@1.2.3-n", +] +` + +exports[`test/lib/publish.js TAP workspaces all workspaces > should publish all workspaces 1`] = ` +Array [ + Object { + "_id": "workspace-a@1.2.3-a", + "name": "workspace-a", + "readme": "ERROR: No README data found!", + "repository": Object { + "type": "git", + "url": "http://repo.workspace-a/", + }, + "version": "1.2.3-a", + }, + Object { + "_id": "workspace-b@1.2.3-n", + "bugs": Object { + "url": "https://github.com/npm/workspace-b/issues", + }, + "homepage": "https://github.com/npm/workspace-b#readme", + "name": "workspace-b", + "readme": "ERROR: No README data found!", + "repository": Object { + 
"type": "git", + "url": "git+https://github.com/npm/workspace-b.git", + }, + "version": "1.2.3-n", + }, + Object { + "_id": "workspace-n@1.2.3-n", + "name": "workspace-n", + "readme": "ERROR: No README data found!", + "version": "1.2.3-n", + }, +] +` + +exports[`test/lib/publish.js TAP workspaces json > should output all publishes as json 1`] = ` +Array [ + String( + { + "workspace-a": { + "id": "workspace-a@1.2.3-a" + }, + "workspace-b": { + "id": "workspace-b@1.2.3-n" + }, + "workspace-n": { + "id": "workspace-n@1.2.3-n" + } + } + ), +] +` + +exports[`test/lib/publish.js TAP workspaces json > should publish all workspaces 1`] = ` +Array [ + Object { + "_id": "workspace-a@1.2.3-a", + "name": "workspace-a", + "readme": "ERROR: No README data found!", + "repository": Object { + "type": "git", + "url": "http://repo.workspace-a/", + }, + "version": "1.2.3-a", + }, + Object { + "_id": "workspace-b@1.2.3-n", + "bugs": Object { + "url": "https://github.com/npm/workspace-b/issues", + }, + "homepage": "https://github.com/npm/workspace-b#readme", + "name": "workspace-b", + "readme": "ERROR: No README data found!", + "repository": Object { + "type": "git", + "url": "git+https://github.com/npm/workspace-b.git", + }, + "version": "1.2.3-n", + }, + Object { + "_id": "workspace-n@1.2.3-n", + "name": "workspace-n", + "readme": "ERROR: No README data found!", + "version": "1.2.3-n", + }, +] +` + +exports[`test/lib/publish.js TAP workspaces one workspace > should output one publish 1`] = ` +Array [ + "+ workspace-a@1.2.3-a", +] +` + +exports[`test/lib/publish.js TAP workspaces one workspace > should publish given workspace 1`] = ` +Array [ + Object { + "_id": "workspace-a@1.2.3-a", + "name": "workspace-a", + "readme": "ERROR: No README data found!", + "repository": Object { + "type": "git", + "url": "http://repo.workspace-a/", + }, + "version": "1.2.3-a", + }, +] +` diff --git a/tap-snapshots/test-lib-search.js-TAP.test.js b/tap-snapshots/test/lib/search.js.test.cjs similarity index 100% rename from tap-snapshots/test-lib-search.js-TAP.test.js rename to tap-snapshots/test/lib/search.js.test.cjs diff --git a/tap-snapshots/test-lib-stars.js-TAP.test.js b/tap-snapshots/test/lib/stars.js.test.cjs similarity index 100% rename from tap-snapshots/test-lib-stars.js-TAP.test.js rename to tap-snapshots/test/lib/stars.js.test.cjs diff --git a/tap-snapshots/test-lib-team.js-TAP.test.js b/tap-snapshots/test/lib/team.js.test.cjs similarity index 100% rename from tap-snapshots/test-lib-team.js-TAP.test.js rename to tap-snapshots/test/lib/team.js.test.cjs diff --git a/tap-snapshots/test-lib-publish.js-TAP.test.js b/tap-snapshots/test/lib/unpublish.js.test.cjs similarity index 51% rename from tap-snapshots/test-lib-publish.js-TAP.test.js rename to tap-snapshots/test/lib/unpublish.js.test.cjs index 6af6c13f56b9f..5936bec6c759f 100644 --- a/tap-snapshots/test-lib-publish.js-TAP.test.js +++ b/tap-snapshots/test/lib/unpublish.js.test.cjs @@ -5,9 +5,10 @@ * Make sure to inspect the output below. Do not ignore changes! */ 'use strict' -exports[`test/lib/publish.js TAP shows usage with wrong set of arguments > should print usage 1`] = ` -npm publish [<folder>] [--tag <tag>] [--access <public|restricted>] [--dry-run] +exports[`test/lib/unpublish.js TAP workspaces all workspaces --force > should output all workspaces 1`] = ` +- workspace-a- workspace-b- workspace-n +` -Publishes '.' 
if no argument supplied -Sets tag \`latest\` if no --tag specified +exports[`test/lib/unpublish.js TAP workspaces one workspace --force > should output one workspaces 1`] = ` +- workspace-a ` diff --git a/tap-snapshots/test-lib-utils-cmd-list.js-TAP.test.js b/tap-snapshots/test/lib/utils/cmd-list.js.test.cjs similarity index 99% rename from tap-snapshots/test-lib-utils-cmd-list.js-TAP.test.js rename to tap-snapshots/test/lib/utils/cmd-list.js.test.cjs index 832f8560125a3..971580792048b 100644 --- a/tap-snapshots/test-lib-utils-cmd-list.js-TAP.test.js +++ b/tap-snapshots/test/lib/utils/cmd-list.js.test.cjs @@ -158,6 +158,7 @@ Object { "diff", "dist-tag", "ping", + "pkg", "test", "stop", "start", diff --git a/tap-snapshots/test/lib/utils/config/definition.js.test.cjs b/tap-snapshots/test/lib/utils/config/definition.js.test.cjs new file mode 100644 index 0000000000000..ad506ae8e3585 --- /dev/null +++ b/tap-snapshots/test/lib/utils/config/definition.js.test.cjs @@ -0,0 +1,264 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/lib/utils/config/definition.js TAP basic definition > description of deprecated thing 1`] = ` +#### \`deprecated\` + +* Default: A number bigger than 1 +* Type: An expression of a numeric quantity using numerals +* DEPRECATED: do not use this + +it should not be used ever + +not even once. +` + +exports[`test/lib/utils/config/definition.js TAP basic definition > human-readable description 1`] = ` +#### \`key\` + +* Default: "some default value" +* Type: Number or String + +just a test thingie +` + +exports[`test/lib/utils/config/definition.js TAP long description > cols=-1 1`] = ` +#### \`walden\` + +* Default: true +* Type: Boolean + +WHEN I WROTE the +following pages, +or rather the +bulk of them, I +lived alone, in +the woods, a +mile from any +neighbor, in a +house which I +had built +myself, on the +shore of Walden +Pond, in +Concord, +Massachusetts, +and earned my +living by the +labor of my +hands only. I +lived there two +years and two +months. At +present I am a +sojourner in +civilized life +again. + +I should not +obtrude my +affairs so much +on the notice of +my readers if +very particular +inquiries had +not been made by +my townsmen +concerning my +mode of life, +which some would +call +impertinent, +though they do +not appear to me +at all +impertinent, +but, considering +the +circumstances, +very natural and +pertinent. + +\`\`\` +this.is('a', { + code: 'sample', +}) + +with (multiple) { + blocks() +} +\`\`\` + +` + +exports[`test/lib/utils/config/definition.js TAP long description > cols=0 1`] = ` +#### \`walden\` + +* Default: true +* Type: Boolean + +WHEN I WROTE the +following pages, +or rather the +bulk of them, I +lived alone, in +the woods, a +mile from any +neighbor, in a +house which I +had built +myself, on the +shore of Walden +Pond, in +Concord, +Massachusetts, +and earned my +living by the +labor of my +hands only. I +lived there two +years and two +months. At +present I am a +sojourner in +civilized life +again. 
+ +I should not +obtrude my +affairs so much +on the notice of +my readers if +very particular +inquiries had +not been made by +my townsmen +concerning my +mode of life, +which some would +call +impertinent, +though they do +not appear to me +at all +impertinent, +but, considering +the +circumstances, +very natural and +pertinent. + +\`\`\` +this.is('a', { + code: 'sample', +}) + +with (multiple) { + blocks() +} +\`\`\` + +` + +exports[`test/lib/utils/config/definition.js TAP long description > cols=40 1`] = ` +#### \`walden\` + +* Default: true +* Type: Boolean + +WHEN I WROTE the following pages, or +rather the bulk of them, I lived +alone, in the woods, a mile from any +neighbor, in a house which I had +built myself, on the shore of Walden +Pond, in Concord, Massachusetts, and +earned my living by the labor of my +hands only. I lived there two years +and two months. At present I am a +sojourner in civilized life again. + +I should not obtrude my affairs so +much on the notice of my readers if +very particular inquiries had not +been made by my townsmen concerning +my mode of life, which some would +call impertinent, though they do not +appear to me at all impertinent, +but, considering the circumstances, +very natural and pertinent. + +\`\`\` +this.is('a', { + code: 'sample', +}) + +with (multiple) { + blocks() +} +\`\`\` + +` + +exports[`test/lib/utils/config/definition.js TAP long description > cols=9000 1`] = ` +#### \`walden\` + +* Default: true +* Type: Boolean + +WHEN I WROTE the following pages, or rather the bulk of them, I lived alone, +in the woods, a mile from any neighbor, in a house which I had built myself, +on the shore of Walden Pond, in Concord, Massachusetts, and earned my living +by the labor of my hands only. I lived there two years and two months. At +present I am a sojourner in civilized life again. + +I should not obtrude my affairs so much on the notice of my readers if very +particular inquiries had not been made by my townsmen concerning my mode of +life, which some would call impertinent, though they do not appear to me at +all impertinent, but, considering the circumstances, very natural and +pertinent. + +\`\`\` +this.is('a', { + code: 'sample', +}) + +with (multiple) { + blocks() +} +\`\`\` + +` + +exports[`test/lib/utils/config/definition.js TAP long description > cols=NaN 1`] = ` +#### \`walden\` + +* Default: true +* Type: Boolean + +WHEN I WROTE the following pages, or rather the bulk of them, I lived alone, +in the woods, a mile from any neighbor, in a house which I had built myself, +on the shore of Walden Pond, in Concord, Massachusetts, and earned my living +by the labor of my hands only. I lived there two years and two months. At +present I am a sojourner in civilized life again. + +I should not obtrude my affairs so much on the notice of my readers if very +particular inquiries had not been made by my townsmen concerning my mode of +life, which some would call impertinent, though they do not appear to me at +all impertinent, but, considering the circumstances, very natural and +pertinent. + +\`\`\` +this.is('a', { + code: 'sample', +}) + +with (multiple) { + blocks() +} +\`\`\` + +` diff --git a/tap-snapshots/test/lib/utils/config/definitions.js.test.cjs b/tap-snapshots/test/lib/utils/config/definitions.js.test.cjs new file mode 100644 index 0000000000000..01b137b8af54a --- /dev/null +++ b/tap-snapshots/test/lib/utils/config/definitions.js.test.cjs @@ -0,0 +1,1866 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. 
+ * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/lib/utils/config/definitions.js TAP > all config keys 1`] = ` +Array [ + "_auth", + "access", + "all", + "allow-same-version", + "also", + "audit", + "audit-level", + "auth-type", + "before", + "bin-links", + "browser", + "ca", + "cache", + "cache-max", + "cache-min", + "cafile", + "call", + "cert", + "ci-name", + "cidr", + "color", + "commit-hooks", + "depth", + "description", + "dev", + "diff", + "diff-ignore-all-space", + "diff-name-only", + "diff-no-prefix", + "diff-dst-prefix", + "diff-src-prefix", + "diff-text", + "diff-unified", + "dry-run", + "editor", + "engine-strict", + "fetch-retries", + "fetch-retry-factor", + "fetch-retry-maxtimeout", + "fetch-retry-mintimeout", + "fetch-timeout", + "force", + "foreground-scripts", + "format-package-lock", + "fund", + "git", + "git-tag-version", + "global", + "global-style", + "globalconfig", + "heading", + "https-proxy", + "if-present", + "ignore-scripts", + "include", + "include-staged", + "init-author-email", + "init-author-name", + "init-author-url", + "init-license", + "init-module", + "init-version", + "init.author.email", + "init.author.name", + "init.author.url", + "init.license", + "init.module", + "init.version", + "json", + "key", + "legacy-bundling", + "legacy-peer-deps", + "link", + "local-address", + "location", + "loglevel", + "logs-max", + "long", + "maxsockets", + "message", + "node-options", + "node-version", + "noproxy", + "npm-version", + "offline", + "omit", + "only", + "optional", + "otp", + "package", + "package-lock", + "package-lock-only", + "pack-destination", + "parseable", + "prefer-offline", + "prefer-online", + "prefix", + "preid", + "production", + "progress", + "proxy", + "read-only", + "rebuild-bundle", + "registry", + "save", + "save-bundle", + "save-dev", + "save-exact", + "save-optional", + "save-peer", + "save-prefix", + "save-prod", + "scope", + "script-shell", + "searchexclude", + "searchlimit", + "searchopts", + "searchstaleness", + "shell", + "shrinkwrap", + "sign-git-commit", + "sign-git-tag", + "sso-poll-frequency", + "sso-type", + "strict-peer-deps", + "strict-ssl", + "tag", + "tag-version-prefix", + "timing", + "tmp", + "umask", + "unicode", + "update-notifier", + "usage", + "user-agent", + "userconfig", + "version", + "versions", + "viewer", + "which", + "workspace", + "workspaces", + "yes", +] +` + +exports[`test/lib/utils/config/definitions.js TAP > all config keys that are shared to flatOptions 1`] = ` +Array [] +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for _auth 1`] = ` +#### \`_auth\` + +* Default: null +* Type: null or String + +A basic-auth string to use when authenticating against the npm registry. + +Warning: This should generally not be set via a command-line option. It is +safer to use a registry-provided authentication bearer token stored in the +~/.npmrc file by running \`npm login\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for access 1`] = ` +#### \`access\` + +* Default: 'restricted' for scoped packages, 'public' for unscoped packages +* Type: null, "restricted", or "public" + +When publishing scoped packages, the access level defaults to \`restricted\`. +If you want your scoped package to be publicly viewable (and installable) +set \`--access=public\`. 
The only valid values for \`access\` are \`public\` and +\`restricted\`. Unscoped packages _always_ have an access level of \`public\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for all 1`] = ` +#### \`all\` + +* Default: false +* Type: Boolean + +When running \`npm outdated\` and \`npm ls\`, setting \`--all\` will show all +outdated or installed packages, rather than only those directly depended +upon by the current project. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for allow-same-version 1`] = ` +#### \`allow-same-version\` + +* Default: false +* Type: Boolean + +Prevents throwing an error when \`npm version\` is used to set the new version +to the same value as the current version. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for also 1`] = ` +#### \`also\` + +* Default: null +* Type: null, "dev", or "development" +* DEPRECATED: Please use --include=dev instead. + +When set to \`dev\` or \`development\`, this is an alias for \`--include=dev\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for audit 1`] = ` +#### \`audit\` + +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [\`npm audit\`](/commands/npm-audit) for details on what is +submitted. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for audit-level 1`] = ` +#### \`audit-level\` + +* Default: null +* Type: null, "info", "low", "moderate", "high", "critical", or "none" + +The minimum level of vulnerability for \`npm audit\` to exit with a non-zero +exit code. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for auth-type 1`] = ` +#### \`auth-type\` + +* Default: "legacy" +* Type: "legacy", "sso", "saml", or "oauth" +* DEPRECATED: This method of SSO/SAML/OAuth is deprecated and will be removed + in a future version of npm in favor of web-based login. + +What authentication strategy to use with \`adduser\`/\`login\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for before 1`] = ` +#### \`before\` + +* Default: null +* Type: null or Date + +If passed to \`npm install\`, will rebuild the npm tree such that only +versions that were available **on or before** the \`--before\` time get +installed. If there's no versions available for the current set of direct +dependencies, the command will error. + +If the requested version is a \`dist-tag\` and the given tag does not pass the +\`--before\` filter, the most recent version less than or equal to that tag +will be used. For example, \`foo@latest\` might install \`foo@1.2\` even though +\`latest\` is \`2.0\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for bin-links 1`] = ` +#### \`bin-links\` + +* Default: true +* Type: Boolean + +Tells npm to create symlinks (or \`.cmd\` shims on Windows) for package +executables. + +Set to false to have it not do this. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for browser 1`] = ` +#### \`browser\` + +* Default: OS X: \`"open"\`, Windows: \`"start"\`, Others: \`"xdg-open"\` +* Type: null, Boolean, or String + +The browser that is called by npm commands to open websites. 
+ +Set to \`false\` to suppress browser behavior and instead print urls to +terminal. + +Set to \`true\` to use default system URL opener. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for ca 1`] = ` +#### \`ca\` + +* Default: null +* Type: null or String (can be set multiple times) + +The Certificate Authority signing certificate that is trusted for SSL +connections to the registry. Values should be in PEM format (Windows calls +it "Base-64 encoded X.509 (.CER)") with newlines replaced by the string +"\\n". For example: + +\`\`\`ini +ca="-----BEGIN CERTIFICATE-----\\nXXXX\\nXXXX\\n-----END CERTIFICATE-----" +\`\`\` + +Set to \`null\` to only allow "known" registrars, or to a specific CA cert to +trust only that specific signing authority. + +Multiple CAs can be trusted by specifying an array of certificates: + +\`\`\`ini +ca[]="..." +ca[]="..." +\`\`\` + +See also the \`strict-ssl\` config. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for cache 1`] = ` +#### \`cache\` + +* Default: Windows: \`%LocalAppData%\\npm-cache\`, Posix: \`~/.npm\` +* Type: Path + +The location of npm's cache directory. See [\`npm +cache\`](/commands/npm-cache) +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for cache-max 1`] = ` +#### \`cache-max\` + +* Default: Infinity +* Type: Number +* DEPRECATED: This option has been deprecated in favor of \`--prefer-online\` + +\`--cache-max=0\` is an alias for \`--prefer-online\` +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for cache-min 1`] = ` +#### \`cache-min\` + +* Default: 0 +* Type: Number +* DEPRECATED: This option has been deprecated in favor of \`--prefer-offline\`. + +\`--cache-min=9999 (or bigger)\` is an alias for \`--prefer-offline\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for cafile 1`] = ` +#### \`cafile\` + +* Default: null +* Type: Path + +A path to a file containing one or multiple Certificate Authority signing +certificates. Similar to the \`ca\` setting, but allows for multiple CA's, as +well as for the CA information to be stored in a file on disk. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for call 1`] = ` +#### \`call\` + +* Default: "" +* Type: String + +Optional companion option for \`npm exec\`, \`npx\` that allows for specifying a +custom command to be run along with the installed packages. + +\`\`\`bash +npm exec --package yo --package generator-node --call "yo node" +\`\`\` + +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for cert 1`] = ` +#### \`cert\` + +* Default: null +* Type: null or String + +A client certificate to pass when accessing the registry. Values should be +in PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with +newlines replaced by the string "\\n". For example: + +\`\`\`ini +cert="-----BEGIN CERTIFICATE-----\\nXXXX\\nXXXX\\n-----END CERTIFICATE-----" +\`\`\` + +It is _not_ the path to a certificate file (and there is no "certfile" +option). +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for ci-name 1`] = ` +#### \`ci-name\` + +* Default: The name of the current CI system, or \`null\` when not on a known CI + platform. +* Type: null or String + +The name of a continuous integration system. If not set explicitly, npm will +detect the current CI environment using the +[\`@npmcli/ci-detect\`](http://npm.im/@npmcli/ci-detect) module. 
+` + +exports[`test/lib/utils/config/definitions.js TAP > config description for cidr 1`] = ` +#### \`cidr\` + +* Default: null +* Type: null or String (can be set multiple times) + +This is a list of CIDR address to be used when configuring limited access +tokens with the \`npm token create\` command. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for color 1`] = ` +#### \`color\` + +* Default: true unless the NO_COLOR environ is set to something other than '0' +* Type: "always" or Boolean + +If false, never shows colors. If \`"always"\` then always shows colors. If +true, then only prints color codes for tty file descriptors. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for commit-hooks 1`] = ` +#### \`commit-hooks\` + +* Default: true +* Type: Boolean + +Run git commit hooks when using the \`npm version\` command. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for depth 1`] = ` +#### \`depth\` + +* Default: \`Infinity\` if \`--all\` is set, otherwise \`1\` +* Type: null or Number + +The depth to go when recursing packages for \`npm ls\`. + +If not set, \`npm ls\` will show only the immediate dependencies of the root +project. If \`--all\` is set, then npm will show all dependencies by default. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for description 1`] = ` +#### \`description\` + +* Default: true +* Type: Boolean + +Show the description in \`npm search\` +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for dev 1`] = ` +#### \`dev\` + +* Default: false +* Type: Boolean +* DEPRECATED: Please use --include=dev instead. + +Alias for \`--include=dev\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for diff 1`] = ` +#### \`diff\` + +* Default: +* Type: String (can be set multiple times) + +Define arguments to compare in \`npm diff\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for diff-dst-prefix 1`] = ` +#### \`diff-dst-prefix\` + +* Default: "b/" +* Type: String + +Destination prefix to be used in \`npm diff\` output. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for diff-ignore-all-space 1`] = ` +#### \`diff-ignore-all-space\` + +* Default: false +* Type: Boolean + +Ignore whitespace when comparing lines in \`npm diff\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for diff-name-only 1`] = ` +#### \`diff-name-only\` + +* Default: false +* Type: Boolean + +Prints only filenames when using \`npm diff\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for diff-no-prefix 1`] = ` +#### \`diff-no-prefix\` + +* Default: false +* Type: Boolean + +Do not show any source or destination prefix in \`npm diff\` output. + +Note: this causes \`npm diff\` to ignore the \`--diff-src-prefix\` and +\`--diff-dst-prefix\` configs. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for diff-src-prefix 1`] = ` +#### \`diff-src-prefix\` + +* Default: "a/" +* Type: String + +Source prefix to be used in \`npm diff\` output. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for diff-text 1`] = ` +#### \`diff-text\` + +* Default: false +* Type: Boolean + +Treat all files as text in \`npm diff\`. 
+` + +exports[`test/lib/utils/config/definitions.js TAP > config description for diff-unified 1`] = ` +#### \`diff-unified\` + +* Default: 3 +* Type: Number + +The number of lines of context to print in \`npm diff\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for dry-run 1`] = ` +#### \`dry-run\` + +* Default: false +* Type: Boolean + +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, \`install\`, \`update\`, +\`dedupe\`, \`uninstall\`, as well as \`pack\` and \`publish\`. + +Note: This is NOT honored by other network related commands, eg \`dist-tags\`, +\`owner\`, etc. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for editor 1`] = ` +#### \`editor\` + +* Default: The EDITOR or VISUAL environment variables, or 'notepad.exe' on + Windows, or 'vim' on Unix systems +* Type: String + +The command to run for \`npm edit\` and \`npm config edit\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for engine-strict 1`] = ` +#### \`engine-strict\` + +* Default: false +* Type: Boolean + +If set to true, then npm will stubbornly refuse to install (or even consider +installing) any package that claims to not be compatible with the current +Node.js version. + +This can be overridden by setting the \`--force\` flag. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for fetch-retries 1`] = ` +#### \`fetch-retries\` + +* Default: 2 +* Type: Number + +The "retries" config for the \`retry\` module to use when fetching packages +from the registry. + +npm will retry idempotent read requests to the registry in the case of +network failures or 5xx HTTP errors. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for fetch-retry-factor 1`] = ` +#### \`fetch-retry-factor\` + +* Default: 10 +* Type: Number + +The "factor" config for the \`retry\` module to use when fetching packages. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for fetch-retry-maxtimeout 1`] = ` +#### \`fetch-retry-maxtimeout\` + +* Default: 60000 (1 minute) +* Type: Number + +The "maxTimeout" config for the \`retry\` module to use when fetching +packages. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for fetch-retry-mintimeout 1`] = ` +#### \`fetch-retry-mintimeout\` + +* Default: 10000 (10 seconds) +* Type: Number + +The "minTimeout" config for the \`retry\` module to use when fetching +packages. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for fetch-timeout 1`] = ` +#### \`fetch-timeout\` + +* Default: 300000 (5 minutes) +* Type: Number + +The maximum amount of time to wait for HTTP requests to complete. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for force 1`] = ` +#### \`force\` + +* Default: false +* Type: Boolean + +Removes various protections against unfortunate side effects, common +mistakes, unnecessary performance degradation, and malicious input. + +* Allow clobbering non-npm files in global installs. +* Allow the \`npm version\` command to work on an unclean git repository. +* Allow deleting the cache folder with \`npm cache clean\`. +* Allow installing packages that have an \`engines\` declaration requiring a + different version of npm. 
+* Allow installing packages that have an \`engines\` declaration requiring a + different version of \`node\`, even if \`--engine-strict\` is enabled. +* Allow \`npm audit fix\` to install modules outside your stated dependency + range (including SemVer-major changes). +* Allow unpublishing all versions of a published package. +* Allow conflicting peerDependencies to be installed in the root project. +* Implicitly set \`--yes\` during \`npm init\`. +* Allow clobbering existing values in \`npm pkg\` + +If you don't have a clear idea of what you want to do, it is strongly +recommended that you do not use this option! +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for foreground-scripts 1`] = ` +#### \`foreground-scripts\` + +* Default: false +* Type: Boolean + +Run all build scripts (ie, \`preinstall\`, \`install\`, and \`postinstall\`) +scripts for installed packages in the foreground process, sharing standard +input, output, and error with the main npm process. + +Note that this will generally make installs run slower, and be much noisier, +but can be useful for debugging. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for format-package-lock 1`] = ` +#### \`format-package-lock\` + +* Default: true +* Type: Boolean + +Format \`package-lock.json\` or \`npm-shrinkwrap.json\` as a human readable +file. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for fund 1`] = ` +#### \`fund\` + +* Default: true +* Type: Boolean + +When "true" displays the message at the end of each \`npm install\` +acknowledging the number of dependencies looking for funding. See [\`npm +fund\`](/commands/npm-fund) for details. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for git 1`] = ` +#### \`git\` + +* Default: "git" +* Type: String + +The command to use for git commands. If git is installed on the computer, +but is not in the \`PATH\`, then set this to the full path to the git binary. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for git-tag-version 1`] = ` +#### \`git-tag-version\` + +* Default: true +* Type: Boolean + +Tag the commit when using the \`npm version\` command. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for global 1`] = ` +#### \`global\` + +* Default: false +* Type: Boolean + +Operates in "global" mode, so that packages are installed into the \`prefix\` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the \`{prefix}/lib/node_modules\` folder, instead + of the current working directory. +* bin files are linked to \`{prefix}/bin\` +* man pages are linked to \`{prefix}/share/man\` +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for global-style 1`] = ` +#### \`global-style\` + +* Default: false +* Type: Boolean + +Causes npm to install the package into your local \`node_modules\` folder with +the same layout it uses with the global \`node_modules\` folder. Only your +direct dependencies will show in \`node_modules\` and everything they depend +on will be flattened in their \`node_modules\` folders. This obviously will +eliminate some deduping. If used with \`legacy-bundling\`, \`legacy-bundling\` +will be preferred. 
+` + +exports[`test/lib/utils/config/definitions.js TAP > config description for globalconfig 1`] = ` +#### \`globalconfig\` + +* Default: The global --prefix setting plus 'etc/npmrc'. For example, + '/usr/local/etc/npmrc' +* Type: Path + +The config file to read for global config options. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for heading 1`] = ` +#### \`heading\` + +* Default: "npm" +* Type: String + +The string that starts all the debugging log output. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for https-proxy 1`] = ` +#### \`https-proxy\` + +* Default: null +* Type: null or URL + +A proxy to use for outgoing https requests. If the \`HTTPS_PROXY\` or +\`https_proxy\` or \`HTTP_PROXY\` or \`http_proxy\` environment variables are set, +proxy settings will be honored by the underlying \`make-fetch-happen\` +library. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for if-present 1`] = ` +#### \`if-present\` + +* Default: false +* Type: Boolean + +If true, npm will not exit with an error code when \`run-script\` is invoked +for a script that isn't defined in the \`scripts\` section of \`package.json\`. +This option can be used when it's desirable to optionally run a script when +it's present and fail if the script fails. This is useful, for example, when +running scripts that may only apply for some builds in an otherwise generic +CI setup. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for ignore-scripts 1`] = ` +#### \`ignore-scripts\` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +\`npm start\`, \`npm stop\`, \`npm restart\`, \`npm test\`, and \`npm run-script\` +will still run their intended script if \`ignore-scripts\` is set, but they +will *not* run any pre- or post-scripts. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for include 1`] = ` +#### \`include\` + +* Default: +* Type: "prod", "dev", "optional", or "peer" (can be set multiple times) + +Option that allows for defining which types of dependencies to install. + +This is the inverse of \`--omit=<type>\`. + +Dependency types specified in \`--include\` will not be omitted, regardless of +the order in which omit/include are specified on the command-line. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for include-staged 1`] = ` +#### \`include-staged\` + +* Default: false +* Type: Boolean + +Allow installing "staged" published packages, as defined by [npm RFC PR +#92](https://github.com/npm/rfcs/pull/92). + +This is experimental, and not implemented by the npm public registry. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for init-author-email 1`] = ` +#### \`init-author-email\` + +* Default: "" +* Type: String + +The value \`npm init\` should use by default for the package author's email. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for init-author-name 1`] = ` +#### \`init-author-name\` + +* Default: "" +* Type: String + +The value \`npm init\` should use by default for the package author's name. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for init-author-url 1`] = ` +#### \`init-author-url\` + +* Default: "" +* Type: "" or URL + +The value \`npm init\` should use by default for the package author's +homepage. 
+` + +exports[`test/lib/utils/config/definitions.js TAP > config description for init-license 1`] = ` +#### \`init-license\` + +* Default: "ISC" +* Type: String + +The value \`npm init\` should use by default for the package license. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for init-module 1`] = ` +#### \`init-module\` + +* Default: "~/.npm-init.js" +* Type: Path + +A module that will be loaded by the \`npm init\` command. See the +documentation for the +[init-package-json](https://github.com/npm/init-package-json) module for +more information, or [npm init](/commands/npm-init). +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for init-version 1`] = ` +#### \`init-version\` + +* Default: "1.0.0" +* Type: SemVer string + +The value that \`npm init\` should use by default for the package version +number, if not already set in package.json. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for init.author.email 1`] = ` +#### \`init.author.email\` + +* Default: "" +* Type: String +* DEPRECATED: Use \`--init-author-email\` instead. + +Alias for \`--init-author-email\` +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for init.author.name 1`] = ` +#### \`init.author.name\` + +* Default: "" +* Type: String +* DEPRECATED: Use \`--init-author-name\` instead. + +Alias for \`--init-author-name\` +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for init.author.url 1`] = ` +#### \`init.author.url\` + +* Default: "" +* Type: "" or URL +* DEPRECATED: Use \`--init-author-url\` instead. + +Alias for \`--init-author-url\` +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for init.license 1`] = ` +#### \`init.license\` + +* Default: "ISC" +* Type: String +* DEPRECATED: Use \`--init-license\` instead. + +Alias for \`--init-license\` +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for init.module 1`] = ` +#### \`init.module\` + +* Default: "~/.npm-init.js" +* Type: Path +* DEPRECATED: Use \`--init-module\` instead. + +Alias for \`--init-module\` +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for init.version 1`] = ` +#### \`init.version\` + +* Default: "1.0.0" +* Type: SemVer string +* DEPRECATED: Use \`--init-version\` instead. + +Alias for \`--init-version\` +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for json 1`] = ` +#### \`json\` + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +* In \`npm pkg set\` it enables parsing set values with JSON.parse() before + saving them to your \`package.json\`. + +Not supported by all npm commands. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for key 1`] = ` +#### \`key\` + +* Default: null +* Type: null or String + +A client key to pass when accessing the registry. Values should be in PEM +format with newlines replaced by the string "\\n". For example: + +\`\`\`ini +key="-----BEGIN PRIVATE KEY-----\\nXXXX\\nXXXX\\n-----END PRIVATE KEY-----" +\`\`\` + +It is _not_ the path to a key file (and there is no "keyfile" option). 
+` + +exports[`test/lib/utils/config/definitions.js TAP > config description for legacy-bundling 1`] = ` +#### \`legacy-bundling\` + +* Default: false +* Type: Boolean + +Causes npm to install the package such that versions of npm prior to 1.4, +such as the one included with node 0.8, can install the package. This +eliminates all automatic deduping. If used with \`global-style\` this option +will be preferred. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for legacy-peer-deps 1`] = ` +#### \`legacy-peer-deps\` + +* Default: false +* Type: Boolean + +Causes npm to completely ignore \`peerDependencies\` when building a package +tree, as in npm versions 3 through 6. + +If a package cannot be installed because of overly strict \`peerDependencies\` +that collide, it provides a way to move forward resolving the situation. + +This differs from \`--omit=peer\`, in that \`--omit=peer\` will avoid unpacking +\`peerDependencies\` on disk, but will still design a tree such that +\`peerDependencies\` _could_ be unpacked in a correct place. + +Use of \`legacy-peer-deps\` is not recommended, as it will not enforce the +\`peerDependencies\` contract that meta-dependencies may rely on. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for link 1`] = ` +#### \`link\` + +* Default: false +* Type: Boolean + +Used with \`npm ls\`, limiting output to only those packages that are linked. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for local-address 1`] = ` +#### \`local-address\` + +* Default: null +* Type: IP Address + +The IP address of the local interface to use when making connections to the +npm registry. Must be IPv4 in versions of Node prior to 0.12. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for location 1`] = ` +#### \`location\` + +* Default: "user" unless \`--global\` is passed, which will also set this value + to "global" +* Type: "global", "user", or "project" + +When passed to \`npm config\` this refers to which config file to use. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for loglevel 1`] = ` +#### \`loglevel\` + +* Default: "notice" +* Type: "silent", "error", "warn", "notice", "http", "timing", "info", + "verbose", or "silly" + +What level of logs to report. On failure, *all* logs are written to +\`npm-debug.log\` in the current working directory. + +Any logs of a higher level than the setting are shown. The default is +"notice". + +See also the \`foreground-scripts\` config. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for logs-max 1`] = ` +#### \`logs-max\` + +* Default: 10 +* Type: Number + +The maximum number of log files to store. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for long 1`] = ` +#### \`long\` + +* Default: false +* Type: Boolean + +Show extended information in \`ls\`, \`search\`, and \`help-search\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for maxsockets 1`] = ` +#### \`maxsockets\` + +* Default: 15 +* Type: Number + +The maximum number of connections to use per origin (protocol/host/port +combination). +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for message 1`] = ` +#### \`message\` + +* Default: "%s" +* Type: String + +Commit message which is used by \`npm version\` when creating version commit. + +Any "%s" in the message will be replaced with the version number. 
+` + +exports[`test/lib/utils/config/definitions.js TAP > config description for node-options 1`] = ` +#### \`node-options\` + +* Default: null +* Type: null or String + +Options to pass through to Node.js via the \`NODE_OPTIONS\` environment +variable. This does not impact how npm itself is executed but it does impact +how lifecycle scripts are called. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for node-version 1`] = ` +#### \`node-version\` + +* Default: Node.js \`process.version\` value +* Type: SemVer string + +The node version to use when checking a package's \`engines\` setting. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for noproxy 1`] = ` +#### \`noproxy\` + +* Default: The value of the NO_PROXY environment variable +* Type: String (can be set multiple times) + +Domain extensions that should bypass any proxies. + +Also accepts a comma-delimited string. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for npm-version 1`] = ` +#### \`npm-version\` + +* Default: Output of \`npm --version\` +* Type: SemVer string + +The npm version to use when checking a package's \`engines\` setting. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for offline 1`] = ` +#### \`offline\` + +* Default: false +* Type: Boolean + +Force offline mode: no network requests will be done during install. To +allow the CLI to fill in missing cache data, see \`--prefer-offline\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for omit 1`] = ` +#### \`omit\` + +* Default: 'dev' if the \`NODE_ENV\` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) + +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +\`package-lock.json\` or \`npm-shrinkwrap.json\` file. They are just not +physically installed on disk. + +If a package type appears in both the \`--include\` and \`--omit\` lists, then +it will be included. + +If the resulting omit list includes \`'dev'\`, then the \`NODE_ENV\` environment +variable will be set to \`'production'\` for all lifecycle scripts. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for only 1`] = ` +#### \`only\` + +* Default: null +* Type: null, "prod", or "production" +* DEPRECATED: Use \`--omit=dev\` to omit dev dependencies from the install. + +When set to \`prod\` or \`production\`, this is an alias for \`--omit=dev\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for optional 1`] = ` +#### \`optional\` + +* Default: null +* Type: null or Boolean +* DEPRECATED: Use \`--omit=optional\` to exclude optional dependencies, or + \`--include=optional\` to include them. + +Default value does install optional deps unless otherwise omitted. + +Alias for --include=optional or --omit=optional +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for otp 1`] = ` +#### \`otp\` + +* Default: null +* Type: null or String + +This is a one-time password from a two-factor authenticator. It's needed +when publishing or changing package permissions with \`npm access\`. + +If not set, and a registry response fails with a challenge for a one-time +password, npm will prompt on the command line for one. 
+` + +exports[`test/lib/utils/config/definitions.js TAP > config description for pack-destination 1`] = ` +#### \`pack-destination\` + +* Default: "." +* Type: String + +Directory in which \`npm pack\` will save tarballs. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for package 1`] = ` +#### \`package\` + +* Default: +* Type: String (can be set multiple times) + +The package to install for [\`npm exec\`](/commands/npm-exec) +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for package-lock 1`] = ` +#### \`package-lock\` + +* Default: true +* Type: Boolean + +If set to false, then ignore \`package-lock.json\` files when installing. This +will also prevent _writing_ \`package-lock.json\` if \`save\` is true. + +When package package-locks are disabled, automatic pruning of extraneous +modules will also be disabled. To remove extraneous modules with +package-locks disabled use \`npm prune\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for package-lock-only 1`] = ` +#### \`package-lock-only\` + +* Default: false +* Type: Boolean + +If set to true, the current operation will only use the \`package-lock.json\`, +ignoring \`node_modules\`. + +For \`update\` this means only the \`package-lock.json\` will be updated, +instead of checking \`node_modules\` and downloading dependencies. + +For \`list\` this means the output will be based on the tree described by the +\`package-lock.json\`, rather than the contents of \`node_modules\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for parseable 1`] = ` +#### \`parseable\` + +* Default: false +* Type: Boolean + +Output parseable results from commands that write to standard output. For +\`npm search\`, this will be tab-separated table format. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for prefer-offline 1`] = ` +#### \`prefer-offline\` + +* Default: false +* Type: Boolean + +If true, staleness checks for cached data will be bypassed, but missing data +will be requested from the server. To force full offline mode, use +\`--offline\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for prefer-online 1`] = ` +#### \`prefer-online\` + +* Default: false +* Type: Boolean + +If true, staleness checks for cached data will be forced, making the CLI +look for updates immediately even for fresh package data. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for prefix 1`] = ` +#### \`prefix\` + +* Default: In global mode, the folder where the node executable is installed. + In local mode, the nearest parent folder containing either a package.json + file or a node_modules folder. +* Type: Path + +The location to install global items. If set on the command line, then it +forces non-global commands to run in the specified folder. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for preid 1`] = ` +#### \`preid\` + +* Default: "" +* Type: String + +The "prerelease identifier" to use as a prefix for the "prerelease" part of +a semver. Like the \`rc\` in \`1.2.0-rc.8\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for production 1`] = ` +#### \`production\` + +* Default: null +* Type: null or Boolean +* DEPRECATED: Use \`--omit=dev\` instead. 
+ +Alias for \`--omit=dev\` +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for progress 1`] = ` +#### \`progress\` + +* Default: \`true\` unless running in a known CI system +* Type: Boolean + +When set to \`true\`, npm will display a progress bar during time intensive +operations, if \`process.stderr\` is a TTY. + +Set to \`false\` to suppress the progress bar. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for proxy 1`] = ` +#### \`proxy\` + +* Default: null +* Type: null, false, or URL + +A proxy to use for outgoing http requests. If the \`HTTP_PROXY\` or +\`http_proxy\` environment variables are set, proxy settings will be honored +by the underlying \`request\` library. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for read-only 1`] = ` +#### \`read-only\` + +* Default: false +* Type: Boolean + +This is used to mark a token as unable to publish when configuring limited +access tokens with the \`npm token create\` command. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for rebuild-bundle 1`] = ` +#### \`rebuild-bundle\` + +* Default: true +* Type: Boolean + +Rebuild bundled dependencies after installation. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for registry 1`] = ` +#### \`registry\` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for save 1`] = ` +#### \`save\` + +* Default: true +* Type: Boolean + +Save installed packages to a package.json file as dependencies. + +When used with the \`npm rm\` command, removes the dependency from +package.json. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for save-bundle 1`] = ` +#### \`save-bundle\` + +* Default: false +* Type: Boolean + +If a package would be saved at install time by the use of \`--save\`, +\`--save-dev\`, or \`--save-optional\`, then also put it in the +\`bundleDependencies\` list. + +Ignore if \`--save-peer\` is set, since peerDependencies cannot be bundled. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for save-dev 1`] = ` +#### \`save-dev\` + +* Default: false +* Type: Boolean + +Save installed packages to a package.json file as \`devDependencies\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for save-exact 1`] = ` +#### \`save-exact\` + +* Default: false +* Type: Boolean + +Dependencies saved to package.json will be configured with an exact version +rather than using npm's default semver range operator. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for save-optional 1`] = ` +#### \`save-optional\` + +* Default: false +* Type: Boolean + +Save installed packages to a package.json file as \`optionalDependencies\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for save-peer 1`] = ` +#### \`save-peer\` + +* Default: false +* Type: Boolean + +Save installed packages. to a package.json file as \`peerDependencies\` +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for save-prefix 1`] = ` +#### \`save-prefix\` + +* Default: "^" +* Type: String + +Configure how versions of packages installed to a package.json file via +\`--save\` or \`--save-dev\` get prefixed. 
+ +For example if a package has version \`1.2.3\`, by default its version is set +to \`^1.2.3\` which allows minor upgrades for that package, but after \`npm +config set save-prefix='~'\` it would be set to \`~1.2.3\` which only allows +patch upgrades. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for save-prod 1`] = ` +#### \`save-prod\` + +* Default: false +* Type: Boolean + +Save installed packages into \`dependencies\` specifically. This is useful if +a package already exists in \`devDependencies\` or \`optionalDependencies\`, but +you want to move it to be a non-optional production dependency. + +This is the default behavior if \`--save\` is true, and neither \`--save-dev\` +or \`--save-optional\` are true. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for scope 1`] = ` +#### \`scope\` + +* Default: the scope of the current project, if any, or "" +* Type: String + +Associate an operation with a scope for a scoped registry. + +Useful when logging in to or out of a private registry: + +\`\`\` +# log in, linking the scope to the custom registry +npm login --scope=@mycorp --registry=https://registry.mycorp.com + +# log out, removing the link and the auth token +npm logout --scope=@mycorp +\`\`\` + +This will cause \`@mycorp\` to be mapped to the registry for future +installation of packages specified according to the pattern +\`@mycorp/package\`. + +This will also cause \`npm init\` to create a scoped package. + +\`\`\` +# accept all defaults, and create a package named "@foo/whatever", +# instead of just named "whatever" +npm init --scope=@foo --yes +\`\`\` + +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for script-shell 1`] = ` +#### \`script-shell\` + +* Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows +* Type: null or String + +The shell to use for scripts run with the \`npm exec\`, \`npm run\` and \`npm +init <pkg>\` commands. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for searchexclude 1`] = ` +#### \`searchexclude\` + +* Default: "" +* Type: String + +Space-separated options that limit the results from search. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for searchlimit 1`] = ` +#### \`searchlimit\` + +* Default: 20 +* Type: Number + +Number of items to limit search results to. Will not apply at all to legacy +searches. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for searchopts 1`] = ` +#### \`searchopts\` + +* Default: "" +* Type: String + +Space-separated options that are always passed to search. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for searchstaleness 1`] = ` +#### \`searchstaleness\` + +* Default: 900 +* Type: Number + +The age of the cache, in seconds, before another registry request is made if +using legacy search endpoint. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for shell 1`] = ` +#### \`shell\` + +* Default: SHELL environment variable, or "bash" on Posix, or "cmd.exe" on + Windows +* Type: String + +The shell to run for the \`npm explore\` command. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for shrinkwrap 1`] = ` +#### \`shrinkwrap\` + +* Default: true +* Type: Boolean +* DEPRECATED: Use the --package-lock setting instead. 
+ +Alias for --package-lock +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for sign-git-commit 1`] = ` +#### \`sign-git-commit\` + +* Default: false +* Type: Boolean + +If set to true, then the \`npm version\` command will commit the new package +version using \`-S\` to add a signature. + +Note that git requires you to have set up GPG keys in your git configs for +this to work properly. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for sign-git-tag 1`] = ` +#### \`sign-git-tag\` + +* Default: false +* Type: Boolean + +If set to true, then the \`npm version\` command will tag the version using +\`-s\` to add a signature. + +Note that git requires you to have set up GPG keys in your git configs for +this to work properly. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for sso-poll-frequency 1`] = ` +#### \`sso-poll-frequency\` + +* Default: 500 +* Type: Number +* DEPRECATED: The --auth-type method of SSO/SAML/OAuth will be removed in a + future version of npm in favor of web-based login. + +When used with SSO-enabled \`auth-type\`s, configures how regularly the +registry should be polled while the user is completing authentication. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for sso-type 1`] = ` +#### \`sso-type\` + +* Default: "oauth" +* Type: null, "oauth", or "saml" +* DEPRECATED: The --auth-type method of SSO/SAML/OAuth will be removed in a + future version of npm in favor of web-based login. + +If \`--auth-type=sso\`, the type of SSO type to use. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for strict-peer-deps 1`] = ` +#### \`strict-peer-deps\` + +* Default: false +* Type: Boolean + +If set to \`true\`, and \`--legacy-peer-deps\` is not set, then _any_ +conflicting \`peerDependencies\` will be treated as an install failure, even +if npm could reasonably guess the appropriate resolution based on non-peer +dependency relationships. + +By default, conflicting \`peerDependencies\` deep in the dependency graph will +be resolved using the nearest non-peer dependency specification, even if +doing so will result in some packages receiving a peer dependency outside +the range set in their package's \`peerDependencies\` object. + +When such and override is performed, a warning is printed, explaining the +conflict and the packages involved. If \`--strict-peer-deps\` is set, then +this warning is treated as a failure. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for strict-ssl 1`] = ` +#### \`strict-ssl\` + +* Default: true +* Type: Boolean + +Whether or not to do SSL key validation when making requests to the registry +via https. + +See also the \`ca\` config. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for tag 1`] = ` +#### \`tag\` + +* Default: "latest" +* Type: String + +If you ask npm to install a package and don't tell it a specific version, +then it will install the specified tag. + +Also the tag that is added to the package@version specified by the \`npm tag\` +command, if no explicit tag is given. + +When used by the \`npm diff\` command, this is the tag used to fetch the +tarball that will be compared with the local files by default. 
+` + +exports[`test/lib/utils/config/definitions.js TAP > config description for tag-version-prefix 1`] = ` +#### \`tag-version-prefix\` + +* Default: "v" +* Type: String + +If set, alters the prefix used when tagging a new version when performing a +version increment using \`npm-version\`. To remove the prefix altogether, set +it to the empty string: \`""\`. + +Because other tools may rely on the convention that npm version tags look +like \`v1.0.0\`, _only use this property if it is absolutely necessary_. In +particular, use care when overriding this setting for public packages. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for timing 1`] = ` +#### \`timing\` + +* Default: false +* Type: Boolean + +If true, writes an \`npm-debug\` log to \`_logs\` and timing information to +\`_timing.json\`, both in your cache, even if the command completes +successfully. \`_timing.json\` is a newline delimited list of JSON objects. + +You can quickly view it with this [json](https://npm.im/json) command line: +\`npm exec -- json -g < ~/.npm/_timing.json\`. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for tmp 1`] = ` +#### \`tmp\` + +* Default: The value returned by the Node.js \`os.tmpdir()\` method + <https://nodejs.org/api/os.html#os_os_tmpdir> +* Type: Path +* DEPRECATED: This setting is no longer used. npm stores temporary files in a + special location in the cache, and they are managed by + [\`cacache\`](http://npm.im/cacache). + +Historically, the location where temporary files were stored. No longer +relevant. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for umask 1`] = ` +#### \`umask\` + +* Default: 0 +* Type: Octal numeric string in range 0000..0777 (0..511) + +The "umask" value to use when setting the file creation mode on files and +folders. + +Folders and executables are given a mode which is \`0o777\` masked against +this value. Other files are given a mode which is \`0o666\` masked against +this value. + +Note that the underlying system will _also_ apply its own umask value to +files and folders that are created, and npm does not circumvent this, but +rather adds the \`--umask\` config to it. + +Thus, the effective default umask value on most POSIX systems is 0o22, +meaning that folders and executables are created with a mode of 0o755 and +other files are created with a mode of 0o644. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for unicode 1`] = ` +#### \`unicode\` + +* Default: false on windows, true on mac/unix systems with a unicode locale, + as defined by the \`LC_ALL\`, \`LC_CTYPE\`, or \`LANG\` environment variables. +* Type: Boolean + +When set to true, npm uses unicode characters in the tree output. When +false, it uses ascii characters instead of unicode glyphs. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for update-notifier 1`] = ` +#### \`update-notifier\` + +* Default: true +* Type: Boolean + +Set to false to suppress the update notification when using an older version +of npm than the latest. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for usage 1`] = ` +#### \`usage\` + +* Default: false +* Type: Boolean + +Show short usage output about the command specified. 
+` + +exports[`test/lib/utils/config/definitions.js TAP > config description for user-agent 1`] = ` +#### \`user-agent\` + +* Default: "npm/{npm-version} node/{node-version} {platform} {arch} + workspaces/{workspaces} {ci}" +* Type: String + +Sets the User-Agent request header. The following fields are replaced with +their actual counterparts: + +* \`{npm-version}\` - The npm version in use +* \`{node-version}\` - The Node.js version in use +* \`{platform}\` - The value of \`process.platform\` +* \`{arch}\` - The value of \`process.arch\` +* \`{workspaces}\` - Set to \`true\` if the \`workspaces\` or \`workspace\` options + are set. +* \`{ci}\` - The value of the \`ci-name\` config, if set, prefixed with \`ci/\`, or + an empty string if \`ci-name\` is empty. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for userconfig 1`] = ` +#### \`userconfig\` + +* Default: "~/.npmrc" +* Type: Path + +The location of user-level configuration settings. + +This may be overridden by the \`npm_config_userconfig\` environment variable +or the \`--userconfig\` command line option, but may _not_ be overridden by +settings in the \`globalconfig\` file. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for version 1`] = ` +#### \`version\` + +* Default: false +* Type: Boolean + +If true, output the npm version and exit successfully. + +Only relevant when specified explicitly on the command line. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for versions 1`] = ` +#### \`versions\` + +* Default: false +* Type: Boolean + +If true, output the npm version as well as node's \`process.versions\` map and +the version in the current working directory's \`package.json\` file if one +exists, and exit successfully. + +Only relevant when specified explicitly on the command line. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for viewer 1`] = ` +#### \`viewer\` + +* Default: "man" on Posix, "browser" on Windows +* Type: String + +The program to use to view help content. + +Set to \`"browser"\` to view html help content in the default web browser. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for which 1`] = ` +#### \`which\` + +* Default: null +* Type: null or Number + +If there are multiple funding sources, which 1-indexed source URL to open. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for workspace 1`] = ` +#### \`workspace\` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the \`workspace\` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the \`npm init\` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. +` + +exports[`test/lib/utils/config/definitions.js TAP > config description for workspaces 1`] = ` +#### \`workspaces\` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. 
+` + +exports[`test/lib/utils/config/definitions.js TAP > config description for yes 1`] = ` +#### \`yes\` + +* Default: null +* Type: null or Boolean + +Automatically answer "yes" to any prompts that npm might print on the +command line. +` diff --git a/tap-snapshots/test/lib/utils/config/describe-all.js.test.cjs b/tap-snapshots/test/lib/utils/config/describe-all.js.test.cjs new file mode 100644 index 0000000000000..8487b45174cc3 --- /dev/null +++ b/tap-snapshots/test/lib/utils/config/describe-all.js.test.cjs @@ -0,0 +1,1430 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/lib/utils/config/describe-all.js TAP > must match snapshot 1`] = ` +#### \`_auth\` + +* Default: null +* Type: null or String + +A basic-auth string to use when authenticating against the npm registry. + +Warning: This should generally not be set via a command-line option. It is +safer to use a registry-provided authentication bearer token stored in the +~/.npmrc file by running \`npm login\`. + +#### \`access\` + +* Default: 'restricted' for scoped packages, 'public' for unscoped packages +* Type: null, "restricted", or "public" + +When publishing scoped packages, the access level defaults to \`restricted\`. +If you want your scoped package to be publicly viewable (and installable) +set \`--access=public\`. The only valid values for \`access\` are \`public\` and +\`restricted\`. Unscoped packages _always_ have an access level of \`public\`. + +#### \`all\` + +* Default: false +* Type: Boolean + +When running \`npm outdated\` and \`npm ls\`, setting \`--all\` will show all +outdated or installed packages, rather than only those directly depended +upon by the current project. + +#### \`allow-same-version\` + +* Default: false +* Type: Boolean + +Prevents throwing an error when \`npm version\` is used to set the new version +to the same value as the current version. + +#### \`audit\` + +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [\`npm audit\`](/commands/npm-audit) for details on what is +submitted. + +#### \`audit-level\` + +* Default: null +* Type: null, "info", "low", "moderate", "high", "critical", or "none" + +The minimum level of vulnerability for \`npm audit\` to exit with a non-zero +exit code. + +#### \`before\` + +* Default: null +* Type: null or Date + +If passed to \`npm install\`, will rebuild the npm tree such that only +versions that were available **on or before** the \`--before\` time get +installed. If there's no versions available for the current set of direct +dependencies, the command will error. + +If the requested version is a \`dist-tag\` and the given tag does not pass the +\`--before\` filter, the most recent version less than or equal to that tag +will be used. For example, \`foo@latest\` might install \`foo@1.2\` even though +\`latest\` is \`2.0\`. + +#### \`bin-links\` + +* Default: true +* Type: Boolean + +Tells npm to create symlinks (or \`.cmd\` shims on Windows) for package +executables. + +Set to false to have it not do this. This can be used to work around the +fact that some file systems don't support symlinks, even on ostensibly Unix +systems. 
+ +#### \`browser\` + +* Default: OS X: \`"open"\`, Windows: \`"start"\`, Others: \`"xdg-open"\` +* Type: null, Boolean, or String + +The browser that is called by npm commands to open websites. + +Set to \`false\` to suppress browser behavior and instead print urls to +terminal. + +Set to \`true\` to use default system URL opener. + +#### \`ca\` + +* Default: null +* Type: null or String (can be set multiple times) + +The Certificate Authority signing certificate that is trusted for SSL +connections to the registry. Values should be in PEM format (Windows calls +it "Base-64 encoded X.509 (.CER)") with newlines replaced by the string +"\\n". For example: + +\`\`\`ini +ca="-----BEGIN CERTIFICATE-----\\nXXXX\\nXXXX\\n-----END CERTIFICATE-----" +\`\`\` + +Set to \`null\` to only allow "known" registrars, or to a specific CA cert to +trust only that specific signing authority. + +Multiple CAs can be trusted by specifying an array of certificates: + +\`\`\`ini +ca[]="..." +ca[]="..." +\`\`\` + +See also the \`strict-ssl\` config. + +#### \`cache\` + +* Default: Windows: \`%LocalAppData%\\npm-cache\`, Posix: \`~/.npm\` +* Type: Path + +The location of npm's cache directory. See [\`npm +cache\`](/commands/npm-cache) + +#### \`cafile\` + +* Default: null +* Type: Path + +A path to a file containing one or multiple Certificate Authority signing +certificates. Similar to the \`ca\` setting, but allows for multiple CA's, as +well as for the CA information to be stored in a file on disk. + +#### \`call\` + +* Default: "" +* Type: String + +Optional companion option for \`npm exec\`, \`npx\` that allows for specifying a +custom command to be run along with the installed packages. + +\`\`\`bash +npm exec --package yo --package generator-node --call "yo node" +\`\`\` + + +#### \`cert\` + +* Default: null +* Type: null or String + +A client certificate to pass when accessing the registry. Values should be +in PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with +newlines replaced by the string "\\n". For example: + +\`\`\`ini +cert="-----BEGIN CERTIFICATE-----\\nXXXX\\nXXXX\\n-----END CERTIFICATE-----" +\`\`\` + +It is _not_ the path to a certificate file (and there is no "certfile" +option). + +#### \`ci-name\` + +* Default: The name of the current CI system, or \`null\` when not on a known CI + platform. +* Type: null or String + +The name of a continuous integration system. If not set explicitly, npm will +detect the current CI environment using the +[\`@npmcli/ci-detect\`](http://npm.im/@npmcli/ci-detect) module. + +#### \`cidr\` + +* Default: null +* Type: null or String (can be set multiple times) + +This is a list of CIDR address to be used when configuring limited access +tokens with the \`npm token create\` command. + +#### \`color\` + +* Default: true unless the NO_COLOR environ is set to something other than '0' +* Type: "always" or Boolean + +If false, never shows colors. If \`"always"\` then always shows colors. If +true, then only prints color codes for tty file descriptors. + +#### \`commit-hooks\` + +* Default: true +* Type: Boolean + +Run git commit hooks when using the \`npm version\` command. + +#### \`depth\` + +* Default: \`Infinity\` if \`--all\` is set, otherwise \`1\` +* Type: null or Number + +The depth to go when recursing packages for \`npm ls\`. + +If not set, \`npm ls\` will show only the immediate dependencies of the root +project. If \`--all\` is set, then npm will show all dependencies by default. 
+ +#### \`description\` + +* Default: true +* Type: Boolean + +Show the description in \`npm search\` + +#### \`diff\` + +* Default: +* Type: String (can be set multiple times) + +Define arguments to compare in \`npm diff\`. + +#### \`diff-dst-prefix\` + +* Default: "b/" +* Type: String + +Destination prefix to be used in \`npm diff\` output. + +#### \`diff-ignore-all-space\` + +* Default: false +* Type: Boolean + +Ignore whitespace when comparing lines in \`npm diff\`. + +#### \`diff-name-only\` + +* Default: false +* Type: Boolean + +Prints only filenames when using \`npm diff\`. + +#### \`diff-no-prefix\` + +* Default: false +* Type: Boolean + +Do not show any source or destination prefix in \`npm diff\` output. + +Note: this causes \`npm diff\` to ignore the \`--diff-src-prefix\` and +\`--diff-dst-prefix\` configs. + +#### \`diff-src-prefix\` + +* Default: "a/" +* Type: String + +Source prefix to be used in \`npm diff\` output. + +#### \`diff-text\` + +* Default: false +* Type: Boolean + +Treat all files as text in \`npm diff\`. + +#### \`diff-unified\` + +* Default: 3 +* Type: Number + +The number of lines of context to print in \`npm diff\`. + +#### \`dry-run\` + +* Default: false +* Type: Boolean + +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, \`install\`, \`update\`, +\`dedupe\`, \`uninstall\`, as well as \`pack\` and \`publish\`. + +Note: This is NOT honored by other network related commands, eg \`dist-tags\`, +\`owner\`, etc. + +#### \`editor\` + +* Default: The EDITOR or VISUAL environment variables, or 'notepad.exe' on + Windows, or 'vim' on Unix systems +* Type: String + +The command to run for \`npm edit\` and \`npm config edit\`. + +#### \`engine-strict\` + +* Default: false +* Type: Boolean + +If set to true, then npm will stubbornly refuse to install (or even consider +installing) any package that claims to not be compatible with the current +Node.js version. + +This can be overridden by setting the \`--force\` flag. + +#### \`fetch-retries\` + +* Default: 2 +* Type: Number + +The "retries" config for the \`retry\` module to use when fetching packages +from the registry. + +npm will retry idempotent read requests to the registry in the case of +network failures or 5xx HTTP errors. + +#### \`fetch-retry-factor\` + +* Default: 10 +* Type: Number + +The "factor" config for the \`retry\` module to use when fetching packages. + +#### \`fetch-retry-maxtimeout\` + +* Default: 60000 (1 minute) +* Type: Number + +The "maxTimeout" config for the \`retry\` module to use when fetching +packages. + +#### \`fetch-retry-mintimeout\` + +* Default: 10000 (10 seconds) +* Type: Number + +The "minTimeout" config for the \`retry\` module to use when fetching +packages. + +#### \`fetch-timeout\` + +* Default: 300000 (5 minutes) +* Type: Number + +The maximum amount of time to wait for HTTP requests to complete. + +#### \`force\` + +* Default: false +* Type: Boolean + +Removes various protections against unfortunate side effects, common +mistakes, unnecessary performance degradation, and malicious input. + +* Allow clobbering non-npm files in global installs. +* Allow the \`npm version\` command to work on an unclean git repository. +* Allow deleting the cache folder with \`npm cache clean\`. +* Allow installing packages that have an \`engines\` declaration requiring a + different version of npm. 
+* Allow installing packages that have an \`engines\` declaration requiring a + different version of \`node\`, even if \`--engine-strict\` is enabled. +* Allow \`npm audit fix\` to install modules outside your stated dependency + range (including SemVer-major changes). +* Allow unpublishing all versions of a published package. +* Allow conflicting peerDependencies to be installed in the root project. +* Implicitly set \`--yes\` during \`npm init\`. +* Allow clobbering existing values in \`npm pkg\` + +If you don't have a clear idea of what you want to do, it is strongly +recommended that you do not use this option! + +#### \`foreground-scripts\` + +* Default: false +* Type: Boolean + +Run all build scripts (ie, \`preinstall\`, \`install\`, and \`postinstall\`) +scripts for installed packages in the foreground process, sharing standard +input, output, and error with the main npm process. + +Note that this will generally make installs run slower, and be much noisier, +but can be useful for debugging. + +#### \`format-package-lock\` + +* Default: true +* Type: Boolean + +Format \`package-lock.json\` or \`npm-shrinkwrap.json\` as a human readable +file. + +#### \`fund\` + +* Default: true +* Type: Boolean + +When "true" displays the message at the end of each \`npm install\` +acknowledging the number of dependencies looking for funding. See [\`npm +fund\`](/commands/npm-fund) for details. + +#### \`git\` + +* Default: "git" +* Type: String + +The command to use for git commands. If git is installed on the computer, +but is not in the \`PATH\`, then set this to the full path to the git binary. + +#### \`git-tag-version\` + +* Default: true +* Type: Boolean + +Tag the commit when using the \`npm version\` command. + +#### \`global\` + +* Default: false +* Type: Boolean + +Operates in "global" mode, so that packages are installed into the \`prefix\` +folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the \`{prefix}/lib/node_modules\` folder, instead + of the current working directory. +* bin files are linked to \`{prefix}/bin\` +* man pages are linked to \`{prefix}/share/man\` + +#### \`global-style\` + +* Default: false +* Type: Boolean + +Causes npm to install the package into your local \`node_modules\` folder with +the same layout it uses with the global \`node_modules\` folder. Only your +direct dependencies will show in \`node_modules\` and everything they depend +on will be flattened in their \`node_modules\` folders. This obviously will +eliminate some deduping. If used with \`legacy-bundling\`, \`legacy-bundling\` +will be preferred. + +#### \`globalconfig\` + +* Default: The global --prefix setting plus 'etc/npmrc'. For example, + '/usr/local/etc/npmrc' +* Type: Path + +The config file to read for global config options. + +#### \`heading\` + +* Default: "npm" +* Type: String + +The string that starts all the debugging log output. + +#### \`https-proxy\` + +* Default: null +* Type: null or URL + +A proxy to use for outgoing https requests. If the \`HTTPS_PROXY\` or +\`https_proxy\` or \`HTTP_PROXY\` or \`http_proxy\` environment variables are set, +proxy settings will be honored by the underlying \`make-fetch-happen\` +library. + +#### \`if-present\` + +* Default: false +* Type: Boolean + +If true, npm will not exit with an error code when \`run-script\` is invoked +for a script that isn't defined in the \`scripts\` section of \`package.json\`. 
+This option can be used when it's desirable to optionally run a script when +it's present and fail if the script fails. This is useful, for example, when +running scripts that may only apply for some builds in an otherwise generic +CI setup. + +#### \`ignore-scripts\` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +\`npm start\`, \`npm stop\`, \`npm restart\`, \`npm test\`, and \`npm run-script\` +will still run their intended script if \`ignore-scripts\` is set, but they +will *not* run any pre- or post-scripts. + +#### \`include\` + +* Default: +* Type: "prod", "dev", "optional", or "peer" (can be set multiple times) + +Option that allows for defining which types of dependencies to install. + +This is the inverse of \`--omit=<type>\`. + +Dependency types specified in \`--include\` will not be omitted, regardless of +the order in which omit/include are specified on the command-line. + +#### \`include-staged\` + +* Default: false +* Type: Boolean + +Allow installing "staged" published packages, as defined by [npm RFC PR +#92](https://github.com/npm/rfcs/pull/92). + +This is experimental, and not implemented by the npm public registry. + +#### \`init-author-email\` + +* Default: "" +* Type: String + +The value \`npm init\` should use by default for the package author's email. + +#### \`init-author-name\` + +* Default: "" +* Type: String + +The value \`npm init\` should use by default for the package author's name. + +#### \`init-author-url\` + +* Default: "" +* Type: "" or URL + +The value \`npm init\` should use by default for the package author's +homepage. + +#### \`init-license\` + +* Default: "ISC" +* Type: String + +The value \`npm init\` should use by default for the package license. + +#### \`init-module\` + +* Default: "~/.npm-init.js" +* Type: Path + +A module that will be loaded by the \`npm init\` command. See the +documentation for the +[init-package-json](https://github.com/npm/init-package-json) module for +more information, or [npm init](/commands/npm-init). + +#### \`init-version\` + +* Default: "1.0.0" +* Type: SemVer string + +The value that \`npm init\` should use by default for the package version +number, if not already set in package.json. + +#### \`json\` + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +* In \`npm pkg set\` it enables parsing set values with JSON.parse() before + saving them to your \`package.json\`. + +Not supported by all npm commands. + +#### \`key\` + +* Default: null +* Type: null or String + +A client key to pass when accessing the registry. Values should be in PEM +format with newlines replaced by the string "\\n". For example: + +\`\`\`ini +key="-----BEGIN PRIVATE KEY-----\\nXXXX\\nXXXX\\n-----END PRIVATE KEY-----" +\`\`\` + +It is _not_ the path to a key file (and there is no "keyfile" option). + +#### \`legacy-bundling\` + +* Default: false +* Type: Boolean + +Causes npm to install the package such that versions of npm prior to 1.4, +such as the one included with node 0.8, can install the package. This +eliminates all automatic deduping. If used with \`global-style\` this option +will be preferred. + +#### \`legacy-peer-deps\` + +* Default: false +* Type: Boolean + +Causes npm to completely ignore \`peerDependencies\` when building a package +tree, as in npm versions 3 through 6. 
+ +If a package cannot be installed because of overly strict \`peerDependencies\` +that collide, it provides a way to move forward resolving the situation. + +This differs from \`--omit=peer\`, in that \`--omit=peer\` will avoid unpacking +\`peerDependencies\` on disk, but will still design a tree such that +\`peerDependencies\` _could_ be unpacked in a correct place. + +Use of \`legacy-peer-deps\` is not recommended, as it will not enforce the +\`peerDependencies\` contract that meta-dependencies may rely on. + +#### \`link\` + +* Default: false +* Type: Boolean + +Used with \`npm ls\`, limiting output to only those packages that are linked. + +#### \`local-address\` + +* Default: null +* Type: IP Address + +The IP address of the local interface to use when making connections to the +npm registry. Must be IPv4 in versions of Node prior to 0.12. + +#### \`location\` + +* Default: "user" unless \`--global\` is passed, which will also set this value + to "global" +* Type: "global", "user", or "project" + +When passed to \`npm config\` this refers to which config file to use. + +#### \`loglevel\` + +* Default: "notice" +* Type: "silent", "error", "warn", "notice", "http", "timing", "info", + "verbose", or "silly" + +What level of logs to report. On failure, *all* logs are written to +\`npm-debug.log\` in the current working directory. + +Any logs of a higher level than the setting are shown. The default is +"notice". + +See also the \`foreground-scripts\` config. + +#### \`logs-max\` + +* Default: 10 +* Type: Number + +The maximum number of log files to store. + +#### \`long\` + +* Default: false +* Type: Boolean + +Show extended information in \`ls\`, \`search\`, and \`help-search\`. + +#### \`maxsockets\` + +* Default: 15 +* Type: Number + +The maximum number of connections to use per origin (protocol/host/port +combination). + +#### \`message\` + +* Default: "%s" +* Type: String + +Commit message which is used by \`npm version\` when creating version commit. + +Any "%s" in the message will be replaced with the version number. + +#### \`node-options\` + +* Default: null +* Type: null or String + +Options to pass through to Node.js via the \`NODE_OPTIONS\` environment +variable. This does not impact how npm itself is executed but it does impact +how lifecycle scripts are called. + +#### \`node-version\` + +* Default: Node.js \`process.version\` value +* Type: SemVer string + +The node version to use when checking a package's \`engines\` setting. + +#### \`noproxy\` + +* Default: The value of the NO_PROXY environment variable +* Type: String (can be set multiple times) + +Domain extensions that should bypass any proxies. + +Also accepts a comma-delimited string. + +#### \`npm-version\` + +* Default: Output of \`npm --version\` +* Type: SemVer string + +The npm version to use when checking a package's \`engines\` setting. + +#### \`offline\` + +* Default: false +* Type: Boolean + +Force offline mode: no network requests will be done during install. To +allow the CLI to fill in missing cache data, see \`--prefer-offline\`. + +#### \`omit\` + +* Default: 'dev' if the \`NODE_ENV\` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) + +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +\`package-lock.json\` or \`npm-shrinkwrap.json\` file. They are just not +physically installed on disk. 
+ +If a package type appears in both the \`--include\` and \`--omit\` lists, then +it will be included. + +If the resulting omit list includes \`'dev'\`, then the \`NODE_ENV\` environment +variable will be set to \`'production'\` for all lifecycle scripts. + +#### \`otp\` + +* Default: null +* Type: null or String + +This is a one-time password from a two-factor authenticator. It's needed +when publishing or changing package permissions with \`npm access\`. + +If not set, and a registry response fails with a challenge for a one-time +password, npm will prompt on the command line for one. + +#### \`pack-destination\` + +* Default: "." +* Type: String + +Directory in which \`npm pack\` will save tarballs. + +#### \`package\` + +* Default: +* Type: String (can be set multiple times) + +The package to install for [\`npm exec\`](/commands/npm-exec) + +#### \`package-lock\` + +* Default: true +* Type: Boolean + +If set to false, then ignore \`package-lock.json\` files when installing. This +will also prevent _writing_ \`package-lock.json\` if \`save\` is true. + +When package package-locks are disabled, automatic pruning of extraneous +modules will also be disabled. To remove extraneous modules with +package-locks disabled use \`npm prune\`. + +#### \`package-lock-only\` + +* Default: false +* Type: Boolean + +If set to true, the current operation will only use the \`package-lock.json\`, +ignoring \`node_modules\`. + +For \`update\` this means only the \`package-lock.json\` will be updated, +instead of checking \`node_modules\` and downloading dependencies. + +For \`list\` this means the output will be based on the tree described by the +\`package-lock.json\`, rather than the contents of \`node_modules\`. + +#### \`parseable\` + +* Default: false +* Type: Boolean + +Output parseable results from commands that write to standard output. For +\`npm search\`, this will be tab-separated table format. + +#### \`prefer-offline\` + +* Default: false +* Type: Boolean + +If true, staleness checks for cached data will be bypassed, but missing data +will be requested from the server. To force full offline mode, use +\`--offline\`. + +#### \`prefer-online\` + +* Default: false +* Type: Boolean + +If true, staleness checks for cached data will be forced, making the CLI +look for updates immediately even for fresh package data. + +#### \`prefix\` + +* Default: In global mode, the folder where the node executable is installed. + In local mode, the nearest parent folder containing either a package.json + file or a node_modules folder. +* Type: Path + +The location to install global items. If set on the command line, then it +forces non-global commands to run in the specified folder. + +#### \`preid\` + +* Default: "" +* Type: String + +The "prerelease identifier" to use as a prefix for the "prerelease" part of +a semver. Like the \`rc\` in \`1.2.0-rc.8\`. + +#### \`progress\` + +* Default: \`true\` unless running in a known CI system +* Type: Boolean + +When set to \`true\`, npm will display a progress bar during time intensive +operations, if \`process.stderr\` is a TTY. + +Set to \`false\` to suppress the progress bar. + +#### \`proxy\` + +* Default: null +* Type: null, false, or URL + +A proxy to use for outgoing http requests. If the \`HTTP_PROXY\` or +\`http_proxy\` environment variables are set, proxy settings will be honored +by the underlying \`request\` library. 
+ +#### \`read-only\` + +* Default: false +* Type: Boolean + +This is used to mark a token as unable to publish when configuring limited +access tokens with the \`npm token create\` command. + +#### \`rebuild-bundle\` + +* Default: true +* Type: Boolean + +Rebuild bundled dependencies after installation. + +#### \`registry\` + +* Default: "https://registry.npmjs.org/" +* Type: URL + +The base URL of the npm registry. + +#### \`save\` + +* Default: true +* Type: Boolean + +Save installed packages to a package.json file as dependencies. + +When used with the \`npm rm\` command, removes the dependency from +package.json. + +#### \`save-bundle\` + +* Default: false +* Type: Boolean + +If a package would be saved at install time by the use of \`--save\`, +\`--save-dev\`, or \`--save-optional\`, then also put it in the +\`bundleDependencies\` list. + +Ignore if \`--save-peer\` is set, since peerDependencies cannot be bundled. + +#### \`save-dev\` + +* Default: false +* Type: Boolean + +Save installed packages to a package.json file as \`devDependencies\`. + +#### \`save-exact\` + +* Default: false +* Type: Boolean + +Dependencies saved to package.json will be configured with an exact version +rather than using npm's default semver range operator. + +#### \`save-optional\` + +* Default: false +* Type: Boolean + +Save installed packages to a package.json file as \`optionalDependencies\`. + +#### \`save-peer\` + +* Default: false +* Type: Boolean + +Save installed packages. to a package.json file as \`peerDependencies\` + +#### \`save-prefix\` + +* Default: "^" +* Type: String + +Configure how versions of packages installed to a package.json file via +\`--save\` or \`--save-dev\` get prefixed. + +For example if a package has version \`1.2.3\`, by default its version is set +to \`^1.2.3\` which allows minor upgrades for that package, but after \`npm +config set save-prefix='~'\` it would be set to \`~1.2.3\` which only allows +patch upgrades. + +#### \`save-prod\` + +* Default: false +* Type: Boolean + +Save installed packages into \`dependencies\` specifically. This is useful if +a package already exists in \`devDependencies\` or \`optionalDependencies\`, but +you want to move it to be a non-optional production dependency. + +This is the default behavior if \`--save\` is true, and neither \`--save-dev\` +or \`--save-optional\` are true. + +#### \`scope\` + +* Default: the scope of the current project, if any, or "" +* Type: String + +Associate an operation with a scope for a scoped registry. + +Useful when logging in to or out of a private registry: + +\`\`\` +# log in, linking the scope to the custom registry +npm login --scope=@mycorp --registry=https://registry.mycorp.com + +# log out, removing the link and the auth token +npm logout --scope=@mycorp +\`\`\` + +This will cause \`@mycorp\` to be mapped to the registry for future +installation of packages specified according to the pattern +\`@mycorp/package\`. + +This will also cause \`npm init\` to create a scoped package. + +\`\`\` +# accept all defaults, and create a package named "@foo/whatever", +# instead of just named "whatever" +npm init --scope=@foo --yes +\`\`\` + + +#### \`script-shell\` + +* Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows +* Type: null or String + +The shell to use for scripts run with the \`npm exec\`, \`npm run\` and \`npm +init <pkg>\` commands. + +#### \`searchexclude\` + +* Default: "" +* Type: String + +Space-separated options that limit the results from search. 
+ +#### \`searchlimit\` + +* Default: 20 +* Type: Number + +Number of items to limit search results to. Will not apply at all to legacy +searches. + +#### \`searchopts\` + +* Default: "" +* Type: String + +Space-separated options that are always passed to search. + +#### \`searchstaleness\` + +* Default: 900 +* Type: Number + +The age of the cache, in seconds, before another registry request is made if +using legacy search endpoint. + +#### \`shell\` + +* Default: SHELL environment variable, or "bash" on Posix, or "cmd.exe" on + Windows +* Type: String + +The shell to run for the \`npm explore\` command. + +#### \`sign-git-commit\` + +* Default: false +* Type: Boolean + +If set to true, then the \`npm version\` command will commit the new package +version using \`-S\` to add a signature. + +Note that git requires you to have set up GPG keys in your git configs for +this to work properly. + +#### \`sign-git-tag\` + +* Default: false +* Type: Boolean + +If set to true, then the \`npm version\` command will tag the version using +\`-s\` to add a signature. + +Note that git requires you to have set up GPG keys in your git configs for +this to work properly. + +#### \`strict-peer-deps\` + +* Default: false +* Type: Boolean + +If set to \`true\`, and \`--legacy-peer-deps\` is not set, then _any_ +conflicting \`peerDependencies\` will be treated as an install failure, even +if npm could reasonably guess the appropriate resolution based on non-peer +dependency relationships. + +By default, conflicting \`peerDependencies\` deep in the dependency graph will +be resolved using the nearest non-peer dependency specification, even if +doing so will result in some packages receiving a peer dependency outside +the range set in their package's \`peerDependencies\` object. + +When such and override is performed, a warning is printed, explaining the +conflict and the packages involved. If \`--strict-peer-deps\` is set, then +this warning is treated as a failure. + +#### \`strict-ssl\` + +* Default: true +* Type: Boolean + +Whether or not to do SSL key validation when making requests to the registry +via https. + +See also the \`ca\` config. + +#### \`tag\` + +* Default: "latest" +* Type: String + +If you ask npm to install a package and don't tell it a specific version, +then it will install the specified tag. + +Also the tag that is added to the package@version specified by the \`npm tag\` +command, if no explicit tag is given. + +When used by the \`npm diff\` command, this is the tag used to fetch the +tarball that will be compared with the local files by default. + +#### \`tag-version-prefix\` + +* Default: "v" +* Type: String + +If set, alters the prefix used when tagging a new version when performing a +version increment using \`npm-version\`. To remove the prefix altogether, set +it to the empty string: \`""\`. + +Because other tools may rely on the convention that npm version tags look +like \`v1.0.0\`, _only use this property if it is absolutely necessary_. In +particular, use care when overriding this setting for public packages. + +#### \`timing\` + +* Default: false +* Type: Boolean + +If true, writes an \`npm-debug\` log to \`_logs\` and timing information to +\`_timing.json\`, both in your cache, even if the command completes +successfully. \`_timing.json\` is a newline delimited list of JSON objects. + +You can quickly view it with this [json](https://npm.im/json) command line: +\`npm exec -- json -g < ~/.npm/_timing.json\`. 
+ +#### \`umask\` + +* Default: 0 +* Type: Octal numeric string in range 0000..0777 (0..511) + +The "umask" value to use when setting the file creation mode on files and +folders. + +Folders and executables are given a mode which is \`0o777\` masked against +this value. Other files are given a mode which is \`0o666\` masked against +this value. + +Note that the underlying system will _also_ apply its own umask value to +files and folders that are created, and npm does not circumvent this, but +rather adds the \`--umask\` config to it. + +Thus, the effective default umask value on most POSIX systems is 0o22, +meaning that folders and executables are created with a mode of 0o755 and +other files are created with a mode of 0o644. + +#### \`unicode\` + +* Default: false on windows, true on mac/unix systems with a unicode locale, + as defined by the \`LC_ALL\`, \`LC_CTYPE\`, or \`LANG\` environment variables. +* Type: Boolean + +When set to true, npm uses unicode characters in the tree output. When +false, it uses ascii characters instead of unicode glyphs. + +#### \`update-notifier\` + +* Default: true +* Type: Boolean + +Set to false to suppress the update notification when using an older version +of npm than the latest. + +#### \`usage\` + +* Default: false +* Type: Boolean + +Show short usage output about the command specified. + +#### \`user-agent\` + +* Default: "npm/{npm-version} node/{node-version} {platform} {arch} + workspaces/{workspaces} {ci}" +* Type: String + +Sets the User-Agent request header. The following fields are replaced with +their actual counterparts: + +* \`{npm-version}\` - The npm version in use +* \`{node-version}\` - The Node.js version in use +* \`{platform}\` - The value of \`process.platform\` +* \`{arch}\` - The value of \`process.arch\` +* \`{workspaces}\` - Set to \`true\` if the \`workspaces\` or \`workspace\` options + are set. +* \`{ci}\` - The value of the \`ci-name\` config, if set, prefixed with \`ci/\`, or + an empty string if \`ci-name\` is empty. + +#### \`userconfig\` + +* Default: "~/.npmrc" +* Type: Path + +The location of user-level configuration settings. + +This may be overridden by the \`npm_config_userconfig\` environment variable +or the \`--userconfig\` command line option, but may _not_ be overridden by +settings in the \`globalconfig\` file. + +#### \`version\` + +* Default: false +* Type: Boolean + +If true, output the npm version and exit successfully. + +Only relevant when specified explicitly on the command line. + +#### \`versions\` + +* Default: false +* Type: Boolean + +If true, output the npm version as well as node's \`process.versions\` map and +the version in the current working directory's \`package.json\` file if one +exists, and exit successfully. + +Only relevant when specified explicitly on the command line. + +#### \`viewer\` + +* Default: "man" on Posix, "browser" on Windows +* Type: String + +The program to use to view help content. + +Set to \`"browser"\` to view html help content in the default web browser. + +#### \`which\` + +* Default: null +* Type: null or Number + +If there are multiple funding sources, which 1-indexed source URL to open. + +#### \`workspace\` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. 
+ +Valid values for the \`workspace\` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the \`npm init\` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### \`workspaces\` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + +#### \`yes\` + +* Default: null +* Type: null or Boolean + +Automatically answer "yes" to any prompts that npm might print on the +command line. + +#### \`also\` + +* Default: null +* Type: null, "dev", or "development" +* DEPRECATED: Please use --include=dev instead. + +When set to \`dev\` or \`development\`, this is an alias for \`--include=dev\`. + +#### \`auth-type\` + +* Default: "legacy" +* Type: "legacy", "sso", "saml", or "oauth" +* DEPRECATED: This method of SSO/SAML/OAuth is deprecated and will be removed + in a future version of npm in favor of web-based login. + +What authentication strategy to use with \`adduser\`/\`login\`. + +#### \`cache-max\` + +* Default: Infinity +* Type: Number +* DEPRECATED: This option has been deprecated in favor of \`--prefer-online\` + +\`--cache-max=0\` is an alias for \`--prefer-online\` + +#### \`cache-min\` + +* Default: 0 +* Type: Number +* DEPRECATED: This option has been deprecated in favor of \`--prefer-offline\`. + +\`--cache-min=9999 (or bigger)\` is an alias for \`--prefer-offline\`. + +#### \`dev\` + +* Default: false +* Type: Boolean +* DEPRECATED: Please use --include=dev instead. + +Alias for \`--include=dev\`. + +#### \`init.author.email\` + +* Default: "" +* Type: String +* DEPRECATED: Use \`--init-author-email\` instead. + +Alias for \`--init-author-email\` + +#### \`init.author.name\` + +* Default: "" +* Type: String +* DEPRECATED: Use \`--init-author-name\` instead. + +Alias for \`--init-author-name\` + +#### \`init.author.url\` + +* Default: "" +* Type: "" or URL +* DEPRECATED: Use \`--init-author-url\` instead. + +Alias for \`--init-author-url\` + +#### \`init.license\` + +* Default: "ISC" +* Type: String +* DEPRECATED: Use \`--init-license\` instead. + +Alias for \`--init-license\` + +#### \`init.module\` + +* Default: "~/.npm-init.js" +* Type: Path +* DEPRECATED: Use \`--init-module\` instead. + +Alias for \`--init-module\` + +#### \`init.version\` + +* Default: "1.0.0" +* Type: SemVer string +* DEPRECATED: Use \`--init-version\` instead. + +Alias for \`--init-version\` + +#### \`only\` + +* Default: null +* Type: null, "prod", or "production" +* DEPRECATED: Use \`--omit=dev\` to omit dev dependencies from the install. + +When set to \`prod\` or \`production\`, this is an alias for \`--omit=dev\`. + +#### \`optional\` + +* Default: null +* Type: null or Boolean +* DEPRECATED: Use \`--omit=optional\` to exclude optional dependencies, or + \`--include=optional\` to include them. + +Default value does install optional deps unless otherwise omitted. + +Alias for --include=optional or --omit=optional + +#### \`production\` + +* Default: null +* Type: null or Boolean +* DEPRECATED: Use \`--omit=dev\` instead. + +Alias for \`--omit=dev\` + +#### \`shrinkwrap\` + +* Default: true +* Type: Boolean +* DEPRECATED: Use the --package-lock setting instead. 
+ +Alias for --package-lock + +#### \`sso-poll-frequency\` + +* Default: 500 +* Type: Number +* DEPRECATED: The --auth-type method of SSO/SAML/OAuth will be removed in a + future version of npm in favor of web-based login. + +When used with SSO-enabled \`auth-type\`s, configures how regularly the +registry should be polled while the user is completing authentication. + +#### \`sso-type\` + +* Default: "oauth" +* Type: null, "oauth", or "saml" +* DEPRECATED: The --auth-type method of SSO/SAML/OAuth will be removed in a + future version of npm in favor of web-based login. + +If \`--auth-type=sso\`, the type of SSO type to use. + +#### \`tmp\` + +* Default: The value returned by the Node.js \`os.tmpdir()\` method + <https://nodejs.org/api/os.html#os_os_tmpdir> +* Type: Path +* DEPRECATED: This setting is no longer used. npm stores temporary files in a + special location in the cache, and they are managed by + [\`cacache\`](http://npm.im/cacache). + +Historically, the location where temporary files were stored. No longer +relevant. +` diff --git a/tap-snapshots/test/lib/utils/config/index.js.test.cjs b/tap-snapshots/test/lib/utils/config/index.js.test.cjs new file mode 100644 index 0000000000000..f1cba9264ee2f --- /dev/null +++ b/tap-snapshots/test/lib/utils/config/index.js.test.cjs @@ -0,0 +1,136 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/lib/utils/config/index.js TAP > shorthands 1`] = ` +Object { + "?": Array [ + "--usage", + ], + "a": Array [ + "--all", + ], + "B": Array [ + "--save-bundle", + ], + "c": Array [ + "--call", + ], + "C": Array [ + "--prefix", + ], + "d": Array [ + "--loglevel", + "info", + ], + "D": Array [ + "--save-dev", + ], + "dd": Array [ + "--loglevel", + "verbose", + ], + "ddd": Array [ + "--loglevel", + "silly", + ], + "desc": Array [ + "--description", + ], + "E": Array [ + "--save-exact", + ], + "enjoy-by": Array [ + "--before", + ], + "f": Array [ + "--force", + ], + "g": Array [ + "--global", + ], + "h": Array [ + "--usage", + ], + "H": Array [ + "--usage", + ], + "help": Array [ + "--usage", + ], + "l": Array [ + "--long", + ], + "L": Array [ + "--location", + ], + "local": Array [ + "--no-global", + ], + "m": Array [ + "--message", + ], + "n": Array [ + "--no-yes", + ], + "no": Array [ + "--no-yes", + ], + "O": Array [ + "--save-optional", + ], + "p": Array [ + "--parseable", + ], + "P": Array [ + "--save-prod", + ], + "porcelain": Array [ + "--parseable", + ], + "q": Array [ + "--loglevel", + "warn", + ], + "quiet": Array [ + "--loglevel", + "warn", + ], + "readonly": Array [ + "--read-only", + ], + "reg": Array [ + "--registry", + ], + "s": Array [ + "--loglevel", + "silent", + ], + "S": Array [ + "--save", + ], + "silent": Array [ + "--loglevel", + "silent", + ], + "v": Array [ + "--version", + ], + "verbose": Array [ + "--loglevel", + "verbose", + ], + "w": Array [ + "--workspace", + ], + "ws": Array [ + "--workspaces", + ], + "y": Array [ + "--yes", + ], +} +` diff --git a/tap-snapshots/test-lib-utils-error-message.js-TAP.test.js b/tap-snapshots/test/lib/utils/error-message.js.test.cjs similarity index 66% rename from tap-snapshots/test-lib-utils-error-message.js-TAP.test.js rename to tap-snapshots/test/lib/utils/error-message.js.test.cjs index bcf5144499f24..5b6e3c85ab112 100644 --- 
a/tap-snapshots/test-lib-utils-error-message.js-TAP.test.js +++ b/tap-snapshots/test/lib/utils/error-message.js.test.cjs @@ -28,7 +28,10 @@ Object { ], Array [ "404", - "\\nNote that you can also install from a", + String( + + Note that you can also install from a + ), ], Array [ "404", @@ -67,7 +70,10 @@ Object { ], Array [ "404", - "\\nNote that you can also install from a", + String( + + Note that you can also install from a + ), ], Array [ "404", @@ -113,7 +119,10 @@ Object { ], Array [ "404", - "\\nNote that you can also install from a", + String( + + Note that you can also install from a + ), ], Array [ "404", @@ -134,7 +143,11 @@ Object { "detail": Array [ Array [ "notsup", - "Not compatible with your version of node/npm: some@package\\nRequired: undefined\\nActual: {\\"npm\\":\\"123.69.420-npm\\",\\"node\\":\\"99.99.99\\"}", + String( + Not compatible with your version of node/npm: some@package + Required: undefined + Actual: {"npm":"123.69.420-npm","node":"99.99.99"} + ), ], ], "summary": Array [ @@ -155,7 +168,12 @@ Object { "detail": Array [ Array [ "notsup", - "Valid OS: !yours,mine\\nValid Arch: x420,x69\\nActual OS: posix\\nActual Arch: x64", + String( + Valid OS: !yours,mine + Valid Arch: x420,x69 + Actual OS: posix + Actual Arch: x64 + ), ], ], "summary": Array [ @@ -172,7 +190,12 @@ Object { "detail": Array [ Array [ "notsup", - "Valid OS: !yours\\nValid Arch: x420\\nActual OS: posix\\nActual Arch: x64", + String( + Valid OS: !yours + Valid Arch: x420 + Actual OS: posix + Actual Arch: x64 + ), ], ], "summary": Array [ @@ -246,7 +269,15 @@ Object { "detail": Array [ Array [ "", - "\\nThe operation was rejected by your operating system.\\nIt is likely you do not have the permissions to access this file as the current user\\n\\nIf you believe this might be a permissions issue, please double-check the\\npermissions of the file and its containing directories, or try running\\nthe command again as root/Administrator.", + String( + + The operation was rejected by your operating system. + It is likely you do not have the permissions to access this file as the current user + + If you believe this might be a permissions issue, please double-check the + permissions of the file and its containing directories, or try running + the command again as root/Administrator. + ), ], ], "summary": Array [ @@ -271,7 +302,15 @@ Object { "detail": Array [ Array [ "", - "\\nThe operation was rejected by your operating system.\\nIt is likely you do not have the permissions to access this file as the current user\\n\\nIf you believe this might be a permissions issue, please double-check the\\npermissions of the file and its containing directories, or try running\\nthe command again as root/Administrator.", + String( + + The operation was rejected by your operating system. + It is likely you do not have the permissions to access this file as the current user + + If you believe this might be a permissions issue, please double-check the + permissions of the file and its containing directories, or try running + the command again as root/Administrator. 
+ ), ], ], "summary": Array [ @@ -296,7 +335,15 @@ Object { "detail": Array [ Array [ "", - "\\nThe operation was rejected by your operating system.\\nIt is likely you do not have the permissions to access this file as the current user\\n\\nIf you believe this might be a permissions issue, please double-check the\\npermissions of the file and its containing directories, or try running\\nthe command again as root/Administrator.", + String( + + The operation was rejected by your operating system. + It is likely you do not have the permissions to access this file as the current user + + If you believe this might be a permissions issue, please double-check the + permissions of the file and its containing directories, or try running + the command again as root/Administrator. + ), ], ], "summary": Array [ @@ -321,7 +368,15 @@ Object { "detail": Array [ Array [ "", - "\\nThe operation was rejected by your operating system.\\nIt is likely you do not have the permissions to access this file as the current user\\n\\nIf you believe this might be a permissions issue, please double-check the\\npermissions of the file and its containing directories, or try running\\nthe command again as root/Administrator.", + String( + + The operation was rejected by your operating system. + It is likely you do not have the permissions to access this file as the current user + + If you believe this might be a permissions issue, please double-check the + permissions of the file and its containing directories, or try running + the command again as root/Administrator. + ), ], ], "summary": Array [ @@ -346,7 +401,15 @@ Object { "detail": Array [ Array [ "", - "\\nThe operation was rejected by your operating system.\\nIt is likely you do not have the permissions to access this file as the current user\\n\\nIf you believe this might be a permissions issue, please double-check the\\npermissions of the file and its containing directories, or try running\\nthe command again as root/Administrator.", + String( + + The operation was rejected by your operating system. + It is likely you do not have the permissions to access this file as the current user + + If you believe this might be a permissions issue, please double-check the + permissions of the file and its containing directories, or try running + the command again as root/Administrator. + ), ], ], "summary": Array [ @@ -372,7 +435,14 @@ Object { "summary": Array [ Array [ "", - "\\nYour cache folder contains root-owned files, due to a bug in\\nprevious versions of npm which has since been addressed.\\n\\nTo permanently fix this problem, please run:\\n sudo chown -R 69:420 \\"/some/cache/dir\\"", + String( + + Your cache folder contains root-owned files, due to a bug in + previous versions of npm which has since been addressed. + + To permanently fix this problem, please run: + sudo chown -R 69:420 "/some/cache/dir" + ), ], ], } @@ -392,7 +462,14 @@ Object { "summary": Array [ Array [ "", - "\\nYour cache folder contains root-owned files, due to a bug in\\nprevious versions of npm which has since been addressed.\\n\\nTo permanently fix this problem, please run:\\n sudo chown -R 69:420 \\"/some/cache/dir\\"", + String( + + Your cache folder contains root-owned files, due to a bug in + previous versions of npm which has since been addressed. 
+ + To permanently fix this problem, please run: + sudo chown -R 69:420 "/some/cache/dir" + ), ], ], } @@ -412,7 +489,14 @@ Object { "summary": Array [ Array [ "", - "\\nYour cache folder contains root-owned files, due to a bug in\\nprevious versions of npm which has since been addressed.\\n\\nTo permanently fix this problem, please run:\\n sudo chown -R 69:420 \\"/some/cache/dir\\"", + String( + + Your cache folder contains root-owned files, due to a bug in + previous versions of npm which has since been addressed. + + To permanently fix this problem, please run: + sudo chown -R 69:420 "/some/cache/dir" + ), ], ], } @@ -431,7 +515,16 @@ Object { "detail": Array [ Array [ "", - "\\nThe operation was rejected by your operating system.\\nIt's possible that the file was already in use (by a text editor or antivirus),\\nor that you lack permissions to access it.\\n\\nIf you believe this might be a permissions issue, please double-check the\\npermissions of the file and its containing directories, or try running\\nthe command again as root/Administrator.", + String( + + The operation was rejected by your operating system. + It's possible that the file was already in use (by a text editor or antivirus), + or that you lack permissions to access it. + + If you believe this might be a permissions issue, please double-check the + permissions of the file and its containing directories, or try running + the command again as root/Administrator. + ), ], ], "summary": Array [ @@ -456,7 +549,16 @@ Object { "detail": Array [ Array [ "", - "\\nThe operation was rejected by your operating system.\\nIt's possible that the file was already in use (by a text editor or antivirus),\\nor that you lack permissions to access it.\\n\\nIf you believe this might be a permissions issue, please double-check the\\npermissions of the file and its containing directories, or try running\\nthe command again as root/Administrator.", + String( + + The operation was rejected by your operating system. + It's possible that the file was already in use (by a text editor or antivirus), + or that you lack permissions to access it. + + If you believe this might be a permissions issue, please double-check the + permissions of the file and its containing directories, or try running + the command again as root/Administrator. + ), ], ], "summary": Array [ @@ -481,7 +583,16 @@ Object { "detail": Array [ Array [ "", - "\\nThe operation was rejected by your operating system.\\nIt's possible that the file was already in use (by a text editor or antivirus),\\nor that you lack permissions to access it.\\n\\nIf you believe this might be a permissions issue, please double-check the\\npermissions of the file and its containing directories, or try running\\nthe command again as root/Administrator.", + String( + + The operation was rejected by your operating system. + It's possible that the file was already in use (by a text editor or antivirus), + or that you lack permissions to access it. + + If you believe this might be a permissions issue, please double-check the + permissions of the file and its containing directories, or try running + the command again as root/Administrator. 
+ ), ], ], "summary": Array [ @@ -506,7 +617,16 @@ Object { "detail": Array [ Array [ "", - "\\nThe operation was rejected by your operating system.\\nIt's possible that the file was already in use (by a text editor or antivirus),\\nor that you lack permissions to access it.\\n\\nIf you believe this might be a permissions issue, please double-check the\\npermissions of the file and its containing directories, or try running\\nthe command again as root/Administrator.", + String( + + The operation was rejected by your operating system. + It's possible that the file was already in use (by a text editor or antivirus), + or that you lack permissions to access it. + + If you believe this might be a permissions issue, please double-check the + permissions of the file and its containing directories, or try running + the command again as root/Administrator. + ), ], ], "summary": Array [ @@ -531,7 +651,16 @@ Object { "detail": Array [ Array [ "", - "\\nThe operation was rejected by your operating system.\\nIt's possible that the file was already in use (by a text editor or antivirus),\\nor that you lack permissions to access it.\\n\\nIf you believe this might be a permissions issue, please double-check the\\npermissions of the file and its containing directories, or try running\\nthe command again as root/Administrator.", + String( + + The operation was rejected by your operating system. + It's possible that the file was already in use (by a text editor or antivirus), + or that you lack permissions to access it. + + If you believe this might be a permissions issue, please double-check the + permissions of the file and its containing directories, or try running + the command again as root/Administrator. + ), ], ], "summary": Array [ @@ -556,7 +685,16 @@ Object { "detail": Array [ Array [ "", - "\\nThe operation was rejected by your operating system.\\nIt's possible that the file was already in use (by a text editor or antivirus),\\nor that you lack permissions to access it.\\n\\nIf you believe this might be a permissions issue, please double-check the\\npermissions of the file and its containing directories, or try running\\nthe command again as root/Administrator.", + String( + + The operation was rejected by your operating system. + It's possible that the file was already in use (by a text editor or antivirus), + or that you lack permissions to access it. + + If you believe this might be a permissions issue, please double-check the + permissions of the file and its containing directories, or try running + the command again as root/Administrator. + ), ], ], "summary": Array [ @@ -581,7 +719,16 @@ Object { "detail": Array [ Array [ "", - "\\nThe operation was rejected by your operating system.\\nIt's possible that the file was already in use (by a text editor or antivirus),\\nor that you lack permissions to access it.\\n\\nIf you believe this might be a permissions issue, please double-check the\\npermissions of the file and its containing directories, or try running\\nthe command again as root/Administrator.", + String( + + The operation was rejected by your operating system. + It's possible that the file was already in use (by a text editor or antivirus), + or that you lack permissions to access it. + + If you believe this might be a permissions issue, please double-check the + permissions of the file and its containing directories, or try running + the command again as root/Administrator. 
+ ), ], ], "summary": Array [ @@ -606,7 +753,16 @@ Object { "detail": Array [ Array [ "", - "\\nThe operation was rejected by your operating system.\\nIt's possible that the file was already in use (by a text editor or antivirus),\\nor that you lack permissions to access it.\\n\\nIf you believe this might be a permissions issue, please double-check the\\npermissions of the file and its containing directories, or try running\\nthe command again as root/Administrator.", + String( + + The operation was rejected by your operating system. + It's possible that the file was already in use (by a text editor or antivirus), + or that you lack permissions to access it. + + If you believe this might be a permissions issue, please double-check the + permissions of the file and its containing directories, or try running + the command again as root/Administrator. + ), ], ], "summary": Array [ @@ -695,7 +851,11 @@ Object { "detail": Array [ Array [ "", - "You can provide a one-time password by passing --otp=<code> to the command you ran.\\nIf you already provided a one-time password then it is likely that you either typoed\\nit, or it timed out. Please try again.", + String( + You can provide a one-time password by passing --otp=<code> to the command you ran. + If you already provided a one-time password then it is likely that you either typoed + it, or it timed out. Please try again. + ), ], ], "summary": Array [ @@ -712,7 +872,11 @@ Object { "detail": Array [ Array [ "", - "You can provide a one-time password by passing --otp=<code> to the command you ran.\\nIf you already provided a one-time password then it is likely that you either typoed\\nit, or it timed out. Please try again.", + String( + You can provide a one-time password by passing --otp=<code> to the command you ran. + If you already provided a one-time password then it is likely that you either typoed + it, or it timed out. Please try again. + ), ], ], "summary": Array [ @@ -729,7 +893,17 @@ Object { "detail": Array [ Array [ "", - "If you were trying to login, change your password, create an\\nauthentication token or enable two-factor authentication then\\nthat means you likely typed your password in incorrectly.\\nPlease try again, or recover your password at:\\n https://www.npmjs.com/forgot\\n\\nIf you were doing some other operation then your saved credentials are\\nprobably out of date. To correct this please try logging in again with:\\n npm login", + String( + If you were trying to login, change your password, create an + authentication token or enable two-factor authentication then + that means you likely typed your password in incorrectly. + Please try again, or recover your password at: + https://www.npmjs.com/forgot + + If you were doing some other operation then your saved credentials are + probably out of date. To correct this please try logging in again with: + npm login + ), ], ], "summary": Array [ @@ -746,7 +920,10 @@ Object { "detail": Array [ Array [ "", - "To correct this please trying logging in again with:\\n npm login", + String( + To correct this please trying logging in again with: + npm login + ), ], ], "summary": Array [ @@ -820,7 +997,13 @@ Object { "detail": Array [ Array [ "network", - "This is a problem related to network connectivity.\\nIn most cases you are behind a proxy or have bad network settings.\\n\\nIf you are behind a proxy, please make sure that the\\n'proxy' config is set properly. See: 'npm help config'", + String( + This is a problem related to network connectivity. 
+ In most cases you are behind a proxy or have bad network settings. + + If you are behind a proxy, please make sure that the + 'proxy' config is set properly. See: 'npm help config' + ), ], ], "summary": Array [ @@ -837,7 +1020,13 @@ Object { "detail": Array [ Array [ "network", - "This is a problem related to network connectivity.\\nIn most cases you are behind a proxy or have bad network settings.\\n\\nIf you are behind a proxy, please make sure that the\\n'proxy' config is set properly. See: 'npm help config'", + String( + This is a problem related to network connectivity. + In most cases you are behind a proxy or have bad network settings. + + If you are behind a proxy, please make sure that the + 'proxy' config is set properly. See: 'npm help config' + ), ], ], "summary": Array [ @@ -854,7 +1043,13 @@ Object { "detail": Array [ Array [ "network", - "This is a problem related to network connectivity.\\nIn most cases you are behind a proxy or have bad network settings.\\n\\nIf you are behind a proxy, please make sure that the\\n'proxy' config is set properly. See: 'npm help config'", + String( + This is a problem related to network connectivity. + In most cases you are behind a proxy or have bad network settings. + + If you are behind a proxy, please make sure that the + 'proxy' config is set properly. See: 'npm help config' + ), ], ], "summary": Array [ @@ -871,7 +1066,11 @@ Object { "detail": Array [ Array [ "notsup", - "Not compatible with your version of node/npm: some@package\\nRequired: undefined\\nActual: {\\"npm\\":\\"123.69.420-npm\\",\\"node\\":\\"123.69.420-node\\"}", + String( + Not compatible with your version of node/npm: some@package + Required: undefined + Actual: {"npm":"123.69.420-npm","node":"123.69.420-node"} + ), ], ], "summary": Array [ @@ -892,7 +1091,10 @@ Object { "detail": Array [ Array [ "nospc", - "There appears to be insufficient space on your system to finish.\\nClear up some disk space and try again.", + String( + There appears to be insufficient space on your system to finish. + Clear up some disk space and try again. + ), ], ], "summary": Array [ @@ -909,7 +1111,10 @@ Object { "detail": Array [ Array [ "rofs", - "Often virtualized file systems, or other file systems\\nthat don't support symlinks, give this error.", + String( + Often virtualized file systems, or other file systems + that don't support symlinks, give this error. + ), ], ], "summary": Array [ @@ -926,7 +1131,11 @@ Object { "detail": Array [ Array [ "enoent", - "This is related to npm not being able to find a file.\\n\\nCheck if the file '/some/file' is present.", + String( + This is related to npm not being able to find a file. + + Check if the file '/some/file' is present. + ), ], ], "summary": Array [ @@ -943,7 +1152,10 @@ Object { "detail": Array [ Array [ "typeerror", - "This is an error with npm itself. Please report this error at:\\n https://github.com/npm/cli/issues", + String( + This is an error with npm itself. Please report this error at: + https://github.com/npm/cli/issues + ), ], ], "summary": Array [ @@ -960,7 +1172,10 @@ Object { "detail": Array [ Array [ "typeerror", - "This is an error with npm itself. Please report this error at:\\n https://github.com/npm/cli/issues", + String( + This is an error with npm itself. Please report this error at: + https://github.com/npm/cli/issues + ), ], ], "summary": Array [ @@ -977,7 +1192,10 @@ Object { "detail": Array [ Array [ "typeerror", - "This is an error with npm itself. 
Please report this error at:\\n https://github.com/npm/cli/issues", + String( + This is an error with npm itself. Please report this error at: + https://github.com/npm/cli/issues + ), ], ], "summary": Array [ @@ -1015,7 +1233,10 @@ Object { "detail": Array [ Array [ "typeerror", - "This is an error with npm itself. Please report this error at:\\n https://github.com/npm/cli/issues", + String( + This is an error with npm itself. Please report this error at: + https://github.com/npm/cli/issues + ), ], ], "summary": Array [ @@ -1032,7 +1253,10 @@ Object { "detail": Array [ Array [ "notarget", - "In most cases you or one of your dependencies are requesting\\na package version that doesn't exist.", + String( + In most cases you or one of your dependencies are requesting + a package version that doesn't exist. + ), ], ], "summary": Array [ @@ -1049,7 +1273,11 @@ Object { "detail": Array [ Array [ "403", - "In most cases, you or one of your dependencies are requesting\\na package version that is forbidden by your security policy, or\\non a server you do not have access to.", + String( + In most cases, you or one of your dependencies are requesting + a package version that is forbidden by your security policy, or + on a server you do not have access to. + ), ], ], "summary": Array [ @@ -1061,12 +1289,39 @@ Object { } ` +exports[`test/lib/utils/error-message.js TAP just simple messages > must match snapshot 23`] = ` +Object { + "detail": Array [ + Array [ + "network", + String( + This is a problem related to network connectivity. + In most cases you are behind a proxy or have bad network settings. + + If you are behind a proxy, please make sure that the + 'proxy' config is set properly. See: 'npm help config' + ), + ], + ], + "summary": Array [ + Array [ + "network", + "foo", + ], + ], +} +` + exports[`test/lib/utils/error-message.js TAP just simple messages > must match snapshot 3`] = ` Object { "detail": Array [ Array [ "", - "\\nIf you are behind a proxy, please make sure that the\\n'proxy' config is set properly. See: 'npm help config'", + String( + + If you are behind a proxy, please make sure that the + 'proxy' config is set properly. See: 'npm help config' + ), ], ], "summary": Array [ @@ -1088,7 +1343,11 @@ Object { "detail": Array [ Array [ "", - "\\nFailed using git.\\nPlease check if you have git installed and in your PATH.", + String( + + Failed using git. + Please check if you have git installed and in your PATH. + ), ], ], "summary": Array [ @@ -1134,7 +1393,10 @@ Object { "detail": Array [ Array [ "git", - "Refusing to remove it. Update manually,\\nor move it out of the way first.", + String( + Refusing to remove it. Update manually, + or move it out of the way first. + ), ], ], "summary": Array [ @@ -1197,7 +1459,13 @@ Object { "detail": Array [ Array [ "network", - "This is a problem related to network connectivity.\\nIn most cases you are behind a proxy or have bad network settings.\\n\\nIf you are behind a proxy, please make sure that the\\n'proxy' config is set properly. See: 'npm help config'", + String( + This is a problem related to network connectivity. + In most cases you are behind a proxy or have bad network settings. + + If you are behind a proxy, please make sure that the + 'proxy' config is set properly. See: 'npm help config' + ), ], ], "summary": Array [ @@ -1226,7 +1494,10 @@ Object { "detail": Array [ Array [ "JSON.parse", - "Failed to parse JSON data.\\nNote: package.json must be actual JSON, not just JavaScript.", + String( + Failed to parse JSON data. 
+ Note: package.json must be actual JSON, not just JavaScript. + ), ], ], "summary": Array [ @@ -1243,7 +1514,10 @@ Object { "detail": Array [ Array [ "JSON.parse", - "Failed to parse JSON data.\\nNote: package.json must be actual JSON, not just JavaScript.", + String( + Failed to parse JSON data. + Note: package.json must be actual JSON, not just JavaScript. + ), ], ], "summary": Array [ @@ -1260,7 +1534,13 @@ Object { "detail": Array [ Array [ "", - "Merge conflict detected in your package.json.\\n\\nPlease resolve the package.json conflict and retry the command:\\n\\n$ arg v", + String( + Merge conflict detected in your package.json. + + Please resolve the package.json conflict and retry the command: + + $ arg v + ), ], ], "summary": Array [], diff --git a/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs b/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs new file mode 100644 index 0000000000000..eb383c104a674 --- /dev/null +++ b/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs @@ -0,0 +1,20 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/lib/utils/exit-handler.js TAP handles unknown error > should have expected log contents for unknown error 1`] = ` +24 verbose stack Error: ERROR +25 verbose cwd {CWD} +26 verbose Foo 1.0.0 +27 verbose argv "/node" "{CWD}/test/lib/utils/exit-handler.js" +28 verbose node v1.0.0 +29 verbose npm v1.0.0 +30 error code ERROR +31 error ERR ERROR +32 error ERR ERROR +33 verbose exit 1 + +` diff --git a/tap-snapshots/test-lib-utils-explain-dep.js-TAP.test.js b/tap-snapshots/test/lib/utils/explain-dep.js.test.cjs similarity index 92% rename from tap-snapshots/test-lib-utils-explain-dep.js-TAP.test.js rename to tap-snapshots/test/lib/utils/explain-dep.js.test.cjs index 7e77081f9d636..4d6f4686df8c4 100644 --- a/tap-snapshots/test-lib-utils-explain-dep.js-TAP.test.js +++ b/tap-snapshots/test/lib/utils/explain-dep.js.test.cjs @@ -199,3 +199,29 @@ exports[`test/lib/utils/explain-dep.js TAP prodDep > print nocolor 1`] = ` prod-dep@1.2.3 node_modules/prod-dep ` + +exports[`test/lib/utils/explain-dep.js TAP workspaces > explain color deep 1`] = ` +a@1.0.0 +a + a@1.0.0 + node_modules/a + workspace a from the root project +` + +exports[`test/lib/utils/explain-dep.js TAP workspaces > explain nocolor shallow 1`] = ` +a@1.0.0 +a + a@1.0.0 + node_modules/a + workspace a from the root project +` + +exports[`test/lib/utils/explain-dep.js TAP workspaces > print color 1`] = ` +a@1.0.0 +a +` + +exports[`test/lib/utils/explain-dep.js TAP workspaces > print nocolor 1`] = ` +a@1.0.0 +a +` diff --git a/tap-snapshots/test-lib-utils-explain-eresolve.js-TAP.test.js b/tap-snapshots/test/lib/utils/explain-eresolve.js.test.cjs similarity index 62% rename from tap-snapshots/test-lib-utils-explain-eresolve.js-TAP.test.js rename to tap-snapshots/test/lib/utils/explain-eresolve.js.test.cjs index 87dcb861c633f..354081d110319 100644 --- a/tap-snapshots/test-lib-utils-explain-eresolve.js-TAP.test.js +++ b/tap-snapshots/test/lib/utils/explain-eresolve.js.test.cjs @@ -5,7 +5,7 @@ * Make sure to inspect the output below. Do not ignore changes! 
*/ 'use strict' -exports[`test/lib/utils/explain-eresolve.js TAP chain-conflict > explain with color 1`] = ` +exports[`test/lib/utils/explain-eresolve.js TAP chain-conflict > explain with color, depth of 2 1`] = ` While resolving: project@1.2.3 Found: @isaacs/testing-peer-dep-conflict-chain-d@2.0.0 node_modules/@isaacs/testing-peer-dep-conflict-chain-d @@ -75,25 +75,7 @@ to accept an incorrect (and potentially broken) dependency resolution. See \${REPORT} for a full report. ` -exports[`test/lib/utils/explain-eresolve.js TAP chain-conflict > report with color, depth only 2 1`] = ` -While resolving: project@1.2.3 -Found: @isaacs/testing-peer-dep-conflict-chain-d@2.0.0 -node_modules/@isaacs/testing-peer-dep-conflict-chain-d - @isaacs/testing-peer-dep-conflict-chain-d@"2" from the root project - -Could not resolve dependency: -peer @isaacs/testing-peer-dep-conflict-chain-d@"1" from @isaacs/testing-peer-dep-conflict-chain-c@1.0.0 -node_modules/@isaacs/testing-peer-dep-conflict-chain-c - @isaacs/testing-peer-dep-conflict-chain-c@"1" from the root project - -Fix the upstream dependency conflict, or retry -this command with --force, or --legacy-peer-deps -to accept an incorrect (and potentially broken) dependency resolution. - -See \${REPORT} for a full report. -` - -exports[`test/lib/utils/explain-eresolve.js TAP chain-conflict > report with no color, depth of 6 1`] = ` +exports[`test/lib/utils/explain-eresolve.js TAP chain-conflict > report with no color 1`] = ` While resolving: project@1.2.3 Found: @isaacs/testing-peer-dep-conflict-chain-d@2.0.0 node_modules/@isaacs/testing-peer-dep-conflict-chain-d @@ -111,7 +93,7 @@ to accept an incorrect (and potentially broken) dependency resolution. See \${REPORT} for a full report. ` -exports[`test/lib/utils/explain-eresolve.js TAP cycleNested > explain with color 1`] = ` +exports[`test/lib/utils/explain-eresolve.js TAP cycleNested > explain with color, depth of 2 1`] = ` Found: @isaacs/peer-dep-cycle-c@2.0.0 node_modules/@isaacs/peer-dep-cycle-c @isaacs/peer-dep-cycle-c@"2.x" from the root project @@ -208,31 +190,7 @@ to accept an incorrect (and potentially broken) dependency resolution. See \${REPORT} for a full report. ` -exports[`test/lib/utils/explain-eresolve.js TAP cycleNested > report with color, depth only 2 1`] = ` -Found: @isaacs/peer-dep-cycle-c@2.0.0 -node_modules/@isaacs/peer-dep-cycle-c - @isaacs/peer-dep-cycle-c@"2.x" from the root project - -Could not resolve dependency: -peer @isaacs/peer-dep-cycle-b@"1" from @isaacs/peer-dep-cycle-a@1.0.0 -node_modules/@isaacs/peer-dep-cycle-a - @isaacs/peer-dep-cycle-a@"1.x" from the root project - -Conflicting peer dependency: @isaacs/peer-dep-cycle-c@1.0.0 -node_modules/@isaacs/peer-dep-cycle-c - peer @isaacs/peer-dep-cycle-c@"1" from @isaacs/peer-dep-cycle-b@1.0.0 - node_modules/@isaacs/peer-dep-cycle-b - peer @isaacs/peer-dep-cycle-b@"1" from @isaacs/peer-dep-cycle-a@1.0.0 - node_modules/@isaacs/peer-dep-cycle-a - -Fix the upstream dependency conflict, or retry -this command with --no-strict-peer-deps, --force, or --legacy-peer-deps -to accept an incorrect (and potentially broken) dependency resolution. - -See \${REPORT} for a full report. 
-` - -exports[`test/lib/utils/explain-eresolve.js TAP cycleNested > report with no color, depth of 6 1`] = ` +exports[`test/lib/utils/explain-eresolve.js TAP cycleNested > report with no color 1`] = ` Found: @isaacs/peer-dep-cycle-c@2.0.0 node_modules/@isaacs/peer-dep-cycle-c @isaacs/peer-dep-cycle-c@"2.x" from the root project @@ -257,7 +215,143 @@ to accept an incorrect (and potentially broken) dependency resolution. See \${REPORT} for a full report. ` -exports[`test/lib/utils/explain-eresolve.js TAP gatsby > explain with color 1`] = ` +exports[`test/lib/utils/explain-eresolve.js TAP eslint-plugin case > explain with color, depth of 2 1`] = ` +While resolving: eslint-plugin-react@7.24.0 +Found: eslint@6.8.0 +node_modules/eslint + dev eslint@"^3 || ^4 || ^5 || ^6 || ^7" from the root project + 3 more (@typescript-eslint/parser, ...) + +Could not resolve dependency: +dev eslint-plugin-eslint-plugin@"^3.1.0" from the root project + +Conflicting peer dependency: eslint@7.31.0 +node_modules/eslint + peer eslint@"^7.0.0" from eslint-plugin-eslint-plugin@3.5.1 + node_modules/eslint-plugin-eslint-plugin + dev eslint-plugin-eslint-plugin@"^3.1.0" from the root project +` + +exports[`test/lib/utils/explain-eresolve.js TAP eslint-plugin case > explain with no color, depth of 6 1`] = ` +While resolving: eslint-plugin-react@7.24.0 +Found: eslint@6.8.0 +node_modules/eslint + dev eslint@"^3 || ^4 || ^5 || ^6 || ^7" from the root project + peer eslint@"^5.0.0 || ^6.0.0" from @typescript-eslint/parser@2.34.0 + node_modules/@typescript-eslint/parser + dev @typescript-eslint/parser@"^2.34.0" from the root project + peer eslint@"^5.16.0 || ^6.8.0 || ^7.2.0" from eslint-config-airbnb-base@14.2.1 + node_modules/eslint-config-airbnb-base + dev eslint-config-airbnb-base@"^14.2.1" from the root project + 1 more (eslint-plugin-import) + +Could not resolve dependency: +dev eslint-plugin-eslint-plugin@"^3.1.0" from the root project + +Conflicting peer dependency: eslint@7.31.0 +node_modules/eslint + peer eslint@"^7.0.0" from eslint-plugin-eslint-plugin@3.5.1 + node_modules/eslint-plugin-eslint-plugin + dev eslint-plugin-eslint-plugin@"^3.1.0" from the root project +` + +exports[`test/lib/utils/explain-eresolve.js TAP eslint-plugin case > report 1`] = ` +# npm resolution error report + +\${TIME} + +While resolving: eslint-plugin-react@7.24.0 +Found: eslint@6.8.0 +node_modules/eslint + dev eslint@"^3 || ^4 || ^5 || ^6 || ^7" from the root project + peer eslint@"^5.0.0 || ^6.0.0" from @typescript-eslint/parser@2.34.0 + node_modules/@typescript-eslint/parser + dev @typescript-eslint/parser@"^2.34.0" from the root project + peer eslint@"^5.16.0 || ^6.8.0 || ^7.2.0" from eslint-config-airbnb-base@14.2.1 + node_modules/eslint-config-airbnb-base + dev eslint-config-airbnb-base@"^14.2.1" from the root project + peer eslint@"^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0" from eslint-plugin-import@2.23.4 + node_modules/eslint-plugin-import + dev eslint-plugin-import@"^2.23.4" from the root project + peer eslint-plugin-import@"^2.22.1" from eslint-config-airbnb-base@14.2.1 + node_modules/eslint-config-airbnb-base + dev eslint-config-airbnb-base@"^14.2.1" from the root project + +Could not resolve dependency: +dev eslint-plugin-eslint-plugin@"^3.1.0" from the root project + +Conflicting peer dependency: eslint@7.31.0 +node_modules/eslint + peer eslint@"^7.0.0" from eslint-plugin-eslint-plugin@3.5.1 + node_modules/eslint-plugin-eslint-plugin + dev eslint-plugin-eslint-plugin@"^3.1.0" from the root project + +Fix the upstream dependency 
conflict, or retry +this command with --force, or --legacy-peer-deps +to accept an incorrect (and potentially broken) dependency resolution. + +Raw JSON explanation object: + +{ + "name": "eslint-plugin case", + "json": true +} + +` + +exports[`test/lib/utils/explain-eresolve.js TAP eslint-plugin case > report with color 1`] = ` +While resolving: eslint-plugin-react@7.24.0 +Found: eslint@6.8.0 +node_modules/eslint + dev eslint@"^3 || ^4 || ^5 || ^6 || ^7" from the root project + peer eslint@"^5.0.0 || ^6.0.0" from @typescript-eslint/parser@2.34.0 + node_modules/@typescript-eslint/parser + dev @typescript-eslint/parser@"^2.34.0" from the root project + 2 more (eslint-config-airbnb-base, eslint-plugin-import) + +Could not resolve dependency: +dev eslint-plugin-eslint-plugin@"^3.1.0" from the root project + +Conflicting peer dependency: eslint@7.31.0 +node_modules/eslint + peer eslint@"^7.0.0" from eslint-plugin-eslint-plugin@3.5.1 + node_modules/eslint-plugin-eslint-plugin + dev eslint-plugin-eslint-plugin@"^3.1.0" from the root project + +Fix the upstream dependency conflict, or retry +this command with --force, or --legacy-peer-deps +to accept an incorrect (and potentially broken) dependency resolution. + +See \${REPORT} for a full report. +` + +exports[`test/lib/utils/explain-eresolve.js TAP eslint-plugin case > report with no color 1`] = ` +While resolving: eslint-plugin-react@7.24.0 +Found: eslint@6.8.0 +node_modules/eslint + dev eslint@"^3 || ^4 || ^5 || ^6 || ^7" from the root project + peer eslint@"^5.0.0 || ^6.0.0" from @typescript-eslint/parser@2.34.0 + node_modules/@typescript-eslint/parser + dev @typescript-eslint/parser@"^2.34.0" from the root project + 2 more (eslint-config-airbnb-base, eslint-plugin-import) + +Could not resolve dependency: +dev eslint-plugin-eslint-plugin@"^3.1.0" from the root project + +Conflicting peer dependency: eslint@7.31.0 +node_modules/eslint + peer eslint@"^7.0.0" from eslint-plugin-eslint-plugin@3.5.1 + node_modules/eslint-plugin-eslint-plugin + dev eslint-plugin-eslint-plugin@"^3.1.0" from the root project + +Fix the upstream dependency conflict, or retry +this command with --force, or --legacy-peer-deps +to accept an incorrect (and potentially broken) dependency resolution. + +See \${REPORT} for a full report. +` + +exports[`test/lib/utils/explain-eresolve.js TAP gatsby > explain with color, depth of 2 1`] = ` While resolving: gatsby-recipes@0.2.31 Found: ink@3.0.0-7 node_modules/ink @@ -366,29 +460,7 @@ to accept an incorrect (and potentially broken) dependency resolution. See \${REPORT} for a full report. ` -exports[`test/lib/utils/explain-eresolve.js TAP gatsby > report with color, depth only 2 1`] = ` -While resolving: gatsby-recipes@0.2.31 -Found: ink@3.0.0-7 -node_modules/ink - dev ink@"next" from gatsby-recipes@0.2.31 - node_modules/gatsby-recipes - gatsby-recipes@"^0.2.31" from gatsby-cli@2.12.107 - node_modules/gatsby-cli - -Could not resolve dependency: -peer ink@">=2.0.0" from ink-box@1.0.0 -node_modules/ink-box - ink-box@"^1.0.0" from gatsby-recipes@0.2.31 - node_modules/gatsby-recipes - -Fix the upstream dependency conflict, or retry -this command with --no-strict-peer-deps, --force, or --legacy-peer-deps -to accept an incorrect (and potentially broken) dependency resolution. - -See \${REPORT} for a full report. 
-` - -exports[`test/lib/utils/explain-eresolve.js TAP gatsby > report with no color, depth of 6 1`] = ` +exports[`test/lib/utils/explain-eresolve.js TAP gatsby > report with no color 1`] = ` While resolving: gatsby-recipes@0.2.31 Found: ink@3.0.0-7 node_modules/ink @@ -409,7 +481,6 @@ node_modules/ink-box node_modules/gatsby-cli gatsby-cli@"^2.12.107" from gatsby@2.24.74 node_modules/gatsby - gatsby@"" from the root project Fix the upstream dependency conflict, or retry this command with --no-strict-peer-deps, --force, or --legacy-peer-deps @@ -418,7 +489,173 @@ to accept an incorrect (and potentially broken) dependency resolution. See \${REPORT} for a full report. ` -exports[`test/lib/utils/explain-eresolve.js TAP withShrinkwrap > explain with color 1`] = ` +exports[`test/lib/utils/explain-eresolve.js TAP no current node, but has current edge > explain with color, depth of 2 1`] = ` +While resolving: eslint@7.22.0 +Found: dev eslint@"file:." from the root project + +Could not resolve dependency: +peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project +` + +exports[`test/lib/utils/explain-eresolve.js TAP no current node, but has current edge > explain with no color, depth of 6 1`] = ` +While resolving: eslint@7.22.0 +Found: dev eslint@"file:." from the root project + +Could not resolve dependency: +peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project +` + +exports[`test/lib/utils/explain-eresolve.js TAP no current node, but has current edge > report 1`] = ` +# npm resolution error report + +\${TIME} + +While resolving: eslint@7.22.0 +Found: dev eslint@"file:." from the root project + +Could not resolve dependency: +peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project + +Fix the upstream dependency conflict, or retry +this command with --force, or --legacy-peer-deps +to accept an incorrect (and potentially broken) dependency resolution. + +Raw JSON explanation object: + +{ + "name": "no current node, but has current edge", + "json": true +} + +` + +exports[`test/lib/utils/explain-eresolve.js TAP no current node, but has current edge > report with color 1`] = ` +While resolving: eslint@7.22.0 +Found: dev eslint@"file:." from the root project + +Could not resolve dependency: +peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project + +Fix the upstream dependency conflict, or retry +this command with --force, or --legacy-peer-deps +to accept an incorrect (and potentially broken) dependency resolution. + +See \${REPORT} for a full report. +` + +exports[`test/lib/utils/explain-eresolve.js TAP no current node, but has current edge > report with no color 1`] = ` +While resolving: eslint@7.22.0 +Found: dev eslint@"file:." from the root project + +Could not resolve dependency: +peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project + +Fix the upstream dependency conflict, or retry +this command with --force, or --legacy-peer-deps +to accept an incorrect (and potentially broken) dependency resolution. + +See \${REPORT} for a full report. 
+` + +exports[`test/lib/utils/explain-eresolve.js TAP no current node, no current edge, idk > explain with color, depth of 2 1`] = ` +While resolving: eslint@7.22.0 +Found: peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project + +Could not resolve dependency: +peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project +` + +exports[`test/lib/utils/explain-eresolve.js TAP no current node, no current edge, idk > explain with no color, depth of 6 1`] = ` +While resolving: eslint@7.22.0 +Found: peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project + +Could not resolve dependency: +peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project +` + +exports[`test/lib/utils/explain-eresolve.js TAP no current node, no current edge, idk > report 1`] = ` +# npm resolution error report + +\${TIME} + +While resolving: eslint@7.22.0 +Found: peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project + +Could not resolve dependency: +peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project + +Fix the upstream dependency conflict, or retry +this command with --force, or --legacy-peer-deps +to accept an incorrect (and potentially broken) dependency resolution. + +Raw JSON explanation object: + +{ + "name": "no current node, no current edge, idk", + "json": true +} + +` + +exports[`test/lib/utils/explain-eresolve.js TAP no current node, no current edge, idk > report with color 1`] = ` +While resolving: eslint@7.22.0 +Found: peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project + +Could not resolve dependency: +peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project + +Fix the upstream dependency conflict, or retry +this command with --force, or --legacy-peer-deps +to accept an incorrect (and potentially broken) dependency resolution. + +See \${REPORT} for a full report. +` + +exports[`test/lib/utils/explain-eresolve.js TAP no current node, no current edge, idk > report with no color 1`] = ` +While resolving: eslint@7.22.0 +Found: peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project + +Could not resolve dependency: +peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0 +node_modules/eslint-plugin-jsdoc + dev eslint-plugin-jsdoc@"^22.1.0" from the root project + +Fix the upstream dependency conflict, or retry +this command with --force, or --legacy-peer-deps +to accept an incorrect (and potentially broken) dependency resolution. + +See \${REPORT} for a full report. +` + +exports[`test/lib/utils/explain-eresolve.js TAP withShrinkwrap > explain with color, depth of 2 1`] = ` While resolving: @isaacs/peer-dep-cycle-b@1.0.0 Found: @isaacs/peer-dep-cycle-c@2.0.0 node_modules/@isaacs/peer-dep-cycle-c @@ -495,26 +732,7 @@ to accept an incorrect (and potentially broken) dependency resolution. See \${REPORT} for a full report. 
` -exports[`test/lib/utils/explain-eresolve.js TAP withShrinkwrap > report with color, depth only 2 1`] = ` -While resolving: @isaacs/peer-dep-cycle-b@1.0.0 -Found: @isaacs/peer-dep-cycle-c@2.0.0 -node_modules/@isaacs/peer-dep-cycle-c - @isaacs/peer-dep-cycle-c@"2.x" from the root project - -Could not resolve dependency: -peer @isaacs/peer-dep-cycle-c@"1" from @isaacs/peer-dep-cycle-b@1.0.0 -node_modules/@isaacs/peer-dep-cycle-b - peer @isaacs/peer-dep-cycle-b@"1" from @isaacs/peer-dep-cycle-a@1.0.0 - node_modules/@isaacs/peer-dep-cycle-a - -Fix the upstream dependency conflict, or retry -this command with --no-strict-peer-deps, --force, or --legacy-peer-deps -to accept an incorrect (and potentially broken) dependency resolution. - -See \${REPORT} for a full report. -` - -exports[`test/lib/utils/explain-eresolve.js TAP withShrinkwrap > report with no color, depth of 6 1`] = ` +exports[`test/lib/utils/explain-eresolve.js TAP withShrinkwrap > report with no color 1`] = ` While resolving: @isaacs/peer-dep-cycle-b@1.0.0 Found: @isaacs/peer-dep-cycle-c@2.0.0 node_modules/@isaacs/peer-dep-cycle-c diff --git a/tap-snapshots/test/lib/utils/npm-usage.js.test.cjs b/tap-snapshots/test/lib/utils/npm-usage.js.test.cjs new file mode 100644 index 0000000000000..50f6481f6e848 --- /dev/null +++ b/tap-snapshots/test/lib/utils/npm-usage.js.test.cjs @@ -0,0 +1,1159 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/lib/utils/npm-usage.js TAP usage basic usage > must match snapshot 1`] = ` +npm <command> + +Usage: + +npm install install all the dependencies in your project +npm install <foo> add the <foo> dependency to your project +npm test run this project's tests +npm run <foo> run the script named <foo> +npm <command> -h quick help on <command> +npm -l display usage info for all commands +npm help <term> search for help on <term> +npm help npm more involved overview + +All commands: + + access, adduser, audit, bin, bugs, cache, ci, completion, + config, dedupe, deprecate, diff, dist-tag, docs, doctor, + edit, exec, explain, explore, find-dupes, fund, get, help, + hook, init, install, install-ci-test, install-test, link, + ll, login, logout, ls, org, outdated, owner, pack, ping, + pkg, prefix, profile, prune, publish, rebuild, repo, + restart, root, run-script, search, set, set-script, + shrinkwrap, star, stars, start, stop, team, test, token, + uninstall, unpublish, unstar, update, version, view, whoami + +Specify configs in the ini-formatted file: + /some/config/file/.npmrc +or on the command line via: npm <command> --key=value + +More configuration info: npm help config +Configuration fields: npm help 7 config + +npm@{VERSION} {BASEDIR} +` + +exports[`test/lib/utils/npm-usage.js TAP usage set process.stdout.columns columns=0 > must match snapshot 1`] = ` +npm <command> + +Usage: + +npm install install all the dependencies in your project +npm install <foo> add the <foo> dependency to your project +npm test run this project's tests +npm run <foo> run the script named <foo> +npm <command> -h quick help on <command> +npm -l display usage info for all commands +npm help <term> search for help on <term> +npm help npm more involved overview + +All commands: + + access, adduser, audit, bin, bugs, cache, ci, completion, + config, dedupe, deprecate, diff, 
dist-tag, docs, doctor, + edit, exec, explain, explore, find-dupes, fund, get, help, + hook, init, install, install-ci-test, install-test, link, + ll, login, logout, ls, org, outdated, owner, pack, ping, + pkg, prefix, profile, prune, publish, rebuild, repo, + restart, root, run-script, search, set, set-script, + shrinkwrap, star, stars, start, stop, team, test, token, + uninstall, unpublish, unstar, update, version, view, whoami + +Specify configs in the ini-formatted file: + /some/config/file/.npmrc +or on the command line via: npm <command> --key=value + +More configuration info: npm help config +Configuration fields: npm help 7 config + +npm@{VERSION} {BASEDIR} +` + +exports[`test/lib/utils/npm-usage.js TAP usage set process.stdout.columns columns=90 > must match snapshot 1`] = ` +npm <command> + +Usage: + +npm install install all the dependencies in your project +npm install <foo> add the <foo> dependency to your project +npm test run this project's tests +npm run <foo> run the script named <foo> +npm <command> -h quick help on <command> +npm -l display usage info for all commands +npm help <term> search for help on <term> +npm help npm more involved overview + +All commands: + + access, adduser, audit, bin, bugs, cache, ci, completion, + config, dedupe, deprecate, diff, dist-tag, docs, doctor, + edit, exec, explain, explore, find-dupes, fund, get, help, + hook, init, install, install-ci-test, install-test, link, + ll, login, logout, ls, org, outdated, owner, pack, ping, + pkg, prefix, profile, prune, publish, rebuild, repo, + restart, root, run-script, search, set, set-script, + shrinkwrap, star, stars, start, stop, team, test, token, + uninstall, unpublish, unstar, update, version, view, whoami + +Specify configs in the ini-formatted file: + /some/config/file/.npmrc +or on the command line via: npm <command> --key=value + +More configuration info: npm help config +Configuration fields: npm help 7 config + +npm@{VERSION} {BASEDIR} +` + +exports[`test/lib/utils/npm-usage.js TAP usage with browser > must match snapshot 1`] = ` +npm <command> + +Usage: + +npm install install all the dependencies in your project +npm install <foo> add the <foo> dependency to your project +npm test run this project's tests +npm run <foo> run the script named <foo> +npm <command> -h quick help on <command> +npm -l display usage info for all commands +npm help <term> search for help on <term> (in a browser) +npm help npm more involved overview (in a browser) + +All commands: + + access, adduser, audit, bin, bugs, cache, ci, completion, + config, dedupe, deprecate, diff, dist-tag, docs, doctor, + edit, exec, explain, explore, find-dupes, fund, get, help, + hook, init, install, install-ci-test, install-test, link, + ll, login, logout, ls, org, outdated, owner, pack, ping, + pkg, prefix, profile, prune, publish, rebuild, repo, + restart, root, run-script, search, set, set-script, + shrinkwrap, star, stars, start, stop, team, test, token, + uninstall, unpublish, unstar, update, version, view, whoami + +Specify configs in the ini-formatted file: + /some/config/file/.npmrc +or on the command line via: npm <command> --key=value + +More configuration info: npm help config +Configuration fields: npm help 7 config + +npm@{VERSION} {BASEDIR} +` + +exports[`test/lib/utils/npm-usage.js TAP usage with long > must match snapshot 1`] = ` +npm <command> + +Usage: + +npm install install all the dependencies in your project +npm install <foo> add the <foo> dependency to your project +npm test run this project's tests +npm run 
<foo> run the script named <foo> +npm <command> -h quick help on <command> +npm -l display usage info for all commands +npm help <term> search for help on <term> +npm help npm more involved overview + +All commands: + + access npm access + + Set access level on published packages + + Usage: + npm access public [<package>] + npm access restricted [<package>] + npm access grant <read-only|read-write> <scope:team> [<package>] + npm access revoke <scope:team> [<package>] + npm access 2fa-required [<package>] + npm access 2fa-not-required [<package>] + npm access ls-packages [<user>|<scope>|<scope:team>] + npm access ls-collaborators [<package> [<user>]] + npm access edit [<package>] + + Options: + [--registry <registry>] [--otp <otp>] + + Run "npm help access" for more info + + adduser npm adduser + + Add a registry user account + + Usage: + npm adduser + + Options: + [--registry <registry>] [--scope <@scope>] + + aliases: login, add-user + + Run "npm help adduser" for more info + + audit npm audit + + Run a security audit + + Usage: + npm audit [fix] + + Options: + [--audit-level <info|low|moderate|high|critical|none>] [--dry-run] [-f|--force] + [--json] [--package-lock-only] + [--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + Run "npm help audit" for more info + + bin npm bin + + Display npm bin folder + + Usage: + npm bin + + Options: + [-g|--global] + + Run "npm help bin" for more info + + bugs npm bugs + + Report bugs for a package in a web browser + + Usage: + npm bugs [<pkgname>] + + Options: + [--no-browser|--browser <browser>] [--registry <registry>] + + alias: issues + + Run "npm help bugs" for more info + + cache npm cache + + Manipulates packages cache + + Usage: + npm cache add <tarball file> + npm cache add <folder> + npm cache add <tarball url> + npm cache add <git url> + npm cache add <name>@<version> + npm cache clean + npm cache verify + + Options: + [--cache <cache>] + + Run "npm help cache" for more info + + ci npm ci + + Install a project with a clean slate + + Usage: + npm ci + + Options: + [--no-audit] [--ignore-scripts] [--script-shell <script-shell>] + + aliases: clean-install, ic, install-clean, isntall-clean + + Run "npm help ci" for more info + + completion npm completion + + Tab Completion for npm + + Usage: + npm completion + + Run "npm help completion" for more info + + config npm config + + Manage the npm configuration files + + Usage: + npm config set <key>=<value> [<key>=<value> ...] + npm config get [<key> [<key> ...]] + npm config delete <key> [<key> ...] 
+ npm config list [--json] + npm config edit + + Options: + [--json] [-g|--global] [--editor <editor>] [-L|--location <global|user|project>] + [-l|--long] + + alias: c + + Run "npm help config" for more info + + dedupe npm dedupe + + Reduce duplication in the package tree + + Usage: + npm dedupe + + Options: + [--global-style] [--legacy-bundling] [--strict-peer-deps] [--no-package-lock] + [--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--ignore-scripts] + [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + alias: ddp + + Run "npm help dedupe" for more info + + deprecate npm deprecate + + Deprecate a version of a package + + Usage: + npm deprecate <pkg>[@<version>] <message> + + Options: + [--registry <registry>] [--otp <otp>] + + Run "npm help deprecate" for more info + + diff npm diff + + The registry diff command + + Usage: + npm diff [...<paths>] + + Options: + [--diff <pkg-name|spec|version> [--diff <pkg-name|spec|version> ...]] + [--diff-name-only] [--diff-unified <number>] [--diff-ignore-all-space] + [--diff-no-prefix] [--diff-src-prefix <path>] [--diff-dst-prefix <path>] + [--diff-text] [-g|--global] [--tag <tag>] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + Run "npm help diff" for more info + + dist-tag npm dist-tag + + Modify package distribution tags + + Usage: + npm dist-tag add <pkg>@<version> [<tag>] + npm dist-tag rm <pkg> <tag> + npm dist-tag ls [<pkg>] + + Options: + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + alias: dist-tags + + Run "npm help dist-tag" for more info + + docs npm docs + + Open documentation for a package in a web browser + + Usage: + npm docs [<pkgname> [<pkgname> ...]] + + Options: + [--no-browser|--browser <browser>] [--registry <registry>] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + alias: home + + Run "npm help docs" for more info + + doctor npm doctor + + Check your npm environment + + Usage: + npm doctor + + Options: + [--registry <registry>] + + Run "npm help doctor" for more info + + edit npm edit + + Edit an installed package + + Usage: + npm edit <pkg>[/<subpkg>...] + + Options: + [--editor <editor>] + + Run "npm help edit" for more info + + exec npm exec + + Run a command from a local or remote npm package + + Usage: + npm exec -- <pkg>[@<version>] [args...] + npm exec --package=<pkg>[@<version>] -- <cmd> [args...] 
+ npm exec -c '<cmd> [args...]' + npm exec --package=foo -c '<cmd> [args...]' + + Options: + [--package <pkg>[@<version>] [--package <pkg>[@<version>] ...]] + [-c|--call <call>] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + alias: x + + Run "npm help exec" for more info + + explain npm explain + + Explain installed packages + + Usage: + npm explain <folder | specifier> + + Options: + [--json] [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + + alias: why + + Run "npm help explain" for more info + + explore npm explore + + Browse an installed package + + Usage: + npm explore <pkg> [ -- <command>] + + Options: + [--shell <shell>] + + Run "npm help explore" for more info + + find-dupes npm find-dupes + + Find duplication in the package tree + + Usage: + npm find-dupes + + Options: + [--global-style] [--legacy-bundling] [--strict-peer-deps] [--no-package-lock] + [--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--ignore-scripts] + [--no-audit] [--no-bin-links] [--no-fund] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + Run "npm help find-dupes" for more info + + fund npm fund + + Retrieve funding information + + Usage: + npm fund [[<@scope>/]<pkg>] + + Options: + [--json] [--no-browser|--browser <browser>] [--unicode] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [--which <fundingSourceNumber>] + + Run "npm help fund" for more info + + get npm get + + Get a value from the npm configuration + + Usage: + npm get [<key> ...] (See \`npm config\`) + + Run "npm help get" for more info + + help npm help + + Get help on npm + + Usage: + npm help <term> [<terms..>] + + Options: + [--viewer <viewer>] + + alias: hlep + + Run "npm help help" for more info + + hook npm hook + + Manage registry hooks + + Usage: + npm hook add <pkg> <url> <secret> [--type=<type>] + npm hook ls [pkg] + npm hook rm <id> + npm hook update <id> <url> <secret> + + Options: + [--registry <registry>] [--otp <otp>] + + Run "npm help hook" for more info + + init npm init + + Create a package.json file + + Usage: + npm init [--force|-f|--yes|-y|--scope] + npm init <@scope> (same as \`npx <@scope>/create\`) + npm init [<@scope>/]<name> (same as \`npx [<@scope>/]create-<name>\`) + + Options: + [-y|--yes] [-f|--force] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + aliases: create, innit + + Run "npm help init" for more info + + install npm install + + Install a package + + Usage: + npm install [<@scope>/]<pkg> + npm install [<@scope>/]<pkg>@<tag> + npm install [<@scope>/]<pkg>@<version> + npm install [<@scope>/]<pkg>@<version range> + npm install <alias>@npm:<name> + npm install <folder> + npm install <tarball file> + npm install <tarball url> + npm install <git:// url> + npm install <github username>/<github project> + + Options: + [-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer] + [-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] + [--strict-peer-deps] [--no-package-lock] + [--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--ignore-scripts] + [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + aliases: i, in, ins, inst, insta, instal, isnt, isnta, isntal, add + + Run "npm help install" for more info + + install-ci-test npm install-ci-test + + Install a project 
with a clean slate and run tests + + Usage: + npm install-ci-test + + Options: + [--no-audit] [--ignore-scripts] [--script-shell <script-shell>] + + alias: cit + + Run "npm help install-ci-test" for more info + + install-test npm install-test + + Install package(s) and run tests + + Usage: + npm install-test [<@scope>/]<pkg> + npm install-test [<@scope>/]<pkg>@<tag> + npm install-test [<@scope>/]<pkg>@<version> + npm install-test [<@scope>/]<pkg>@<version range> + npm install-test <alias>@npm:<name> + npm install-test <folder> + npm install-test <tarball file> + npm install-test <tarball url> + npm install-test <git:// url> + npm install-test <github username>/<github project> + + Options: + [-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer] + [-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] + [--strict-peer-deps] [--no-package-lock] + [--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--ignore-scripts] + [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + alias: it + + Run "npm help install-test" for more info + + link npm link + + Symlink a package folder + + Usage: + npm link (in package dir) + npm link [<@scope>/]<pkg>[@<version>] + + Options: + [-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer] + [-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] + [--strict-peer-deps] [--no-package-lock] + [--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--ignore-scripts] + [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + alias: ln + + Run "npm help link" for more info + + ll npm ll + + List installed packages + + Usage: + npm ll [[<@scope>/]<pkg> ...] + + Options: + [-a|--all] [--json] [-l|--long] [-p|--parseable] [-g|--global] [--depth <depth>] + [--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--link] + [--package-lock-only] [--unicode] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + alias: la + + Run "npm help ll" for more info + + login npm adduser + + Add a registry user account + + Usage: + npm adduser + + Options: + [--registry <registry>] [--scope <@scope>] + + aliases: login, add-user + + Run "npm help adduser" for more info + + logout npm logout + + Log out of the registry + + Usage: + npm logout + + Options: + [--registry <registry>] [--scope <@scope>] + + Run "npm help logout" for more info + + ls npm ls + + List installed packages + + Usage: + npm ls [[<@scope>/]<pkg> ...] + + Options: + [-a|--all] [--json] [-l|--long] [-p|--parseable] [-g|--global] [--depth <depth>] + [--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--link] + [--package-lock-only] [--unicode] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + alias: list + + Run "npm help ls" for more info + + org npm org + + Manage orgs + + Usage: + npm org set orgname username [developer | admin | owner] + npm org rm orgname username + npm org ls orgname [<username>] + + Options: + [--registry <registry>] [--otp <otp>] [--json] [-p|--parseable] + + alias: ogr + + Run "npm help org" for more info + + outdated npm outdated + + Check for outdated packages + + Usage: + npm outdated [[<@scope>/]<pkg> ...] 
+ + Options: + [-a|--all] [--json] [-l|--long] [-p|--parseable] [-g|--global] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + + Run "npm help outdated" for more info + + owner npm owner + + Manage package owners + + Usage: + npm owner add <user> [<@scope>/]<pkg> + npm owner rm <user> [<@scope>/]<pkg> + npm owner ls [<@scope>/]<pkg> + + Options: + [--registry <registry>] [--otp <otp>] + + alias: author + + Run "npm help owner" for more info + + pack npm pack + + Create a tarball from a package + + Usage: + npm pack [[<@scope>/]<pkg>...] + + Options: + [--dry-run] [--json] [--pack-destination <pack-destination>] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + Run "npm help pack" for more info + + ping npm ping + + Ping npm registry + + Usage: + npm ping + + Options: + [--registry <registry>] + + Run "npm help ping" for more info + + pkg npm pkg + + Manages your package.json + + Usage: + npm pkg set <key>=<value> [<key>=<value> ...] + npm pkg get [<key> [<key> ...]] + npm pkg delete <key> [<key> ...] + + Options: + [-f|--force] [--json] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + Run "npm help pkg" for more info + + prefix npm prefix + + Display prefix + + Usage: + npm prefix [-g] + + Options: + [-g|--global] + + Run "npm help prefix" for more info + + profile npm profile + + Change settings on your registry profile + + Usage: + npm profile enable-2fa [auth-only|auth-and-writes] + npm profile disable-2fa + npm profile get [<key>] + npm profile set <key> <value> + + Options: + [--registry <registry>] [--json] [-p|--parseable] [--otp <otp>] + + Run "npm help profile" for more info + + prune npm prune + + Remove extraneous packages + + Usage: + npm prune [[<@scope>/]<pkg>...] + + Options: + [--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] [--dry-run] + [--json] [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + Run "npm help prune" for more info + + publish npm publish + + Publish a package + + Usage: + npm publish [<folder>] + + Options: + [--tag <tag>] [--access <restricted|public>] [--dry-run] [--otp <otp>] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + Run "npm help publish" for more info + + rebuild npm rebuild + + Rebuild a package + + Usage: + npm rebuild [[<@scope>/]<name>[@<version>] ...] 
+ + Options: + [-g|--global] [--no-bin-links] [--ignore-scripts] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + alias: rb + + Run "npm help rebuild" for more info + + repo npm repo + + Open package repository page in the browser + + Usage: + npm repo [<pkgname> [<pkgname> ...]] + + Options: + [--no-browser|--browser <browser>] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + Run "npm help repo" for more info + + restart npm restart + + Restart a package + + Usage: + npm restart [-- <args>] + + Options: + [--ignore-scripts] [--script-shell <script-shell>] + + Run "npm help restart" for more info + + root npm root + + Display npm root + + Usage: + npm root + + Options: + [-g|--global] + + Run "npm help root" for more info + + run-script npm run-script + + Run arbitrary package scripts + + Usage: + npm run-script <command> [-- <args>] + + Options: + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] [--if-present] [--ignore-scripts] + [--script-shell <script-shell>] + + aliases: run, rum, urn + + Run "npm help run-script" for more info + + search npm search + + Search for packages + + Usage: + npm search [search terms ...] + + Options: + [-l|--long] [--json] [--color|--no-color|--color always] [-p|--parseable] + [--no-description] [--searchopts <searchopts>] [--searchexclude <searchexclude>] + [--registry <registry>] [--prefer-online] [--prefer-offline] [--offline] + + aliases: s, se, find + + Run "npm help search" for more info + + set npm set + + Set a value in the npm configuration + + Usage: + npm set <key>=<value> [<key>=<value> ...] (See \`npm config\`) + + Run "npm help set" for more info + + set-script npm set-script + + Set tasks in the scripts section of package.json + + Usage: + npm set-script [<script>] [<command>] + + Options: + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + Run "npm help set-script" for more info + + shrinkwrap npm shrinkwrap + + Lock down dependency versions for publication + + Usage: + npm shrinkwrap + + Run "npm help shrinkwrap" for more info + + star npm star + + Mark your favorite packages + + Usage: + npm star [<pkg>...] 
+ + Options: + [--registry <registry>] [--unicode] + + Run "npm help star" for more info + + stars npm stars + + View packages marked as favorites + + Usage: + npm stars [<user>] + + Options: + [--registry <registry>] + + Run "npm help stars" for more info + + start npm start + + Start a package + + Usage: + npm start [-- <args>] + + Options: + [--ignore-scripts] [--script-shell <script-shell>] + + Run "npm help start" for more info + + stop npm stop + + Stop a package + + Usage: + npm stop [-- <args>] + + Options: + [--ignore-scripts] [--script-shell <script-shell>] + + Run "npm help stop" for more info + + team npm team + + Manage organization teams and team memberships + + Usage: + npm team create <scope:team> [--otp <otpcode>] + npm team destroy <scope:team> [--otp <otpcode>] + npm team add <scope:team> <user> [--otp <otpcode>] + npm team rm <scope:team> <user> [--otp <otpcode>] + npm team ls <scope>|<scope:team> + + Options: + [--registry <registry>] [--otp <otp>] [-p|--parseable] [--json] + + Run "npm help team" for more info + + test npm test + + Test a package + + Usage: + npm test [-- <args>] + + Options: + [--ignore-scripts] [--script-shell <script-shell>] + + aliases: tst, t + + Run "npm help test" for more info + + token npm token + + Manage your authentication tokens + + Usage: + npm token list + npm token revoke <id|token> + npm token create [--read-only] [--cidr=list] + + Options: + [--read-only] [--cidr <cidr> [--cidr <cidr> ...]] [--registry <registry>] + [--otp <otp>] + + Run "npm help token" for more info + + uninstall npm uninstall + + Remove a package + + Usage: + npm uninstall [<@scope>/]<pkg>... + + Options: + [-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + aliases: un, unlink, remove, rm, r + + Run "npm help uninstall" for more info + + unpublish npm unpublish + + Remove a package from the registry + + Usage: + npm unpublish [<@scope>/]<pkg>[@<version>] + + Options: + [--dry-run] [-f|--force] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + Run "npm help unpublish" for more info + + unstar npm unstar + + Remove an item from your favorite packages + + Usage: + npm unstar [<pkg>...] + + Options: + [--registry <registry>] [--unicode] [--otp <otp>] + + Run "npm help unstar" for more info + + update npm update + + Update packages + + Usage: + npm update [<pkg>...] + + Options: + [-g|--global] [--global-style] [--legacy-bundling] [--strict-peer-deps] + [--no-package-lock] [--omit <dev|optional|peer> [--omit <dev|optional|peer> ...]] + [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + aliases: up, upgrade, udpate + + Run "npm help update" for more info + + version npm version + + Bump a package version + + Usage: + npm version [<newversion> | major | minor | patch | premajor | preminor | prepatch | prerelease | from-git] + + Options: + [--allow-same-version] [--no-commit-hooks] [--no-git-tag-version] [--json] + [--preid prerelease-id] [--sign-git-tag] + [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + alias: verison + + Run "npm help version" for more info + + view npm view + + View registry info + + Usage: + npm view [<@scope>/]<pkg>[@<version>] [<field>[.subfield]...] 
+ + Options: + [--json] [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] + [-ws|--workspaces] + + aliases: v, info, show + + Run "npm help view" for more info + + whoami npm whoami + + Display npm username + + Usage: + npm whoami + + Options: + [--registry <registry>] + + Run "npm help whoami" for more info + +Specify configs in the ini-formatted file: + /some/config/file/.npmrc +or on the command line via: npm <command> --key=value + +More configuration info: npm help config +Configuration fields: npm help 7 config + +npm@{VERSION} {BASEDIR} +` diff --git a/tap-snapshots/test-lib-utils-open-url.js-TAP.test.js b/tap-snapshots/test/lib/utils/open-url.js.test.cjs similarity index 100% rename from tap-snapshots/test-lib-utils-open-url.js-TAP.test.js rename to tap-snapshots/test/lib/utils/open-url.js.test.cjs diff --git a/tap-snapshots/test-lib-utils-reify-finish.js-TAP.test.js b/tap-snapshots/test/lib/utils/reify-finish.js.test.cjs similarity index 100% rename from tap-snapshots/test-lib-utils-reify-finish.js-TAP.test.js rename to tap-snapshots/test/lib/utils/reify-finish.js.test.cjs diff --git a/tap-snapshots/test-lib-utils-reify-output.js-TAP.test.js b/tap-snapshots/test/lib/utils/reify-output.js.test.cjs similarity index 100% rename from tap-snapshots/test-lib-utils-reify-output.js-TAP.test.js rename to tap-snapshots/test/lib/utils/reify-output.js.test.cjs diff --git a/tap-snapshots/test-lib-utils-tar.js-TAP.test.js b/tap-snapshots/test/lib/utils/tar.js.test.cjs similarity index 67% rename from tap-snapshots/test-lib-utils-tar.js-TAP.test.js rename to tap-snapshots/test/lib/utils/tar.js.test.cjs index 402a0e735afc4..d132d7af6e6f9 100644 --- a/tap-snapshots/test-lib-utils-tar.js-TAP.test.js +++ b/tap-snapshots/test/lib/utils/tar.js.test.cjs @@ -11,6 +11,9 @@ exports[`test/lib/utils/tar.js TAP should log tarball contents > must match snap package: my-cool-pkg@1.0.0 === Tarball Contents === +4B cat +4B chai +4B dog 97B package.json === Bundled Dependencies === @@ -20,14 +23,14 @@ bundle-dep name: my-cool-pkg version: 1.0.0 filename: my-cool-pkg-1.0.0.tgz -package size: 222 B -unpacked size: 101 B -shasum: fe3a2f6064ade3bc21640874530586343f2d832f -integrity: sha512-ehndP8xBQL4yo[...]kWinZ4k1SCqUA== +package size: 274 B +unpacked size: 113 B +shasum: cd0dfccff77dff944eb761854bc0b0497d974f67 +integrity: sha512-qeFip1jH05vkW[...]zHSdMdPpYogMA== bundled deps: 1 bundled files: 0 -own files: 2 -total files: 2 +own files: 5 +total files: 5 ` diff --git a/tap-snapshots/test-lib-utils-update-notifier.js-TAP.test.js b/tap-snapshots/test/lib/utils/update-notifier.js.test.cjs similarity index 56% rename from tap-snapshots/test-lib-utils-update-notifier.js-TAP.test.js rename to tap-snapshots/test/lib/utils/update-notifier.js.test.cjs index 91228650d47de..157390997d793 100644 --- a/tap-snapshots/test-lib-utils-update-notifier.js-TAP.test.js +++ b/tap-snapshots/test/lib/utils/update-notifier.js.test.cjs @@ -6,11 +6,11 @@ */ 'use strict' exports[`test/lib/utils/update-notifier.js TAP notification situations major to current > color 1`] = ` - -New major version of npm available! 122.420.69 -> 123.420.69 -Changelog: https://github.com/npm/cli/releases/tag/v123.420.69 -Run npm install -g npm@123.420.69 to update! - + +New major version of npm available! 122.420.69 -> 123.420.69 +Changelog: https://github.com/npm/cli/releases/tag/v123.420.69 +Run npm install -g npm@123.420.69 to update! 
+ ` exports[`test/lib/utils/update-notifier.js TAP notification situations major to current > no color 1`] = ` @@ -22,11 +22,11 @@ Run \`npm install -g npm@123.420.69\` to update! ` exports[`test/lib/utils/update-notifier.js TAP notification situations minor to current > color 1`] = ` - -New minor version of npm available! 123.419.69 -> 123.420.69 -Changelog: https://github.com/npm/cli/releases/tag/v123.420.69 -Run npm install -g npm@123.420.69 to update! - + +New minor version of npm available! 123.419.69 -> 123.420.69 +Changelog: https://github.com/npm/cli/releases/tag/v123.420.69 +Run npm install -g npm@123.420.69 to update! + ` exports[`test/lib/utils/update-notifier.js TAP notification situations minor to current > no color 1`] = ` @@ -38,11 +38,11 @@ Run \`npm install -g npm@123.420.69\` to update! ` exports[`test/lib/utils/update-notifier.js TAP notification situations minor to next version > color 1`] = ` - -New minor version of npm available! 123.420.70 -> 123.421.70 -Changelog: https://github.com/npm/cli/releases/tag/v123.421.70 -Run npm install -g npm@123.421.70 to update! - + +New minor version of npm available! 123.420.70 -> 123.421.70 +Changelog: https://github.com/npm/cli/releases/tag/v123.421.70 +Run npm install -g npm@123.421.70 to update! + ` exports[`test/lib/utils/update-notifier.js TAP notification situations minor to next version > no color 1`] = ` @@ -54,11 +54,11 @@ Run \`npm install -g npm@123.421.70\` to update! ` exports[`test/lib/utils/update-notifier.js TAP notification situations new beta available > color 1`] = ` - -New prerelease version of npm available! 124.0.0-beta.0 -> 124.0.0-beta.99999 -Changelog: https://github.com/npm/cli/releases/tag/v124.0.0-beta.99999 -Run npm install -g npm@124.0.0-beta.99999 to update! - + +New prerelease version of npm available! 124.0.0-beta.0 -> 124.0.0-beta.99999 +Changelog: https://github.com/npm/cli/releases/tag/v124.0.0-beta.99999 +Run npm install -g npm@124.0.0-beta.99999 to update! + ` exports[`test/lib/utils/update-notifier.js TAP notification situations new beta available > no color 1`] = ` @@ -70,11 +70,11 @@ Run \`npm install -g npm@124.0.0-beta.99999\` to update! ` exports[`test/lib/utils/update-notifier.js TAP notification situations patch to current > color 1`] = ` - -New patch version of npm available! 123.420.68 -> 123.420.69 -Changelog: https://github.com/npm/cli/releases/tag/v123.420.69 -Run npm install -g npm@123.420.69 to update! - + +New patch version of npm available! 123.420.68 -> 123.420.69 +Changelog: https://github.com/npm/cli/releases/tag/v123.420.69 +Run npm install -g npm@123.420.69 to update! + ` exports[`test/lib/utils/update-notifier.js TAP notification situations patch to current > no color 1`] = ` @@ -86,11 +86,11 @@ Run \`npm install -g npm@123.420.69\` to update! ` exports[`test/lib/utils/update-notifier.js TAP notification situations patch to next version > color 1`] = ` - -New patch version of npm available! 123.421.69 -> 123.421.70 -Changelog: https://github.com/npm/cli/releases/tag/v123.421.70 -Run npm install -g npm@123.421.70 to update! - + +New patch version of npm available! 123.421.69 -> 123.421.70 +Changelog: https://github.com/npm/cli/releases/tag/v123.421.70 +Run npm install -g npm@123.421.70 to update! 
+ ` exports[`test/lib/utils/update-notifier.js TAP notification situations patch to next version > no color 1`] = ` diff --git a/tap-snapshots/test-lib-view.js-TAP.test.js b/tap-snapshots/test/lib/view.js.test.cjs similarity index 50% rename from tap-snapshots/test-lib-view.js-TAP.test.js rename to tap-snapshots/test/lib/view.js.test.cjs index f8a9fe464df2a..41d7a80fe3b16 100644 --- a/tap-snapshots/test-lib-view.js-TAP.test.js +++ b/tap-snapshots/test/lib/view.js.test.cjs @@ -80,9 +80,9 @@ dist .unpackedSize:1 B dist-tags: +latest: 1.0.0 - -published a year ago +published {TIME} ago ` exports[`test/lib/view.js TAP should log info of package in current working dir specific version > must match snapshot 1`] = ` @@ -97,9 +97,39 @@ dist .unpackedSize:1 B dist-tags: +latest: 1.0.0 + +published {TIME} ago +` + +exports[`test/lib/view.js TAP should log package info package from git > must match snapshot 1`] = ` + + +green@1.0.0 | ACME | deps: 2 | versions: 2 +green is a very important color + +DEPRECATED!! - true + +keywords:colors, green, crayola + +bin:green + +dist +.tarball:http://hm.green.com/1.0.0.tgz +.shasum:123 +.integrity:--- +.unpackedSize:1 B + +dependencies: +red: 1.0.0 +yellow: 1.0.0 +maintainers: +-claudia <c@yellow.com> +-isaacs <i@yellow.com> -published a year ago +dist-tags: +latest: 1.0.0 ` exports[`test/lib/view.js TAP should log package info package with --json and semver range > must match snapshot 1`] = ` @@ -108,7 +138,9 @@ exports[`test/lib/view.js TAP should log package info package with --json and se { "_npmUser": "claudia <claudia@cyan.com>", "name": "cyan", - "dist-tags": {}, + "dist-tags": { + "latest": "1.0.0" + }, "versions": [ "1.0.0", "1.0.1" @@ -125,7 +157,9 @@ exports[`test/lib/view.js TAP should log package info package with --json and se { "_npmUser": "claudia <claudia@cyan.com>", "name": "cyan", - "dist-tags": {}, + "dist-tags": { + "latest": "1.0.0" + }, "versions": [ "1.0.0", "1.0.1" @@ -249,7 +283,7 @@ dist .unpackedSize:1 B dist-tags: - +latest: 1.0.0 published by claudia <claudia@cyan.com> ` @@ -266,7 +300,264 @@ dist .unpackedSize:1 B dist-tags: +latest: 1.0.0 + +published {TIME} ago +` + +exports[`test/lib/view.js TAP workspaces all workspaces --json > must match snapshot 1`] = ` + +{ + "green": { + "_id": "green", + "name": "green", + "dist-tags": { + "latest": "1.0.0" + }, + "maintainers": [ + { + "name": "claudia", + "email": "c@yellow.com", + "twitter": "cyellow" + }, + { + "name": "isaacs", + "email": "i@yellow.com", + "twitter": "iyellow" + } + ], + "keywords": [ + "colors", + "green", + "crayola" + ], + "versions": [ + "1.0.0", + "1.0.1" + ], + "version": "1.0.0", + "description": "green is a very important color", + "bugs": { + "url": "http://bugs.green.com" + }, + "deprecated": true, + "repository": { + "url": "http://repository.green.com" + }, + "license": { + "type": "ACME" + }, + "bin": { + "green": "bin/green.js" + }, + "dependencies": { + "red": "1.0.0", + "yellow": "1.0.0" + }, + "dist": { + "shasum": "123", + "tarball": "http://hm.green.com/1.0.0.tgz", + "integrity": "---", + "fileCount": 1, + "unpackedSize": 1 + } + }, + "orange": { + "name": "orange", + "dist-tags": { + "latest": "1.0.0" + }, + "versions": [ + "1.0.0", + "1.0.1" + ], + "version": "1.0.0", + "homepage": "http://hm.orange.com", + "license": {}, + "dist": { + "shasum": "123", + "tarball": "http://hm.orange.com/1.0.0.tgz", + "integrity": "---", + "fileCount": 1, + "unpackedSize": 1 + } + } +} +` + +exports[`test/lib/view.js TAP workspaces all workspaces > must match 
snapshot 1`] = ` + + +green@1.0.0 | ACME | deps: 2 | versions: 2 +green is a very important color + +DEPRECATED!! - true + +keywords:colors, green, crayola + +bin:green + +dist +.tarball:http://hm.green.com/1.0.0.tgz +.shasum:123 +.integrity:--- +.unpackedSize:1 B + +dependencies: +red: 1.0.0 +yellow: 1.0.0 + +maintainers: +-claudia <c@yellow.com> +-isaacs <i@yellow.com> + +dist-tags: +latest: 1.0.0 + +orange@1.0.0 | Proprietary | deps: none | versions: 2 +http://hm.orange.com + +dist +.tarball:http://hm.orange.com/1.0.0.tgz +.shasum:123 +.integrity:--- +.unpackedSize:1 B + +dist-tags: +latest: 1.0.0 +` + +exports[`test/lib/view.js TAP workspaces all workspaces nonexistent field --json > must match snapshot 1`] = ` + +` + +exports[`test/lib/view.js TAP workspaces all workspaces nonexistent field > must match snapshot 1`] = ` + +green: +orange: +` + +exports[`test/lib/view.js TAP workspaces all workspaces single field --json > must match snapshot 1`] = ` +{ + "green": "green", + "orange": "orange" +} +` + +exports[`test/lib/view.js TAP workspaces all workspaces single field > must match snapshot 1`] = ` + +green: +green +orange: +orange +` + +exports[`test/lib/view.js TAP workspaces one specific workspace > must match snapshot 1`] = ` + + +green@1.0.0 | ACME | deps: 2 | versions: 2 +green is a very important color + +DEPRECATED!! - true -published a year ago +keywords:colors, green, crayola + +bin:green + +dist +.tarball:http://hm.green.com/1.0.0.tgz +.shasum:123 +.integrity:--- +.unpackedSize:1 B + +dependencies: +red: 1.0.0 +yellow: 1.0.0 + +maintainers: +-claudia <c@yellow.com> +-isaacs <i@yellow.com> + +dist-tags: +latest: 1.0.0 +` + +exports[`test/lib/view.js TAP workspaces remote package name > must match snapshot 1`] = ` +Ignoring workspaces for specified package(s) +` + +exports[`test/lib/view.js TAP workspaces remote package name > must match snapshot 2`] = ` + + +pink@1.0.0 | Proprietary | deps: none | versions: 2 + +dist +.tarball:http://hm.pink.com/1.0.0.tgz +.shasum:123 +.integrity:--- +.unpackedSize:1 B + +dist-tags: +latest: 1.0.0 +` + +exports[`test/lib/view.js TAP workspaces single workspace --json > must match snapshot 1`] = ` + +{ + "green": { + "_id": "green", + "name": "green", + "dist-tags": { + "latest": "1.0.0" + }, + "maintainers": [ + { + "name": "claudia", + "email": "c@yellow.com", + "twitter": "cyellow" + }, + { + "name": "isaacs", + "email": "i@yellow.com", + "twitter": "iyellow" + } + ], + "keywords": [ + "colors", + "green", + "crayola" + ], + "versions": [ + "1.0.0", + "1.0.1" + ], + "version": "1.0.0", + "description": "green is a very important color", + "bugs": { + "url": "http://bugs.green.com" + }, + "deprecated": true, + "repository": { + "url": "http://repository.green.com" + }, + "license": { + "type": "ACME" + }, + "bin": { + "green": "bin/green.js" + }, + "dependencies": { + "red": "1.0.0", + "yellow": "1.0.0" + }, + "dist": { + "shasum": "123", + "tarball": "http://hm.green.com/1.0.0.tgz", + "integrity": "---", + "fileCount": 1, + "unpackedSize": 1 + } + } +} ` diff --git a/test/bin/npm-cli.js b/test/bin/npm-cli.js index bcca99c8c8fe1..7b4b619e2b771 100644 --- a/test/bin/npm-cli.js +++ b/test/bin/npm-cli.js @@ -1,7 +1,6 @@ const t = require('tap') -const requireInject = require('require-inject') t.test('loading the bin calls the implementation', t => { - requireInject('../../bin/npm-cli.js', { + t.mock('../../bin/npm-cli.js', { '../../lib/cli.js': proc => { t.equal(proc, process, 'called implementation with process object') t.end() diff --git 
a/test/bin/npx-cli.js b/test/bin/npx-cli.js index 2b7b488297cab..5eeee30184363 100644 --- a/test/bin/npx-cli.js +++ b/test/bin/npx-cli.js @@ -1,5 +1,4 @@ const t = require('tap') -const requireInject = require('require-inject') const npx = require.resolve('../../bin/npx-cli.js') const cli = require.resolve('../../lib/cli.js') const npm = require.resolve('../../bin/npm-cli.js') @@ -7,42 +6,39 @@ const npm = require.resolve('../../bin/npm-cli.js') const logs = [] console.error = (...msg) => logs.push(msg) -t.afterEach(cb => { - logs.length = 0 - cb() -}) +t.afterEach(() => logs.length = 0) t.test('npx foo -> npm exec -- foo', t => { process.argv = ['node', npx, 'foo'] - requireInject(npx, { [cli]: () => {} }) + t.mock(npx, { [cli]: () => {} }) t.strictSame(process.argv, ['node', npm, 'exec', '--', 'foo']) t.end() }) t.test('npx -- foo -> npm exec -- foo', t => { process.argv = ['node', npx, '--', 'foo'] - requireInject(npx, { [cli]: () => {} }) + t.mock(npx, { [cli]: () => {} }) t.strictSame(process.argv, ['node', npm, 'exec', '--', 'foo']) t.end() }) t.test('npx -x y foo -z -> npm exec -x y -- foo -z', t => { process.argv = ['node', npx, '-x', 'y', 'foo', '-z'] - requireInject(npx, { [cli]: () => {} }) + t.mock(npx, { [cli]: () => {} }) t.strictSame(process.argv, ['node', npm, 'exec', '-x', 'y', '--', 'foo', '-z']) t.end() }) t.test('npx --x=y --no-install foo -z -> npm exec --x=y -- foo -z', t => { process.argv = ['node', npx, '--x=y', '--no-install', 'foo', '-z'] - requireInject(npx, { [cli]: () => {} }) + t.mock(npx, { [cli]: () => {} }) t.strictSame(process.argv, ['node', npm, 'exec', '--x=y', '--yes=false', '--', 'foo', '-z']) t.end() }) t.test('transform renamed options into proper values', t => { process.argv = ['node', npx, '-y', '--shell=bash', '-p', 'foo', '-c', 'asdf'] - requireInject(npx, { [cli]: () => {} }) + t.mock(npx, { [cli]: () => {} }) t.strictSame(process.argv, ['node', npm, 'exec', '--yes', '--script-shell=bash', '--package', 'foo', '--call', 'asdf']) t.end() }) @@ -80,7 +76,7 @@ t.test('use a bunch of deprecated switches and options', t => { '--', 'foobar', ] - requireInject(npx, { [cli]: () => {} }) + t.mock(npx, { [cli]: () => {} }) t.strictSame(process.argv, expect) t.strictSame(logs, [ ['npx: the --npm argument has been removed.'], diff --git a/test/fixtures/eresolve-explanations.js b/test/fixtures/eresolve-explanations.js index c5c338e61c8c0..c34424c7f857a 100644 --- a/test/fixtures/eresolve-explanations.js +++ b/test/fixtures/eresolve-explanations.js @@ -16,10 +16,10 @@ module.exports = { type: 'prod', name: '@isaacs/peer-dep-cycle-a', spec: '1.x', - from: { location: '/some/project' } - } - ] - } + from: { location: '/some/project' }, + }, + ], + }, }, current: { name: '@isaacs/peer-dep-cycle-c', @@ -30,48 +30,50 @@ module.exports = { type: 'prod', name: '@isaacs/peer-dep-cycle-c', spec: '2.x', - from: { location: '/some/project' } - } - ] + from: { location: '/some/project' }, + }, + ], }, peerConflict: { - name: '@isaacs/peer-dep-cycle-c', - version: '1.0.0', - whileInstalling: { name: '@isaacs/peer-dep-cycle-a', version: '1.0.0' }, - location: 'node_modules/@isaacs/peer-dep-cycle-c', - dependents: [ - { - type: 'peer', - name: '@isaacs/peer-dep-cycle-c', - spec: '1', - from: { - name: '@isaacs/peer-dep-cycle-b', - version: '1.0.0', - whileInstalling: { name: '@isaacs/peer-dep-cycle-a', version: '1.0.0' }, - location: 'node_modules/@isaacs/peer-dep-cycle-b', - dependents: [ - { - type: 'peer', - name: '@isaacs/peer-dep-cycle-b', - spec: '1', - from: { - name: 
'@isaacs/peer-dep-cycle-a', - version: '1.0.0', - location: 'node_modules/@isaacs/peer-dep-cycle-a', - dependents: [ - { - type: 'prod', - name: '@isaacs/peer-dep-cycle-a', - spec: '1.x', - from: { location: '/some/project' } - } - ] - } - } - ] - } - } - ] + peer: { + name: '@isaacs/peer-dep-cycle-c', + version: '1.0.0', + whileInstalling: { name: '@isaacs/peer-dep-cycle-a', version: '1.0.0' }, + location: 'node_modules/@isaacs/peer-dep-cycle-c', + dependents: [ + { + type: 'peer', + name: '@isaacs/peer-dep-cycle-c', + spec: '1', + from: { + name: '@isaacs/peer-dep-cycle-b', + version: '1.0.0', + whileInstalling: { name: '@isaacs/peer-dep-cycle-a', version: '1.0.0' }, + location: 'node_modules/@isaacs/peer-dep-cycle-b', + dependents: [ + { + type: 'peer', + name: '@isaacs/peer-dep-cycle-b', + spec: '1', + from: { + name: '@isaacs/peer-dep-cycle-a', + version: '1.0.0', + location: 'node_modules/@isaacs/peer-dep-cycle-a', + dependents: [ + { + type: 'prod', + name: '@isaacs/peer-dep-cycle-a', + spec: '1.x', + from: { location: '/some/project' }, + }, + ], + }, + }, + ], + }, + }, + ], + }, }, strictPeerDeps: true, }, @@ -102,13 +104,13 @@ module.exports = { type: 'prod', name: '@isaacs/peer-dep-cycle-a', spec: '1.x', - from: { location: '/some/project' } - } - ] - } - } - ] - } + from: { location: '/some/project' }, + }, + ], + }, + }, + ], + }, }, current: { name: '@isaacs/peer-dep-cycle-c', @@ -119,9 +121,9 @@ module.exports = { type: 'prod', name: '@isaacs/peer-dep-cycle-c', spec: '2.x', - from: { location: '/some/project' } - } - ] + from: { location: '/some/project' }, + }, + ], }, strictPeerDeps: true, }, @@ -134,7 +136,7 @@ module.exports = { whileInstalling: { name: 'project', version: '1.2.3', - path: '/some/project' + path: '/some/project', }, location: 'node_modules/@isaacs/testing-peer-dep-conflict-chain-d', dependents: [ @@ -142,9 +144,9 @@ module.exports = { type: 'prod', name: '@isaacs/testing-peer-dep-conflict-chain-d', spec: '2', - from: { location: '/some/project' } - } - ] + from: { location: '/some/project' }, + }, + ], }, edge: { type: 'peer', @@ -157,7 +159,7 @@ module.exports = { whileInstalling: { name: 'project', version: '1.2.3', - path: '/some/project' + path: '/some/project', }, location: 'node_modules/@isaacs/testing-peer-dep-conflict-chain-c', dependents: [ @@ -165,13 +167,13 @@ module.exports = { type: 'prod', name: '@isaacs/testing-peer-dep-conflict-chain-c', spec: '1', - from: { location: '/some/project' } - } - ] - } + from: { location: '/some/project' }, + }, + ], + }, }, peerConflict: null, - strictPeerDeps: false + strictPeerDeps: false, }, gatsby: { @@ -182,7 +184,7 @@ module.exports = { whileInstalling: { name: 'gatsby-recipes', version: '0.2.31', - path: '/some/project/node_modules/gatsby-recipes' + path: '/some/project/node_modules/gatsby-recipes', }, location: 'node_modules/ink', dependents: [ @@ -218,19 +220,19 @@ module.exports = { name: 'gatsby', spec: '', from: { - location: '/some/project/gatsby-user' - } - } - ] - } - } - ] - } - } - ] - } - } - ] + location: '/some/project/gatsby-user', + }, + }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ], }, edge: { type: 'peer', @@ -243,7 +245,7 @@ module.exports = { whileInstalling: { name: 'gatsby-recipes', version: '0.2.31', - path: '/some/project/gatsby-user/node_modules/gatsby-recipes' + path: '/some/project/gatsby-user/node_modules/gatsby-recipes', }, location: 'node_modules/ink-box', dependents: [ @@ -279,23 +281,279 @@ module.exports = { name: 'gatsby', spec: '', from: { - location: 
'/some/project/gatsby-user' - } - } - ] - } - } - ] - } - } - ] - } - } - ] - } + location: '/some/project/gatsby-user', + }, + }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ], + }, }, peerConflict: null, - strictPeerDeps: true - } + strictPeerDeps: true, + }, + 'no current node, but has current edge': { + code: 'ERESOLVE', + current: null, + currentEdge: { + type: 'dev', + name: 'eslint', + spec: 'file:.', + error: 'MISSING', + from: { + location: '/some/projects/eslint', + }, + }, + edge: { + type: 'peer', + name: 'eslint', + spec: '^6.0.0', + error: 'MISSING', + from: { + name: 'eslint-plugin-jsdoc', + version: '22.2.0', + whileInstalling: { + name: 'eslint', + version: '7.22.0', + path: '/Users/isaacs/dev/npm/cli/eslint', + }, + location: 'node_modules/eslint-plugin-jsdoc', + dependents: [ + { + type: 'dev', + name: 'eslint-plugin-jsdoc', + spec: '^22.1.0', + from: { + location: '/some/projects/eslint', + }, + }, + ], + }, + }, + peerConflict: null, + strictPeerDeps: false, + force: false, + }, + 'no current node, no current edge, idk': { + code: 'ERESOLVE', + current: null, + edge: { + type: 'peer', + name: 'eslint', + spec: '^6.0.0', + error: 'MISSING', + from: { + name: 'eslint-plugin-jsdoc', + version: '22.2.0', + whileInstalling: { + name: 'eslint', + version: '7.22.0', + path: '/Users/isaacs/dev/npm/cli/eslint', + }, + location: 'node_modules/eslint-plugin-jsdoc', + dependents: [ + { + type: 'dev', + name: 'eslint-plugin-jsdoc', + spec: '^22.1.0', + from: { + location: '/some/projects/eslint', + }, + }, + ], + }, + }, + peerConflict: null, + strictPeerDeps: false, + force: false, + }, + + 'eslint-plugin case': { + code: 'ERESOLVE', + edge: { + type: 'dev', + name: 'eslint-plugin-eslint-plugin', + spec: '^3.1.0', + error: 'MISSING', + from: { + location: '/Users/isaacs/dev/npm/arborist/fixtures/eslint-plugin-react', + }, + }, + dep: { + name: 'eslint-plugin-eslint-plugin', + version: '3.5.1', + whileInstalling: { + name: 'eslint-plugin-react', + version: '7.24.0', + path: '/Users/isaacs/dev/npm/arborist/fixtures/eslint-plugin-react', + }, + location: 'node_modules/eslint-plugin-eslint-plugin', + isWorkspace: false, + dependents: [ + { + type: 'dev', + name: 'eslint-plugin-eslint-plugin', + spec: '^3.1.0', + error: 'MISSING', + from: { + location: '/Users/isaacs/dev/npm/arborist/fixtures/eslint-plugin-react', + }, + }, + ], + }, + current: null, + peerConflict: { + current: { + name: 'eslint', + version: '6.8.0', + location: 'node_modules/eslint', + isWorkspace: false, + dependents: [ + { + type: 'dev', + name: 'eslint', + spec: '^3 || ^4 || ^5 || ^6 || ^7', + from: { + location: '/Users/isaacs/dev/npm/arborist/fixtures/eslint-plugin-react', + }, + }, + { + type: 'peer', + name: 'eslint', + spec: '^5.0.0 || ^6.0.0', + from: { + name: '@typescript-eslint/parser', + version: '2.34.0', + location: 'node_modules/@typescript-eslint/parser', + isWorkspace: false, + dependents: [ + { + type: 'dev', + name: '@typescript-eslint/parser', + spec: '^2.34.0', + from: { + location: '/Users/isaacs/dev/npm/arborist/fixtures/eslint-plugin-react', + }, + }, + ], + }, + }, + { + type: 'peer', + name: 'eslint', + spec: '^5.16.0 || ^6.8.0 || ^7.2.0', + from: { + name: 'eslint-config-airbnb-base', + version: '14.2.1', + location: 'node_modules/eslint-config-airbnb-base', + isWorkspace: false, + dependents: [ + { + type: 'dev', + name: 'eslint-config-airbnb-base', + spec: '^14.2.1', + from: { + location: '/Users/isaacs/dev/npm/arborist/fixtures/eslint-plugin-react', + }, + }, + ], + }, + }, + { + 
type: 'peer', + name: 'eslint', + spec: '^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0', + from: { + name: 'eslint-plugin-import', + version: '2.23.4', + location: 'node_modules/eslint-plugin-import', + isWorkspace: false, + dependents: [ + { + type: 'dev', + name: 'eslint-plugin-import', + spec: '^2.23.4', + from: { + location: '/Users/isaacs/dev/npm/arborist/fixtures/eslint-plugin-react', + }, + }, + { + type: 'peer', + name: 'eslint-plugin-import', + spec: '^2.22.1', + from: { + name: 'eslint-config-airbnb-base', + version: '14.2.1', + location: 'node_modules/eslint-config-airbnb-base', + isWorkspace: false, + dependents: [ + { + type: 'dev', + name: 'eslint-config-airbnb-base', + spec: '^14.2.1', + from: { + location: '/Users/isaacs/dev/npm/arborist/fixtures/eslint-plugin-react', + }, + }, + ], + }, + }, + ], + }, + }, + ], + }, + peer: { + name: 'eslint', + version: '7.31.0', + whileInstalling: { + name: 'eslint-plugin-react', + version: '7.24.0', + path: '/Users/isaacs/dev/npm/arborist/fixtures/eslint-plugin-react', + }, + location: 'node_modules/eslint', + isWorkspace: false, + dependents: [ + { + type: 'peer', + name: 'eslint', + spec: '^7.0.0', + from: { + name: 'eslint-plugin-eslint-plugin', + version: '3.5.1', + whileInstalling: { + name: 'eslint-plugin-react', + version: '7.24.0', + path: '/Users/isaacs/dev/npm/arborist/fixtures/eslint-plugin-react', + }, + location: 'node_modules/eslint-plugin-eslint-plugin', + isWorkspace: false, + dependents: [ + { + type: 'dev', + name: 'eslint-plugin-eslint-plugin', + spec: '^3.1.0', + error: 'MISSING', + from: { + location: '/Users/isaacs/dev/npm/arborist/fixtures/eslint-plugin-react', + }, + }, + ], + }, + }, + ], + }, + }, + strictPeerDeps: false, + force: false, + isMine: true, + }, } diff --git a/test/fixtures/mock-npm.js b/test/fixtures/mock-npm.js new file mode 100644 index 0000000000000..e3be10b4b9aa3 --- /dev/null +++ b/test/fixtures/mock-npm.js @@ -0,0 +1,108 @@ +const npmlog = require('npmlog') +const procLog = require('../../lib/utils/proc-log-listener.js') +procLog.reset() + +const realLog = {} +for (const level in npmlog.levels) + realLog[level] = npmlog[level] + +const { title, execPath } = process + +const RealMockNpm = (t, otherMocks = {}) => { + t.teardown(() => { + npm.perfStop() + npmlog.record.length = 0 + for (const level in npmlog.levels) + npmlog[level] = realLog[level] + procLog.reset() + process.title = title + process.execPath = execPath + delete process.env.npm_command + delete process.env.COLOR + }) + const logs = [] + const outputs = [] + const npm = t.mock('../../lib/npm.js', otherMocks) + const command = async (command, args = []) => { + return new Promise((resolve, reject) => { + npm.commands[command](args, err => { + if (err) + return reject(err) + return resolve() + }) + }) + } + for (const level in npmlog.levels) { + npmlog[level] = (...msg) => { + logs.push([level, ...msg]) + + const l = npmlog.level + npmlog.level = 'silent' + realLog[level](...msg) + npmlog.level = l + } + } + npm.output = (...msg) => outputs.push(msg) + return { npm, logs, outputs, command } +} + +const realConfig = require('../../lib/utils/config') + +// Basic npm fixture that you can give a config object that acts like +// npm.config You still need a separate flatOptions. 
Tests should migrate to +// using the real npm mock above +class MockNpm { + constructor (base = {}) { + this._mockOutputs = [] + this.isMockNpm = true + this.base = base + + const config = base.config || {} + + for (const attr in base) { + if (attr !== 'config') { + this[attr] = base[attr] + } + } + + this.flatOptions = base.flatOptions || {} + this.config = { + // for now just set `find` to what config.find should return + // this works cause `find` is not an existing config entry + find: (k) => ({...realConfig.defaults, ...config})[k], + get: (k) => ({...realConfig.defaults, ...config})[k], + set: (k, v) => config[k] = v, + list: [{ ...realConfig.defaults, ...config}] + } + if (!this.log) { + this.log = { + clearProgress: () => {}, + disableProgress: () => {}, + enableProgress: () => {}, + http: () => {}, + info: () => {}, + levels: [], + notice: () => {}, + pause: () => {}, + silly: () => {}, + verbose: () => {}, + warn: () => {}, + } + } + } + + output(...msg) { + if (this.base.output) + return this.base.output(msg) + this._mockOutputs.push(msg) + } +} + +const FakeMockNpm = (base = {}) => { + return new MockNpm(base) +} + +module.exports = { + fake: FakeMockNpm, + real: RealMockNpm +} diff --git a/test/lib/access.js b/test/lib/access.js index 3a732ad0aac37..5fd170bab484a 100644 --- a/test/lib/access.js +++ b/test/lib/access.js @@ -1,9 +1,12 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') const Access = require('../../lib/access.js') -test('completion', t => { +const npm = { + output: () => null, +} + +t.test('completion', t => { const access = new Access({ flatOptions: {} }) const testComp = (argv, expect) => { const res = access.completion({conf: {argv: {remain: argv}}}) @@ -34,15 +37,15 @@ test('completion', t => { t.end() }) -test('subcommand required', t => { +t.test('subcommand required', t => { const access = new Access({ flatOptions: {} }) access.exec([], (err) => { - t.equal(err, '\nUsage: Subcommand is required.\n\n' + access.usage) + t.match(err, access.usageError('Subcommand is required.')) t.end() }) }) -test('unrecognized subcommand', (t) => { +t.test('unrecognized subcommand', (t) => { const access = new Access({ flatOptions: {} }) access.exec(['blerg'], (err) => { t.match( @@ -54,7 +57,7 @@ test('unrecognized subcommand', (t) => { }) }) -test('edit', (t) => { +t.test('edit', (t) => { const access = new Access({ flatOptions: {} }) access.exec([ 'edit', @@ -69,7 +72,7 @@ test('edit', (t) => { }) }) -test('access public on unscoped package', (t) => { +t.test('access public on unscoped package', (t) => { const prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'npm-access-public-pkg', @@ -88,13 +91,13 @@ test('access public on unscoped package', (t) => { }) }) -test('access public on scoped package', (t) => { +t.test('access public on scoped package', (t) => { t.plan(4) const name = '@scoped/npm-access-public-pkg' const prefix = t.testdir({ 'package.json': JSON.stringify({ name }), }) - const Access = requireInject('../../lib/access.js', { + const Access = t.mock('../../lib/access.js', { libnpmaccess: { public: (pkg, { registry }) => { t.equal(pkg, name, 'should use pkg name ref') @@ -114,12 +117,12 @@ test('access public on scoped package', (t) => { access.exec([ 'public', ], (err) => { - t.ifError(err, 'npm access') + t.error(err, 'npm access') t.ok('should successfully access public on scoped package') }) }) -test('access public on missing package.json', (t) => { +t.test('access public on missing 
package.json', (t) => { const prefix = t.testdir({ node_modules: {}, }) @@ -136,7 +139,7 @@ test('access public on missing package.json', (t) => { }) }) -test('access public on invalid package.json', (t) => { +t.test('access public on invalid package.json', (t) => { const prefix = t.testdir({ 'package.json': '{\n', node_modules: {}, @@ -154,7 +157,7 @@ test('access public on invalid package.json', (t) => { }) }) -test('access restricted on unscoped package', (t) => { +t.test('access restricted on unscoped package', (t) => { const prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'npm-access-restricted-pkg', @@ -173,13 +176,13 @@ test('access restricted on unscoped package', (t) => { }) }) -test('access restricted on scoped package', (t) => { +t.test('access restricted on scoped package', (t) => { t.plan(4) const name = '@scoped/npm-access-restricted-pkg' const prefix = t.testdir({ 'package.json': JSON.stringify({ name }), }) - const Access = requireInject('../../lib/access.js', { + const Access = t.mock('../../lib/access.js', { libnpmaccess: { restricted: (pkg, { registry }) => { t.equal(pkg, name, 'should use pkg name ref') @@ -199,12 +202,12 @@ test('access restricted on scoped package', (t) => { access.exec([ 'restricted', ], (err) => { - t.ifError(err, 'npm access') + t.error(err, 'npm access') t.ok('should successfully access restricted on scoped package') }) }) -test('access restricted on missing package.json', (t) => { +t.test('access restricted on missing package.json', (t) => { const prefix = t.testdir({ node_modules: {}, }) @@ -221,7 +224,7 @@ test('access restricted on missing package.json', (t) => { }) }) -test('access restricted on invalid package.json', (t) => { +t.test('access restricted on invalid package.json', (t) => { const prefix = t.testdir({ 'package.json': '{\n', node_modules: {}, @@ -239,9 +242,9 @@ test('access restricted on invalid package.json', (t) => { }) }) -test('access grant read-only', (t) => { +t.test('access grant read-only', (t) => { t.plan(5) - const Access = requireInject('../../lib/access.js', { + const Access = t.mock('../../lib/access.js', { libnpmaccess: { grant: (spec, team, permissions) => { t.equal(spec, '@scoped/another', 'should use expected spec') @@ -258,14 +261,14 @@ test('access grant read-only', (t) => { 'myorg:myteam', '@scoped/another', ], (err) => { - t.ifError(err, 'npm access') + t.error(err, 'npm access') t.ok('should successfully access grant read-only') }) }) -test('access grant read-write', (t) => { +t.test('access grant read-write', (t) => { t.plan(5) - const Access = requireInject('../../lib/access.js', { + const Access = t.mock('../../lib/access.js', { libnpmaccess: { grant: (spec, team, permissions) => { t.equal(spec, '@scoped/another', 'should use expected spec') @@ -282,19 +285,19 @@ test('access grant read-write', (t) => { 'myorg:myteam', '@scoped/another', ], (err) => { - t.ifError(err, 'npm access') + t.error(err, 'npm access') t.ok('should successfully access grant read-write') }) }) -test('access grant current cwd', (t) => { +t.test('access grant current cwd', (t) => { t.plan(5) const prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'yargs', }), }) - const Access = requireInject('../../lib/access.js', { + const Access = t.mock('../../lib/access.js', { libnpmaccess: { grant: (spec, team, permissions) => { t.equal(spec, 'yargs', 'should use expected spec') @@ -310,12 +313,12 @@ test('access grant current cwd', (t) => { 'read-write', 'myorg:myteam', ], (err) => { - t.ifError(err, 'npm access') + 
t.error(err, 'npm access') t.ok('should successfully access grant current cwd') }) }) -test('access grant others', (t) => { +t.test('access grant others', (t) => { const access = new Access({ flatOptions: {} }) access.exec([ 'grant', @@ -332,7 +335,7 @@ test('access grant others', (t) => { }) }) -test('access grant missing team args', (t) => { +t.test('access grant missing team args', (t) => { const access = new Access({ flatOptions: {} }) access.exec([ 'grant', @@ -349,7 +352,7 @@ test('access grant missing team args', (t) => { }) }) -test('access grant malformed team arg', (t) => { +t.test('access grant malformed team arg', (t) => { const access = new Access({ flatOptions: {} }) access.exec([ 'grant', @@ -366,9 +369,9 @@ test('access grant malformed team arg', (t) => { }) }) -test('access 2fa-required/2fa-not-required', t => { +t.test('access 2fa-required/2fa-not-required', t => { t.plan(2) - const Access = requireInject('../../lib/access.js', { + const Access = t.mock('../../lib/access.js', { libnpmaccess: { tfaRequired: (spec) => { t.equal(spec, '@scope/pkg', 'should use expected spec') @@ -393,9 +396,9 @@ test('access 2fa-required/2fa-not-required', t => { }) }) -test('access revoke', (t) => { +t.test('access revoke', (t) => { t.plan(4) - const Access = requireInject('../../lib/access.js', { + const Access = t.mock('../../lib/access.js', { libnpmaccess: { revoke: (spec, team) => { t.equal(spec, '@scoped/another', 'should use expected spec') @@ -410,12 +413,12 @@ test('access revoke', (t) => { 'myorg:myteam', '@scoped/another', ], (err) => { - t.ifError(err, 'npm access') + t.error(err, 'npm access') t.ok('should successfully access revoke') }) }) -test('access revoke missing team args', (t) => { +t.test('access revoke missing team args', (t) => { const access = new Access({ flatOptions: {} }) access.exec([ 'revoke', @@ -431,7 +434,7 @@ test('access revoke missing team args', (t) => { }) }) -test('access revoke malformed team arg', (t) => { +t.test('access revoke malformed team arg', (t) => { const access = new Access({ flatOptions: {} }) access.exec([ 'revoke', @@ -447,9 +450,9 @@ test('access revoke malformed team arg', (t) => { }) }) -test('npm access ls-packages with no team', (t) => { +t.test('npm access ls-packages with no team', (t) => { t.plan(3) - const Access = requireInject('../../lib/access.js', { + const Access = t.mock('../../lib/access.js', { libnpmaccess: { lsPackages: (entity) => { t.equal(entity, 'foo', 'should use expected entity') @@ -457,80 +460,76 @@ test('npm access ls-packages with no team', (t) => { }, }, '../../lib/utils/get-identity.js': () => Promise.resolve('foo'), - '../../lib/utils/output.js': () => null, }) - const access = new Access({}) + const access = new Access(npm) access.exec([ 'ls-packages', ], (err) => { - t.ifError(err, 'npm access') + t.error(err, 'npm access') t.ok('should successfully access ls-packages with no team') }) }) -test('access ls-packages on team', (t) => { +t.test('access ls-packages on team', (t) => { t.plan(3) - const Access = requireInject('../../lib/access.js', { + const Access = t.mock('../../lib/access.js', { libnpmaccess: { lsPackages: (entity) => { t.equal(entity, 'myorg:myteam', 'should use expected entity') return {} }, }, - '../../lib/utils/output.js': () => null, }) - const access = new Access({}) + const access = new Access(npm) access.exec([ 'ls-packages', 'myorg:myteam', ], (err) => { - t.ifError(err, 'npm access') + t.error(err, 'npm access') t.ok('should successfully access ls-packages on team') }) }) 
-test('access ls-collaborators on current', (t) => { +t.test('access ls-collaborators on current', (t) => { t.plan(3) const prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'yargs', }), }) - const Access = requireInject('../../lib/access.js', { + const Access = t.mock('../../lib/access.js', { libnpmaccess: { lsCollaborators: (spec) => { t.equal(spec, 'yargs', 'should use expected spec') return {} }, }, - '../../lib/utils/output.js': () => null, }) - const access = new Access({ prefix }) + const access = new Access({ prefix, ...npm }) access.exec([ 'ls-collaborators', ], (err) => { - t.ifError(err, 'npm access') + t.error(err, 'npm access') t.ok('should successfully access ls-collaborators on current') }) }) -test('access ls-collaborators on spec', (t) => { +t.test('access ls-collaborators on spec', (t) => { t.plan(3) - const Access = requireInject('../../lib/access.js', { + const Access = t.mock('../../lib/access.js', { libnpmaccess: { lsCollaborators: (spec) => { t.equal(spec, 'yargs', 'should use expected spec') return {} }, }, - '../../lib/utils/output.js': () => null, }) - const access = new Access({}) + const access = new Access(npm) access.exec([ 'ls-collaborators', 'yargs', ], (err) => { - t.ifError(err, 'npm access') + t.error(err, 'npm access') t.ok('should successfully access ls-packages with no team') }) }) diff --git a/test/lib/adduser.js b/test/lib/adduser.js index 32fd97c1bd46d..a66623e668282 100644 --- a/test/lib/adduser.js +++ b/test/lib/adduser.js @@ -1,5 +1,4 @@ -const requireInject = require('require-inject') -const { test } = require('tap') +const t = require('tap') const { getCredentialsByURI, setCredentialsByURI } = require('@npmcli/config').prototype @@ -26,7 +25,6 @@ const authDummy = (npm, options) => { username: 'u', password: 'p', email: 'u@npmjs.org', - alwaysAuth: false, }, }) } @@ -61,26 +59,30 @@ const npm = { }, setCredentialsByURI, }, + output: msg => { + result = msg + }, } -const AddUser = requireInject('../../lib/adduser.js', { +const AddUser = t.mock('../../lib/adduser.js', { npmlog: { disableProgress: () => null, notice: (_, msg) => { registryOutput = msg }, }, - '../../lib/utils/output.js': msg => { - result = msg - }, '../../lib/auth/legacy.js': authDummy, }) const adduser = new AddUser(npm) -test('simple login', (t) => { +t.test('usage', (t) => { + t.match(adduser.usage, 'adduser', 'usage has command name in it') + t.end() +}) +t.test('simple login', (t) => { adduser.exec([], (err) => { - t.ifError(err, 'npm adduser') + t.error(err, 'npm adduser') t.equal( registryOutput, @@ -88,29 +90,31 @@ test('simple login', (t) => { 'should have correct message result' ) - t.deepEqual( + t.same( deletedConfig, { _token: 'user', _password: 'user', username: 'user', - email: 'user', _auth: 'user', _authtoken: 'user', + '-authtoken': 'user', _authToken: 'user', - '//registry.npmjs.org/:-authtoken': undefined, + '//registry.npmjs.org/:-authtoken': 'user', '//registry.npmjs.org/:_authToken': 'user', + '//registry.npmjs.org/:_authtoken': 'user', + '//registry.npmjs.org/:always-auth': 'user', + '//registry.npmjs.org/:email': 'user', }, 'should delete token in user config' ) - t.deepEqual( + t.same( setConfig, { '//registry.npmjs.org/:_password': { value: 'cA==', where: 'user' }, '//registry.npmjs.org/:username': { value: 'u', where: 'user' }, - '//registry.npmjs.org/:email': { value: 'u@npmjs.org', where: 'user' }, - '//registry.npmjs.org/:always-auth': { value: false, where: 'user' }, + email: { value: 'u@npmjs.org', where: 'user' }, }, 'should set 
expected user configs' ) @@ -129,7 +133,7 @@ test('simple login', (t) => { }) }) -test('bad auth type', (t) => { +t.test('bad auth type', (t) => { _flatOptions.authType = 'foo' adduser.exec([], (err) => { @@ -147,13 +151,13 @@ test('bad auth type', (t) => { }) }) -test('scoped login', (t) => { +t.test('scoped login', (t) => { _flatOptions.scope = '@myscope' adduser.exec([], (err) => { - t.ifError(err, 'npm adduser') + t.error(err, 'npm adduser') - t.deepEqual( + t.same( setConfig['@myscope:registry'], { value: 'https://registry.npmjs.org/', where: 'user' }, 'should set scoped registry config' @@ -167,14 +171,14 @@ test('scoped login', (t) => { }) }) -test('scoped login with valid scoped registry config', (t) => { +t.test('scoped login with valid scoped registry config', (t) => { _flatOptions['@myscope:registry'] = 'https://diff-registry.npmjs.com/' _flatOptions.scope = '@myscope' adduser.exec([], (err) => { - t.ifError(err, 'npm adduser') + t.error(err, 'npm adduser') - t.deepEqual( + t.same( setConfig['@myscope:registry'], { value: 'https://diff-registry.npmjs.com/', where: 'user' }, 'should keep scoped registry config' @@ -189,7 +193,7 @@ test('scoped login with valid scoped registry config', (t) => { }) }) -test('save config failure', (t) => { +t.test('save config failure', (t) => { failSave = true adduser.exec([], (err) => { diff --git a/test/lib/audit.js b/test/lib/audit.js index 6fd9c8a2c9b8f..561765a0270b5 100644 --- a/test/lib/audit.js +++ b/test/lib/audit.js @@ -1,5 +1,5 @@ const t = require('tap') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') t.test('should audit using Arborist', t => { let ARB_ARGS = null @@ -9,13 +9,16 @@ t.test('should audit using Arborist', t => { let OUTPUT_CALLED = false let ARB_OBJ = null - const npm = { + const npm = mockNpm({ prefix: 'foo', - flatOptions: { + config: { json: false, }, - } - const Audit = requireInject('../../lib/audit.js', { + output: () => { + OUTPUT_CALLED = true + }, + }) + const Audit = t.mock('../../lib/audit.js', { 'npm-audit-report': () => { AUDIT_REPORT_CALLED = true return { @@ -37,9 +40,6 @@ t.test('should audit using Arborist', t => { REIFY_FINISH_CALLED = true }, - '../../lib/utils/output.js': () => { - OUTPUT_CALLED = true - }, }) const audit = new Audit(npm) @@ -65,14 +65,15 @@ t.test('should audit using Arborist', t => { }) t.test('should audit - json', t => { - const npm = { + const npm = mockNpm({ prefix: 'foo', - flatOptions: { + config: { json: true, }, - } + output: () => {}, + }) - const Audit = requireInject('../../lib/audit.js', { + const Audit = t.mock('../../lib/audit.js', { 'npm-audit-report': () => ({ report: 'there are vulnerabilities', exitCode: 0, @@ -83,7 +84,6 @@ t.test('should audit - json', t => { } }, '../../lib/utils/reify-output.js': () => {}, - '../../lib/utils/output.js': () => {}, }) const audit = new Audit(npm) @@ -98,17 +98,23 @@ t.test('report endpoint error', t => { t.test(`json=${json}`, t => { const OUTPUT = [] const LOGS = [] - const npm = { + const npm = mockNpm({ prefix: 'foo', command: 'audit', + config: { + json, + }, flatOptions: { json, }, log: { warn: (...warning) => LOGS.push(warning), }, - } - const Audit = requireInject('../../lib/audit.js', { + output: (...msg) => { + OUTPUT.push(msg) + }, + }) + const Audit = t.mock('../../lib/audit.js', { 'npm-audit-report': () => { throw new Error('should not call audit report when there are errors') }, @@ -130,9 +136,6 @@ t.test('report endpoint error', t => { } }, 
'../../lib/utils/reify-output.js': () => {}, - '../../lib/utils/output.js': (...msg) => { - OUTPUT.push(msg) - }, }) const audit = new Audit(npm) diff --git a/test/lib/auth/legacy.js b/test/lib/auth/legacy.js index f5297c58179c6..d06f9535742fb 100644 --- a/test/lib/auth/legacy.js +++ b/test/lib/auth/legacy.js @@ -1,12 +1,11 @@ -const requireInject = require('require-inject') -const { test } = require('tap') +const t = require('tap') let log = '' const token = '24528a24f240' const profile = {} const read = {} -const legacy = requireInject('../../../lib/auth/legacy.js', { +const legacy = t.mock('../../../lib/auth/legacy.js', { npmlog: { info: (...msgs) => { log += msgs.join(' ') @@ -26,7 +25,7 @@ const npm = { }, } -test('login using username/password with token result', async (t) => { +t.test('login using username/password with token result', async (t) => { profile.login = () => { return { token } } @@ -57,7 +56,7 @@ test('login using username/password with token result', async (t) => { 'should have correct message result' ) - t.deepEqual( + t.same( newCreds, { token }, 'should return expected obj from profile.login' @@ -67,7 +66,7 @@ test('login using username/password with token result', async (t) => { delete profile.login }) -test('login using username/password with user info result', async (t) => { +t.test('login using username/password with user info result', async (t) => { profile.login = () => { return null } @@ -92,7 +91,7 @@ test('login using username/password with user info result', async (t) => { 'should have correct message result' ) - t.deepEqual( + t.same( newCreds, { username: 'u', @@ -107,7 +106,7 @@ test('login using username/password with user info result', async (t) => { delete profile.login }) -test('login otp requested', async (t) => { +t.test('login otp requested', async (t) => { t.plan(5) profile.login = () => Promise.reject(Object.assign( @@ -143,7 +142,7 @@ test('login otp requested', async (t) => { 'should have correct message result' ) - t.deepEqual( + t.same( newCreds, { token }, 'should return token from loginCouch result' @@ -155,7 +154,7 @@ test('login otp requested', async (t) => { delete read.otp }) -test('login missing basic credential info', async (t) => { +t.test('login missing basic credential info', async (t) => { profile.login = () => Promise.reject(Object.assign( new Error('missing info'), { code: 'ERROR' } @@ -178,7 +177,7 @@ test('login missing basic credential info', async (t) => { delete profile.login }) -test('create new user when user not found', async (t) => { +t.test('create new user when user not found', async (t) => { t.plan(6) profile.login = () => Promise.reject(Object.assign( @@ -219,7 +218,7 @@ test('create new user when user not found', async (t) => { 'should have correct message result' ) - t.deepEqual( + t.same( newCreds, { token }, 'should return expected obj from profile.login' @@ -230,7 +229,7 @@ test('create new user when user not found', async (t) => { delete profile.login }) -test('prompts for user info if required', async (t) => { +t.test('prompts for user info if required', async (t) => { t.plan(4) profile.login = async (opener, prompt, opts) => { @@ -266,7 +265,7 @@ test('prompts for user info if required', async (t) => { 'should have correct message result' ) - t.deepEqual( + t.same( newCreds, { username: 'foo', @@ -284,7 +283,7 @@ test('prompts for user info if required', async (t) => { delete read.email }) -test('request otp when creating new user', async (t) => { +t.test('request otp when creating new user', async (t) => 
{ t.plan(3) profile.login = () => Promise.reject(Object.assign( @@ -322,7 +321,7 @@ test('request otp when creating new user', async (t) => { delete read.otp }) -test('unknown error during user creation', async (t) => { +t.test('unknown error during user creation', async (t) => { profile.login = () => Promise.reject(Object.assign( new Error('missing info'), { code: 'ERROR' } @@ -352,7 +351,7 @@ test('unknown error during user creation', async (t) => { delete profile.login }) -test('open url error', async (t) => { +t.test('open url error', async (t) => { profile.login = async (opener, prompt, opts) => { await opener() } @@ -374,7 +373,7 @@ test('open url error', async (t) => { delete profile.login }) -test('login no credentials provided', async (t) => { +t.test('login no credentials provided', async (t) => { profile.login = () => ({ token }) await legacy(npm, { @@ -398,7 +397,7 @@ test('login no credentials provided', async (t) => { delete profile.login }) -test('scoped login', async (t) => { +t.test('scoped login', async (t) => { profile.login = () => ({ token }) const { message } = await legacy(npm, { diff --git a/test/lib/auth/oauth.js b/test/lib/auth/oauth.js index c2f4c3443a234..0c317fb9a130e 100644 --- a/test/lib/auth/oauth.js +++ b/test/lib/auth/oauth.js @@ -1,7 +1,6 @@ -const requireInject = require('require-inject') -const { test } = require('tap') +const t = require('tap') -test('oauth login', (t) => { +t.test('oauth login', (t) => { t.plan(3) const oauthOpts = { creds: {}, @@ -17,7 +16,7 @@ test('oauth login', (t) => { }, }, } - const oauth = requireInject('../../../lib/auth/oauth.js', { + const oauth = t.mock('../../../lib/auth/oauth.js', { '../../../lib/auth/sso.js': (npm, opts) => { t.equal(opts, oauthOpts, 'should forward opts') }, diff --git a/test/lib/auth/saml.js b/test/lib/auth/saml.js index b8c21f649edc0..1558e0db8eb29 100644 --- a/test/lib/auth/saml.js +++ b/test/lib/auth/saml.js @@ -1,7 +1,6 @@ -const requireInject = require('require-inject') -const { test } = require('tap') +const t = require('tap') -test('saml login', (t) => { +t.test('saml login', (t) => { t.plan(3) const samlOpts = { creds: {}, @@ -17,7 +16,7 @@ test('saml login', (t) => { }, }, } - const saml = requireInject('../../../lib/auth/saml.js', { + const saml = t.mock('../../../lib/auth/saml.js', { '../../../lib/auth/sso.js': (npm, opts) => { t.equal(opts, samlOpts, 'should forward opts') }, diff --git a/test/lib/auth/sso.js b/test/lib/auth/sso.js index 9d77e7c274025..11be002aee345 100644 --- a/test/lib/auth/sso.js +++ b/test/lib/auth/sso.js @@ -1,5 +1,4 @@ -const requireInject = require('require-inject') -const { test } = require('tap') +const t = require('tap') let log = '' let warn = '' @@ -11,7 +10,7 @@ const token = '24528a24f240' const SSO_URL = 'https://registry.npmjs.org/{SSO_URL}' const profile = {} const npmFetch = {} -const sso = requireInject('../../../lib/auth/sso.js', { +const sso = t.mock('../../../lib/auth/sso.js', { npmlog: { info: (...msgs) => { log += msgs.join(' ') + '\n' @@ -22,14 +21,12 @@ const sso = requireInject('../../../lib/auth/sso.js', { }, 'npm-profile': profile, 'npm-registry-fetch': npmFetch, - '../../../lib/utils/open-url.js': (npm, url, msg, cb) => { - if (url) - cb() - else { - cb(Object.assign( + '../../../lib/utils/open-url.js': async (npm, url, msg) => { + if (!url) { + throw Object.assign( new Error('failed open url'), { code: 'ERROR' } - )) + ) } }, '../../../lib/utils/otplease.js': (opts, fn) => { @@ -48,7 +45,7 @@ const npm = { flatOptions: _flatOptions, } 
-test('empty login', async (t) => { +t.test('empty login', async (t) => { _flatOptions.ssoType = false await t.rejects( @@ -68,13 +65,13 @@ test('empty login', async (t) => { warn = '' }) -test('simple login', async (t) => { +t.test('simple login', async (t) => { t.plan(6) profile.loginCouch = (username, password, opts) => { t.equal(username, 'npm_oauth_auth_dummy_user', 'should use dummy user') t.equal(password, 'placeholder', 'should use dummy password') - t.deepEqual( + t.same( opts, { creds: {}, @@ -111,7 +108,7 @@ test('simple login', async (t) => { 'should have correct logged info msg' ) - t.deepEqual( + t.same( newCreds, { token }, 'should return expected resulting credentials' @@ -123,7 +120,7 @@ test('simple login', async (t) => { delete npmFetch.json }) -test('polling retry', async (t) => { +t.test('polling retry', async (t) => { t.plan(3) profile.loginCouch = () => ({ token, sso: SSO_URL }) @@ -170,7 +167,7 @@ test('polling retry', async (t) => { delete npmFetch.json }) -test('polling error', async (t) => { +t.test('polling error', async (t) => { profile.loginCouch = () => ({ token, sso: SSO_URL }) npmFetch.json = () => Promise.reject(Object.assign( new Error('unknown error'), @@ -193,7 +190,7 @@ test('polling error', async (t) => { delete npmFetch.json }) -test('no token retrieved from loginCouch', async (t) => { +t.test('no token retrieved from loginCouch', async (t) => { profile.loginCouch = () => ({}) await t.rejects( @@ -211,7 +208,7 @@ test('no token retrieved from loginCouch', async (t) => { delete profile.loginCouch }) -test('no sso url retrieved from loginCouch', async (t) => { +t.test('no sso url retrieved from loginCouch', async (t) => { profile.loginCouch = () => Promise.resolve({ token }) await t.rejects( @@ -229,7 +226,7 @@ test('no sso url retrieved from loginCouch', async (t) => { delete profile.loginCouch }) -test('scoped login', async (t) => { +t.test('scoped login', async (t) => { profile.loginCouch = () => ({ token, sso: SSO_URL }) npmFetch.json = () => Promise.resolve({ username: 'foo' }) @@ -254,7 +251,7 @@ test('scoped login', async (t) => { 'should have correct logged info msg' ) - t.deepEqual( + t.same( newCreds, { token }, 'should return expected resulting credentials' diff --git a/test/lib/bin.js b/test/lib/bin.js index e96eb91af9708..8ceca8280f52d 100644 --- a/test/lib/bin.js +++ b/test/lib/bin.js @@ -1,28 +1,32 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') +const { fake: mockNpm } = require('../fixtures/mock-npm') -test('bin', (t) => { - t.plan(3) +t.test('bin', (t) => { + t.plan(4) const dir = '/bin/dir' - const Bin = requireInject('../../lib/bin.js', { - '../../lib/utils/output.js': (output) => { + const Bin = require('../../lib/bin.js') + + const npm = mockNpm({ + bin: dir, + config: { global: false }, + output: (output) => { t.equal(output, dir, 'prints the correct directory') }, }) - - const bin = new Bin({ bin: dir, flatOptions: { global: false } }) + const bin = new Bin(npm) + t.match(bin.usage, 'bin', 'usage has command name in it') bin.exec([], (err) => { - t.ifError(err, 'npm bin') + t.error(err, 'npm bin') t.ok('should have printed directory') }) }) -test('bin -g', (t) => { +t.test('bin -g', (t) => { t.plan(3) const consoleError = console.error - t.tearDown(() => { + t.teardown(() => { console.error = consoleError }) @@ -31,25 +35,29 @@ test('bin -g', (t) => { } const dir = '/bin/dir' - const Bin = requireInject('../../lib/bin.js', { + const Bin = t.mock('../../lib/bin.js', { 
'../../lib/utils/path.js': [dir], - '../../lib/utils/output.js': (output) => { + }) + + const npm = mockNpm({ + bin: dir, + config: { global: true }, + output: (output) => { t.equal(output, dir, 'prints the correct directory') }, }) - - const bin = new Bin({ bin: dir, flatOptions: { global: true } }) + const bin = new Bin(npm) bin.exec([], (err) => { - t.ifError(err, 'npm bin') + t.error(err, 'npm bin') t.ok('should have printed directory') }) }) -test('bin -g (not in path)', (t) => { +t.test('bin -g (not in path)', (t) => { t.plan(4) const consoleError = console.error - t.tearDown(() => { + t.teardown(() => { console.error = consoleError }) @@ -58,16 +66,20 @@ test('bin -g (not in path)', (t) => { } const dir = '/bin/dir' - const Bin = requireInject('../../lib/bin.js', { + const Bin = t.mock('../../lib/bin.js', { '../../lib/utils/path.js': ['/not/my/dir'], - '../../lib/utils/output.js': (output) => { + }) + const npm = mockNpm({ + bin: dir, + config: { global: true }, + output: (output) => { t.equal(output, dir, 'prints the correct directory') }, }) - const bin = new Bin({ bin: dir, flatOptions: { global: true } }) + const bin = new Bin(npm) bin.exec([], (err) => { - t.ifError(err, 'npm bin') + t.error(err, 'npm bin') t.ok('should have printed directory') }) }) diff --git a/test/lib/birthday.js b/test/lib/birthday.js index c818223fb51e5..05660d6fa3f20 100644 --- a/test/lib/birthday.js +++ b/test/lib/birthday.js @@ -1,20 +1,23 @@ const t = require('tap') -const npm = { - flatOptions: { - yes: false, - package: [], - }, +const { fake: mockNpm } = require('../fixtures/mock-npm') + +const config = { + yes: false, + package: [], +} +const npm = mockNpm({ + config, commands: { exec: (args, cb) => { - t.equal(npm.flatOptions.yes, true, 'should say yes') - t.strictSame(npm.flatOptions.package, ['@npmcli/npm-birthday'], + t.equal(npm.config.get('yes'), true, 'should say yes') + t.strictSame(npm.config.get('package'), ['@npmcli/npm-birthday'], 'uses correct package') t.strictSame(args, ['npm-birthday'], 'called with correct args') t.match(cb, Function, 'callback is a function') cb() }, }, -} +}) const Birthday = require('../../lib/birthday.js') const birthday = new Birthday(npm) diff --git a/test/lib/bugs.js b/test/lib/bugs.js index e98131f1132e8..e5b238ffcea13 100644 --- a/test/lib/bugs.js +++ b/test/lib/bugs.js @@ -1,6 +1,5 @@ const t = require('tap') -const requireInject = require('require-inject') const pacote = { manifest: async (spec, options) => { return spec === 'nobugs' ? { @@ -32,6 +31,16 @@ const pacote = { version: '1.2.3', repository: { url: 'https://github.com/foo/repoobj' }, } + : spec === 'mailtest' ? { + name: 'mailtest', + version: '3.7.4', + bugs: { email: 'hello@example.com' }, + } + : spec === 'secondmailtest' ? { + name: 'secondmailtest', + version: '0.1.1', + bugs: { email: 'ABC432abc@a.b.example.net' }, + } : spec === '.' ? 
{ name: 'thispkg', version: '1.2.3', @@ -48,14 +57,19 @@ const openUrl = async (npm, url, errMsg) => { opened[url]++ } -const Bugs = requireInject('../../lib/bugs.js', { +const Bugs = t.mock('../../lib/bugs.js', { pacote, '../../lib/utils/open-url.js': openUrl, }) const bugs = new Bugs({ flatOptions: {} }) -t.test('open bugs urls', t => { +t.test('usage', (t) => { + t.match(bugs.usage, 'bugs', 'usage has command name in it') + t.end() +}) + +t.test('open bugs urls & emails', t => { const expect = { nobugs: 'https://www.npmjs.com/package/nobugs', 'bugsobj-nourl': 'https://www.npmjs.com/package/bugsobj-nourl', @@ -63,6 +77,8 @@ t.test('open bugs urls', t => { bugsobj: 'https://bugzilla.localhost/bugsobj', repourl: 'https://github.com/foo/repourl/issues', repoobj: 'https://github.com/foo/repoobj/issues', + mailtest: 'mailto:hello@example.com', + secondmailtest: 'mailto:ABC432abc@a.b.example.net', '.': 'https://example.com', } const keys = Object.keys(expect) diff --git a/test/lib/cache.js b/test/lib/cache.js index 67499f37e9f30..d3d6f5b8845de 100644 --- a/test/lib/cache.js +++ b/test/lib/cache.js @@ -1,17 +1,10 @@ const t = require('tap') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm.js') const path = require('path') const usageUtil = () => 'usage instructions' -const flatOptions = { - force: false, -} - -const npm = { - flatOptions, - cache: '/fake/path', -} +let outputOutput = [] let rimrafPath = '' const rimraf = (path, cb) => { @@ -41,11 +34,6 @@ const pacote = { }, } -let outputOutput = [] -const output = (msg) => { - outputOutput.push(msg) -} - const cacacheVerifyStats = { keptSize: 100, verifiedContent: 1, @@ -58,20 +46,27 @@ const cacache = { }, } -const Cache = requireInject('../../lib/cache.js', { +const Cache = t.mock('../../lib/cache.js', { cacache, npmlog, pacote, rimraf, - '../../lib/utils/output.js': output, '../../lib/utils/usage.js': usageUtil, }) +const npm = mockNpm({ + cache: '/fake/path', + flatOptions: { force: false }, + config: { force: false }, + output: (msg) => { + outputOutput.push(msg) + }, +}) const cache = new Cache(npm) t.test('cache no args', t => { cache.exec([], err => { - t.equal(err.message, 'usage instructions', 'should throw usage instructions') + t.match(err.message, 'usage instructions', 'should throw usage instructions') t.end() }) }) @@ -84,14 +79,16 @@ t.test('cache clean', t => { }) t.test('cache clean (force)', t => { - flatOptions.force = true + npm.config.set('force', true) + npm.flatOptions.force = true t.teardown(() => { rimrafPath = '' - flatOptions.force = false + npm.config.force = false + npm.flatOptions.force = false }) cache.exec(['clear'], err => { - t.ifError(err) + t.error(err) t.equal(rimrafPath, path.join(npm.cache, '_cacache')) t.end() }) @@ -126,32 +123,33 @@ t.test('cache add pkg only', t => { }) cache.exec(['add', 'mypkg'], err => { - t.ifError(err) + t.error(err) t.strictSame(logOutput, [ ['silly', 'cache add', 'args', ['mypkg']], ['silly', 'cache add', 'spec', 'mypkg'], ], 'logs correctly') t.equal(tarballStreamSpec, 'mypkg', 'passes the correct spec to pacote') - t.same(tarballStreamOpts, flatOptions, 'passes the correct options to pacote') + t.same(tarballStreamOpts, npm.flatOptions, 'passes the correct options to pacote') t.end() }) }) -t.test('cache add pkg w/ spec modifier', t => { +t.test('cache add multiple pkgs', t => { t.teardown(() => { logOutput = [] tarballStreamSpec = '' tarballStreamOpts = {} }) - cache.exec(['add', 'mypkg', 'latest'], err => { - 
t.ifError(err) + cache.exec(['add', 'mypkg', 'anotherpkg'], err => { + t.error(err) t.strictSame(logOutput, [ - ['silly', 'cache add', 'args', ['mypkg', 'latest']], - ['silly', 'cache add', 'spec', 'mypkg@latest'], + ['silly', 'cache add', 'args', ['mypkg', 'anotherpkg']], + ['silly', 'cache add', 'spec', 'mypkg'], + ['silly', 'cache add', 'spec', 'anotherpkg'], ], 'logs correctly') - t.equal(tarballStreamSpec, 'mypkg@latest', 'passes the correct spec to pacote') - t.same(tarballStreamOpts, flatOptions, 'passes the correct options to pacote') + t.equal(tarballStreamSpec, 'anotherpkg', 'passes the correct spec to pacote') + t.same(tarballStreamOpts, npm.flatOptions, 'passes the correct options to pacote') t.end() }) }) @@ -162,7 +160,7 @@ t.test('cache verify', t => { }) cache.exec(['verify'], err => { - t.ifError(err) + t.error(err) t.match(outputOutput, [ `Cache verified and compressed (${path.join(npm.cache, '_cacache')})`, 'Content verified: 1 (100 bytes)', @@ -189,7 +187,7 @@ t.test('cache verify w/ extra output', t => { }) cache.exec(['check'], err => { - t.ifError(err) + t.error(err) t.match(outputOutput, [ `Cache verified and compressed (~${path.join('/fake/path', '_cacache')})`, 'Content verified: 1 (100 bytes)', diff --git a/test/lib/ci.js b/test/lib/ci.js index 3419218ef9d8b..b6b2af9c111db 100644 --- a/test/lib/ci.js +++ b/test/lib/ci.js @@ -2,14 +2,14 @@ const fs = require('fs') const util = require('util') const readdir = util.promisify(fs.readdir) -const { test } = require('tap') +const t = require('tap') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') -test('should ignore scripts with --ignore-scripts', (t) => { +t.test('should ignore scripts with --ignore-scripts', (t) => { const SCRIPTS = [] let REIFY_CALLED = false - const CI = requireInject('../../lib/ci.js', { + const CI = t.mock('../../lib/ci.js', { '../../lib/utils/reify-finish.js': async () => {}, '@npmcli/run-script': ({ event }) => { SCRIPTS.push(event) @@ -22,17 +22,15 @@ test('should ignore scripts with --ignore-scripts', (t) => { }, }) - const ci = new CI({ + const npm = mockNpm({ globalDir: 'path/to/node_modules/', prefix: 'foo', - flatOptions: { - global: false, - ignoreScripts: true, - }, config: { - get: () => false, + global: false, + 'ignore-scripts': true, }, }) + const ci = new CI(npm) ci.exec([], er => { if (er) @@ -43,7 +41,7 @@ test('should ignore scripts with --ignore-scripts', (t) => { }) }) -test('should use Arborist and run-script', (t) => { +t.test('should use Arborist and run-script', (t) => { const scripts = [ 'preinstall', 'install', @@ -89,7 +87,7 @@ test('should use Arborist and run-script', (t) => { const expectRimrafs = 3 let actualRimrafs = 0 - const CI = requireInject('../../lib/ci.js', { + const CI = t.mock('../../lib/ci.js', { '../../lib/utils/reify-finish.js': async () => {}, '@npmcli/run-script': opts => { t.match(opts, { event: scripts.shift() }) @@ -115,12 +113,13 @@ test('should use Arborist and run-script', (t) => { }, }) - const ci = new CI({ + const npm = mockNpm({ prefix: path, - flatOptions: { + config: { global: false, }, }) + const ci = new CI(npm) ci.exec(null, er => { if (er) @@ -134,8 +133,8 @@ test('should use Arborist and run-script', (t) => { }) }) -test('should pass flatOptions to Arborist.reify', (t) => { - const CI = requireInject('../../lib/ci.js', { +t.test('should pass flatOptions to Arborist.reify', (t) => { + const CI = t.mock('../../lib/ci.js', { '../../lib/utils/reify-finish.js': async () => {}, 
'@npmcli/run-script': opts => {}, '@npmcli/arborist': function () { @@ -146,25 +145,26 @@ test('should pass flatOptions to Arborist.reify', (t) => { } }, }) - const ci = new CI({ + const npm = mockNpm({ prefix: 'foo', flatOptions: { production: true, }, }) + const ci = new CI(npm) ci.exec(null, er => { if (er) throw er }) }) -test('should throw if package-lock.json or npm-shrinkwrap missing', (t) => { +t.test('should throw if package-lock.json or npm-shrinkwrap missing', (t) => { const testDir = t.testdir({ 'index.js': 'some contents', 'package.json': 'some info', }) - const CI = requireInject('../../lib/ci.js', { + const CI = t.mock('../../lib/ci.js', { '@npmcli/run-script': opts => {}, '../../lib/utils/reify-finish.js': async () => {}, npmlog: { @@ -173,45 +173,47 @@ test('should throw if package-lock.json or npm-shrinkwrap missing', (t) => { }, }, }) - const ci = new CI({ + const npm = mockNpm({ prefix: testDir, - flatOptions: { + config: { global: false, }, }) + const ci = new CI(npm) ci.exec(null, (err, res) => { - t.ok(err, 'throws error when there is no package-lock') + t.match(err, /package-lock.json/, 'throws error when there is no package-lock') t.notOk(res) t.end() }) }) -test('should throw ECIGLOBAL', (t) => { - const CI = requireInject('../../lib/ci.js', { +t.test('should throw ECIGLOBAL', (t) => { + const CI = t.mock('../../lib/ci.js', { '@npmcli/run-script': opts => {}, '../../lib/utils/reify-finish.js': async () => {}, }) - const ci = new CI({ + const npm = mockNpm({ prefix: 'foo', - flatOptions: { + config: { global: true, }, }) + const ci = new CI(npm) ci.exec(null, (err, res) => { - t.equals(err.code, 'ECIGLOBAL', 'throws error with global packages') + t.equal(err.code, 'ECIGLOBAL', 'throws error with global packages') t.notOk(res) t.end() }) }) -test('should remove existing node_modules before installing', (t) => { +t.test('should remove existing node_modules before installing', (t) => { const testDir = t.testdir({ node_modules: { 'some-file': 'some contents', }, }) - const CI = requireInject('../../lib/ci.js', { + const CI = t.mock('../../lib/ci.js', { '@npmcli/run-script': opts => {}, '../../lib/utils/reify-finish.js': async () => {}, '@npmcli/arborist': function () { @@ -227,12 +229,13 @@ test('should remove existing node_modules before installing', (t) => { }, }) - const ci = new CI({ + const npm = mockNpm({ prefix: testDir, - flatOptions: { + config: { global: false, }, }) + const ci = new CI(npm) ci.exec(null, er => { if (er) diff --git a/test/lib/cli.js b/test/lib/cli.js index b5441be1e44d8..b85c981cd008b 100644 --- a/test/lib/cli.js +++ b/test/lib/cli.js @@ -1,32 +1,22 @@ const t = require('tap') -let LOAD_ERROR = null -const npmock = { - version: '99.99.99', - load: cb => cb(LOAD_ERROR), - argv: [], - config: { - settings: {}, - get: (k) => npmock.config.settings[k], - set: (k, v) => { - npmock.config.settings[k] = v - }, - }, - commands: {}, -} +const { real: mockNpm } = require('../fixtures/mock-npm.js') const unsupportedMock = { checkForBrokenNode: () => {}, checkForUnsupportedNode: () => {}, } -let errorHandlerCalled = null -const errorHandlerMock = (...args) => { - errorHandlerCalled = args +let exitHandlerCalled = null +let exitHandlerNpm = null +let exitHandlerCb +const exitHandlerMock = (...args) => { + exitHandlerCalled = args + if (exitHandlerCb) + exitHandlerCb() } -let errorHandlerExitCalled = null -errorHandlerMock.exit = code => { - errorHandlerExitCalled = code +exitHandlerMock.setNpm = npm => { + exitHandlerNpm = npm } const logs = [] @@ -36,146 
+26,122 @@ const npmlogMock = { info: (...msg) => logs.push(['info', ...msg]), } -const requireInject = require('require-inject') -const cli = requireInject.installGlobally('../../lib/cli.js', { - '../../lib/npm.js': npmock, +const cliMock = (npm) => t.mock('../../lib/cli.js', { + '../../lib/npm.js': npm, + '../../lib/utils/update-notifier.js': async () => null, '../../lib/utils/unsupported.js': unsupportedMock, - '../../lib/utils/error-handler.js': errorHandlerMock, + '../../lib/utils/exit-handler.js': exitHandlerMock, npmlog: npmlogMock, }) -t.test('print the version, and treat npm_g to npm -g', t => { - const { log } = console - const consoleLogs = [] - console.log = (...msg) => consoleLogs.push(msg) - const { argv } = process - const proc = { - argv: ['node', 'npm_g', '-v'], - version: '420.69.lol', +const processMock = (proc) => { + const mocked = { + ...process, on: () => {}, + ...proc, } - process.argv = proc.argv - npmock.config.settings.version = true + // nopt looks at process directly + process.argv = mocked.argv + return mocked +} + +const { argv } = process + +t.afterEach(() => { + logs.length = 0 + process.argv = argv + exitHandlerCalled = null + exitHandlerNpm = null +}) - cli(proc) +t.test('print the version, and treat npm_g as npm -g', async t => { + const proc = processMock({ + argv: ['node', 'npm_g', '-v'], + version: process.version, + }) - t.strictSame(npmock.argv, []) - t.strictSame(proc.argv, ['node', 'npm', '-g', '-v']) + const { npm, outputs } = mockNpm(t) + const cli = cliMock(npm) + await cli(proc) + + t.strictSame(proc.argv, ['node', 'npm', '-g', '-v'], 'npm process.argv was rewritten') + t.strictSame(process.argv, ['node', 'npm', '-g', '-v'], 'system process.argv was rewritten') t.strictSame(logs, [ 'pause', - ['verbose', 'cli', ['node', 'npm', '-g', '-v']], - ['info', 'using', 'npm@%s', '99.99.99'], - ['info', 'using', 'node@%s', '420.69.lol'], + ['verbose', 'cli', proc.argv], + ['info', 'using', 'npm@%s', npm.version], + ['info', 'using', 'node@%s', process.version], ]) - t.strictSame(consoleLogs, [['99.99.99']]) - t.strictSame(errorHandlerExitCalled, 0) - - delete npmock.config.settings.version - process.argv = argv - console.log = log - npmock.argv.length = 0 - proc.argv.length = 0 - logs.length = 0 - consoleLogs.length = 0 - errorHandlerExitCalled = null - - t.end() + t.strictSame(outputs, [[npm.version]]) + t.strictSame(exitHandlerCalled, []) }) -t.test('calling with --versions calls npm version with no args', t => { - const { log } = console - const consoleLogs = [] - console.log = (...msg) => consoleLogs.push(msg) - const processArgv = process.argv - const proc = { +t.test('calling with --versions calls npm version with no args', async t => { + const proc = processMock({ argv: ['node', 'npm', 'install', 'or', 'whatever', '--versions'], - on: () => {}, - } - process.argv = proc.argv - npmock.config.set('versions', true) - - t.teardown(() => { - delete npmock.config.settings.versions - process.argv = processArgv - console.log = log - npmock.argv.length = 0 - proc.argv.length = 0 - logs.length = 0 - consoleLogs.length = 0 - errorHandlerExitCalled = null - delete npmock.commands.version }) + const { npm, outputs } = mockNpm(t) + const cli = cliMock(npm) - npmock.commands.version = (args, cb) => { - t.equal(proc.title, 'npm') - t.strictSame(npmock.argv, []) - t.strictSame(proc.argv, ['node', 'npm', 'install', 'or', 'whatever', '--versions']) - t.strictSame(logs, [ - 'pause', - ['verbose', 'cli', ['node', 'npm', 'install', 'or', 'whatever', '--versions']], - 
['info', 'using', 'npm@%s', '99.99.99'], - ['info', 'using', 'node@%s', undefined], - ]) - - t.strictSame(consoleLogs, []) - t.strictSame(errorHandlerExitCalled, null) - - t.strictSame(args, []) - t.end() + let versionArgs + npm.commands.version = (args, cb) => { + versionArgs = args + cb() } - cli(proc) + await cli(proc) + t.strictSame(versionArgs, []) + t.equal(proc.title, 'npm') + t.strictSame(npm.argv, []) + t.strictSame(logs, [ + 'pause', + ['verbose', 'cli', proc.argv], + ['info', 'using', 'npm@%s', npm.version], + ['info', 'using', 'node@%s', process.version], + ]) + + t.strictSame(outputs, []) + t.strictSame(exitHandlerCalled, []) }) -t.test('print usage if -h provided', t => { - const { log } = console - const consoleLogs = [] - console.log = (...msg) => consoleLogs.push(msg) - const proc = { - argv: ['node', 'npm', 'asdf'], - on: () => {}, - } - npmock.argv = ['asdf'] - - t.teardown(() => { - console.log = log - npmock.argv.length = 0 - proc.argv.length = 0 - logs.length = 0 - consoleLogs.length = 0 - errorHandlerExitCalled = null - delete npmock.commands.help +t.test('print usage if no params provided', async t => { + const proc = processMock({ + argv: ['node', 'npm'], }) - npmock.commands.help = (args, cb) => { - delete npmock.commands.help - t.equal(proc.title, 'npm') - t.strictSame(args, ['asdf']) - t.strictSame(npmock.argv, ['asdf']) - t.strictSame(proc.argv, ['node', 'npm', 'asdf']) - t.strictSame(logs, [ - 'pause', - ['verbose', 'cli', ['node', 'npm', 'asdf']], - ['info', 'using', 'npm@%s', '99.99.99'], - ['info', 'using', 'node@%s', undefined], - ]) - t.strictSame(consoleLogs, []) - t.strictSame(errorHandlerExitCalled, null) - t.end() - } + const { npm, outputs } = mockNpm(t) + const cli = cliMock(npm) + await cli(proc) + t.match(outputs[0][0], 'Usage:', 'outputs npm usage') + t.match(exitHandlerCalled, [], 'should call exitHandler with no args') + t.ok(exitHandlerNpm, 'exitHandler npm is set') + t.match(proc.exitCode, 1) +}) + +t.test('print usage if non-command param provided', async t => { + const proc = processMock({ + argv: ['node', 'npm', 'tset'], + }) - cli(proc) + const { npm, outputs } = mockNpm(t) + const cli = cliMock(npm) + await cli(proc) + t.match(outputs[0][0], 'Unknown command: "tset"') + t.match(outputs[0][0], 'Did you mean this?') + t.match(exitHandlerCalled, [], 'should call exitHandler with no args') + t.ok(exitHandlerNpm, 'exitHandler npm is set') + t.match(proc.exitCode, 1) }) -t.test('load error calls error handler', t => { - const er = new Error('poop') - LOAD_ERROR = er - const proc = { +t.test('load error calls error handler', async t => { + const proc = processMock({ argv: ['node', 'npm', 'asdf'], - on: () => {}, - } - cli(proc) - t.strictSame(errorHandlerCalled, [er]) - LOAD_ERROR = null - t.end() + }) + + const { npm } = mockNpm(t) + const cli = cliMock(npm) + const er = new Error('test load error') + npm.load = () => Promise.reject(er) + await cli(proc) + t.strictSame(exitHandlerCalled, [er]) }) diff --git a/test/lib/completion.js b/test/lib/completion.js index 89e8134ebb303..4f7d4a5fd6e38 100644 --- a/test/lib/completion.js +++ b/test/lib/completion.js @@ -1,5 +1,4 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') const fs = require('fs') const path = require('path') @@ -46,6 +45,9 @@ const npm = { }, }, }, + output: (line) => { + output.push(line) + }, } const cmdList = { @@ -60,11 +62,14 @@ const cmdList = { plumbing: [], } +// only include a subset so that the snapshots aren't 
huge and +// don't change when we add/remove config definitions. +const definitions = require('../../lib/utils/config/definitions.js') const config = { - types: { - global: Boolean, - browser: [null, Boolean, String], - registry: [null, String], + definitions: { + global: definitions.global, + browser: definitions.browser, + registry: definitions.registry, }, shorthands: { reg: ['--registry'], @@ -75,18 +80,15 @@ const deref = (cmd) => { return cmd } -const Completion = requireInject('../../lib/completion.js', { +const Completion = t.mock('../../lib/completion.js', { '../../lib/utils/cmd-list.js': cmdList, - '../../lib/utils/config.js': config, + '../../lib/utils/config/index.js': config, '../../lib/utils/deref-command.js': deref, '../../lib/utils/is-windows-shell.js': false, - '../../lib/utils/output.js': (line) => { - output.push(line) - }, }) const completion = new Completion(npm) -test('completion completion', async t => { +t.test('completion completion', async t => { const home = process.env.HOME t.teardown(() => { process.env.HOME = home @@ -105,7 +107,7 @@ test('completion completion', async t => { t.end() }) -test('completion completion no known shells', async t => { +t.test('completion completion no known shells', async t => { const home = process.env.HOME t.teardown(() => { process.env.HOME = home @@ -118,14 +120,14 @@ test('completion completion no known shells', async t => { t.end() }) -test('completion completion wrong word count', async t => { +t.test('completion completion wrong word count', async t => { const res = await completion.completion({ w: 3 }) t.strictSame(res, undefined, 'no responses') t.end() }) -test('completion errors in windows without bash', t => { - const Compl = requireInject('../../lib/completion.js', { +t.test('completion errors in windows without bash', t => { + const Compl = t.mock('../../lib/completion.js', { '../../lib/utils/is-windows-shell.js': true, }) @@ -140,7 +142,7 @@ test('completion errors in windows without bash', t => { }) }) -test('dump script when completion is not being attempted', t => { +t.test('dump script when completion is not being attempted', t => { const _write = process.stdout.write const _on = process.stdout.on t.teardown(() => { @@ -173,7 +175,7 @@ test('dump script when completion is not being attempted', t => { }) }) -test('dump script exits correctly when EPIPE is emitted on stdout', t => { +t.test('dump script exits correctly when EPIPE is emitted on stdout', t => { const _write = process.stdout.write const _on = process.stdout.on t.teardown(() => { @@ -206,7 +208,7 @@ test('dump script exits correctly when EPIPE is emitted on stdout', t => { }) }) -test('non EPIPE errors cause failures', t => { +t.test('non EPIPE errors cause failures', t => { const _write = process.stdout.write const _on = process.stdout.on t.teardown(() => { @@ -237,7 +239,7 @@ test('non EPIPE errors cause failures', t => { }) }) -test('completion completes single command name', t => { +t.test('completion completes single command name', t => { process.env.COMP_CWORD = 1 process.env.COMP_LINE = 'npm c' process.env.COMP_POINT = process.env.COMP_LINE.length @@ -259,7 +261,7 @@ test('completion completes single command name', t => { }) }) -test('completion completes command names', t => { +t.test('completion completes command names', t => { process.env.COMP_CWORD = 1 process.env.COMP_LINE = 'npm a' process.env.COMP_POINT = process.env.COMP_LINE.length @@ -281,7 +283,7 @@ test('completion completes command names', t => { }) }) -test('completion of invalid 
command name does nothing', t => { +t.test('completion of invalid command name does nothing', t => { process.env.COMP_CWORD = 1 process.env.COMP_LINE = 'npm compute' process.env.COMP_POINT = process.env.COMP_LINE.length @@ -303,7 +305,7 @@ test('completion of invalid command name does nothing', t => { }) }) -test('handles async completion function', t => { +t.test('handles async completion function', t => { process.env.COMP_CWORD = 2 process.env.COMP_LINE = 'npm promise' process.env.COMP_POINT = process.env.COMP_LINE.length @@ -332,7 +334,7 @@ test('handles async completion function', t => { }) }) -test('completion triggers command completions', t => { +t.test('completion triggers command completions', t => { process.env.COMP_CWORD = 2 process.env.COMP_LINE = 'npm access ' process.env.COMP_POINT = process.env.COMP_LINE.length @@ -361,7 +363,7 @@ test('completion triggers command completions', t => { }) }) -test('completion triggers filtered command completions', t => { +t.test('completion triggers filtered command completions', t => { process.env.COMP_CWORD = 2 process.env.COMP_LINE = 'npm access p' process.env.COMP_POINT = process.env.COMP_LINE.length @@ -390,7 +392,7 @@ test('completion triggers filtered command completions', t => { }) }) -test('completions for commands that return nested arrays are joined', t => { +t.test('completions for commands that return nested arrays are joined', t => { process.env.COMP_CWORD = 2 process.env.COMP_LINE = 'npm completion ' process.env.COMP_POINT = process.env.COMP_LINE.length @@ -419,7 +421,7 @@ test('completions for commands that return nested arrays are joined', t => { }) }) -test('completions for commands that return nothing work correctly', t => { +t.test('completions for commands that return nothing work correctly', t => { process.env.COMP_CWORD = 2 process.env.COMP_LINE = 'npm donothing ' process.env.COMP_POINT = process.env.COMP_LINE.length @@ -448,7 +450,7 @@ test('completions for commands that return nothing work correctly', t => { }) }) -test('completions for commands that return a single item work correctly', t => { +t.test('completions for commands that return a single item work correctly', t => { process.env.COMP_CWORD = 2 process.env.COMP_LINE = 'npm driveaboat ' process.env.COMP_POINT = process.env.COMP_LINE.length @@ -477,7 +479,7 @@ test('completions for commands that return a single item work correctly', t => { }) }) -test('command completion for commands with no completion return no results', t => { +t.test('command completion for commands with no completion return no results', t => { process.env.COMP_CWORD = 2 process.env.COMP_LINE = 'npm adduser ' process.env.COMP_POINT = process.env.COMP_LINE.length @@ -507,7 +509,7 @@ test('command completion for commands with no completion return no results', t = }) }) -test('command completion errors propagate', t => { +t.test('command completion errors propagate', t => { process.env.COMP_CWORD = 2 process.env.COMP_LINE = 'npm access ' process.env.COMP_POINT = process.env.COMP_LINE.length @@ -536,7 +538,7 @@ test('command completion errors propagate', t => { }) }) -test('completion can complete flags', t => { +t.test('completion can complete flags', t => { process.env.COMP_CWORD = 2 process.env.COMP_LINE = 'npm install --' process.env.COMP_POINT = process.env.COMP_LINE.length @@ -559,7 +561,7 @@ test('completion can complete flags', t => { }) }) -test('double dashes escape from flag completion', t => { +t.test('double dashes escape from flag completion', t => { process.env.COMP_CWORD = 2 
process.env.COMP_LINE = 'npm -- install --' process.env.COMP_POINT = process.env.COMP_LINE.length @@ -582,7 +584,7 @@ test('double dashes escape from flag completion', t => { }) }) -test('completion cannot complete options that take a value in mid-command', t => { +t.test('completion cannot complete options that take a value in mid-command', t => { process.env.COMP_CWORD = 2 process.env.COMP_LINE = 'npm --registry install' process.env.COMP_POINT = process.env.COMP_LINE.length diff --git a/test/lib/config.js b/test/lib/config.js index c2420aefb4a00..8a1e7d85e09aa 100644 --- a/test/lib/config.js +++ b/test/lib/config.js @@ -1,5 +1,5 @@ const t = require('tap') -const requireInject = require('require-inject') + const { EventEmitter } = require('events') const redactCwd = (path) => { @@ -22,12 +22,21 @@ const redactCwd = (path) => { t.cleanSnapshot = (str) => redactCwd(str) let result = '' -const types = { - 'init-author-name': String, - 'init-version': String, - 'init.author.name': String, - 'init.version': String, -} + +const configDefs = require('../../lib/utils/config') +const definitions = Object.entries(configDefs.definitions) + .filter(([key, def]) => { + return [ + 'init-author-name', + 'init.author.name', + 'init-version', + 'init.version', + ].includes(key) + }).reduce((defs, [key, def]) => { + defs[key] = def + return defs + }, {}) + const defaults = { 'init-author-name': '', 'init-version': '1.0.0', @@ -35,16 +44,19 @@ const defaults = { 'init.version': '1.0.0', } -const flatOptions = { +const cliConfig = { editor: 'vi', json: false, + location: 'user', long: false, - global: false, + cat: true, + chai: true, + dog: true, } const npm = { - flatOptions, log: { + warn: () => null, info: () => null, enableProgress: () => null, disableProgress: () => null, @@ -53,28 +65,28 @@ const npm = { data: new Map(Object.entries({ default: { data: defaults, source: 'default values' }, global: { data: {}, source: '/etc/npmrc' }, - cli: { data: flatOptions, source: 'command line options' }, + cli: { data: cliConfig, source: 'command line options' }, })), get (key) { - return flatOptions[key] + return cliConfig[key] }, validate () { return true }, }, + output: msg => { + result = msg + }, } const usageUtil = () => 'usage instructions' const mocks = { - '../../lib/utils/config.js': { defaults, types }, - '../../lib/utils/output.js': msg => { - result = msg - }, + '../../lib/utils/config/index.js': { defaults, definitions }, '../../lib/utils/usage.js': usageUtil, } -const Config = requireInject('../../lib/config.js', mocks) +const Config = t.mock('../../lib/config.js', mocks) const config = new Config(npm) t.test('config no args', t => { @@ -84,6 +96,22 @@ t.test('config no args', t => { }) }) +t.test('config ignores workspaces', t => { + npm.log.warn = (title, msg) => { + t.equal(title, 'config', 'should warn with expected title') + t.equal( + msg, + 'This command does not support workspaces.', + 'should warn with unsupported option msg' + ) + } + config.execWorkspaces([], [], (err) => { + t.match(err, /usage instructions/, 'should not error out when workspaces are defined') + npm.log.warn = () => null + t.end() + }) +}) + t.test('config list', t => { t.plan(2) @@ -95,7 +123,7 @@ t.test('config list', t => { }) config.exec(['list'], (err) => { - t.ifError(err, 'npm config list') + t.error(err, 'npm config list') t.matchSnapshot(result, 'should list configs') }) }) @@ -110,18 +138,18 @@ t.test('config list overrides', t => { }, source: '~/.npmrc', }) - flatOptions['init.author.name'] = 'Bar' + 
cliConfig['init.author.name'] = 'Bar' npm.config.find = () => 'cli' result = '' t.teardown(() => { result = '' npm.config.data.delete('user') - delete flatOptions['init.author.name'] + delete cliConfig['init.author.name'] delete npm.config.find }) config.exec(['list'], (err) => { - t.ifError(err, 'npm config list') + t.error(err, 'npm config list') t.matchSnapshot(result, 'should list overridden configs') }) }) @@ -129,17 +157,17 @@ t.test('config list overrides', t => { t.test('config list --long', t => { t.plan(2) - npm.config.find = key => key in flatOptions ? 'cli' : 'default' - flatOptions.long = true + npm.config.find = key => key in cliConfig ? 'cli' : 'default' + cliConfig.long = true result = '' t.teardown(() => { delete npm.config.find - flatOptions.long = false + cliConfig.long = false result = '' }) config.exec(['list'], (err) => { - t.ifError(err, 'npm config list --long') + t.error(err, 'npm config list --long') t.matchSnapshot(result, 'should list all configs') }) }) @@ -147,7 +175,7 @@ t.test('config list --long', t => { t.test('config list --json', t => { t.plan(2) - flatOptions.json = true + cliConfig.json = true result = '' npm.config.list = [{ '//private-reg.npmjs.org/:_authThoken': 'f00ba1', @@ -158,20 +186,23 @@ t.test('config list --json', t => { t.teardown(() => { delete npm.config.list - flatOptions.json = false + cliConfig.json = false npm.config.get = npmConfigGet result = '' }) config.exec(['list'], (err) => { - t.ifError(err, 'npm config list --json') - t.deepEqual( + t.error(err, 'npm config list --json') + t.same( JSON.parse(result), { editor: 'vi', json: true, + location: 'user', long: false, - global: false, + cat: true, + chai: true, + dog: true, }, 'should list configs usin json' ) @@ -180,12 +211,7 @@ t.test('config list --json', t => { t.test('config delete no args', t => { config.exec(['delete'], (err) => { - t.equal( - err.message, - 'usage instructions', - 'should throw usage error' - ) - t.equal(err.code, 'EUSAGE', 'should throw expected error code') + t.match(err, { message: '\nUsage: usage instructions' }) t.end() }) }) @@ -203,7 +229,7 @@ t.test('config delete key', t => { } config.exec(['delete', 'foo'], (err) => { - t.ifError(err, 'npm config delete key') + t.error(err, 'npm config delete key') }) t.teardown(() => { @@ -230,7 +256,7 @@ t.test('config delete multiple key', t => { } config.exec(['delete', 'foo', 'bar'], (err) => { - t.ifError(err, 'npm config delete keys') + t.error(err, 'npm config delete keys') }) t.teardown(() => { @@ -239,7 +265,7 @@ t.test('config delete multiple key', t => { }) }) -t.test('config delete key --global', t => { +t.test('config delete key --location=global', t => { t.plan(4) npm.config.delete = (key, where) => { @@ -251,13 +277,13 @@ t.test('config delete key --global', t => { t.equal(where, 'global', 'should save global config post-delete') } - flatOptions.global = true + cliConfig.location = 'global' config.exec(['delete', 'foo'], (err) => { - t.ifError(err, 'npm config delete key --global') + t.error(err, 'npm config delete key --location=global') }) t.teardown(() => { - flatOptions.global = false + cliConfig.location = 'user' delete npm.config.delete delete npm.config.save }) @@ -265,11 +291,7 @@ t.test('config delete key --global', t => { t.test('config set no args', t => { config.exec(['set'], (err) => { - t.equal( - err.message, - 'usage instructions', - 'should throw usage error' - ) + t.match(err, { message: '\nUsage: usage instructions' }) t.end() }) }) @@ -288,7 +310,7 @@ t.test('config set key', t 
=> { } config.exec(['set', 'foo', 'bar'], (err) => { - t.ifError(err, 'npm config set key') + t.error(err, 'npm config set key') }) t.teardown(() => { @@ -311,7 +333,7 @@ t.test('config set key=val', t => { } config.exec(['set', 'foo=bar'], (err) => { - t.ifError(err, 'npm config set key') + t.error(err, 'npm config set key') }) t.teardown(() => { @@ -342,7 +364,7 @@ t.test('config set multiple keys', t => { } config.exec(['set', ...args], (err) => { - t.ifError(err, 'npm config set key') + t.error(err, 'npm config set key') }) t.teardown(() => { @@ -365,7 +387,7 @@ t.test('config set key to empty value', t => { } config.exec(['set', 'foo'], (err) => { - t.ifError(err, 'npm config set key to empty value') + t.error(err, 'npm config set key to empty value') }) t.teardown(() => { @@ -389,15 +411,15 @@ t.test('config set invalid key', t => { npm.config.validate = npmConfigValidate delete npm.config.save delete npm.config.set - delete npm.log.warn + npm.log.warn = () => null }) config.exec(['set', 'foo', 'bar'], (err) => { - t.ifError(err, 'npm config set invalid key') + t.error(err, 'npm config set invalid key') }) }) -t.test('config set key --global', t => { +t.test('config set key --location=global', t => { t.plan(5) npm.config.set = (key, val, where) => { @@ -410,13 +432,13 @@ t.test('config set key --global', t => { t.equal(where, 'global', 'should save global config') } - flatOptions.global = true + cliConfig.location = 'global' config.exec(['set', 'foo', 'bar'], (err) => { - t.ifError(err, 'npm config set key --global') + t.error(err, 'npm config set key --location=global') }) t.teardown(() => { - flatOptions.global = false + cliConfig.location = 'user' delete npm.config.set delete npm.config.save }) @@ -433,7 +455,7 @@ t.test('config get no args', t => { }) config.exec(['get'], (err) => { - t.ifError(err, 'npm config get no args') + t.error(err, 'npm config get no args') t.matchSnapshot(result, 'should list configs on config get no args') }) }) @@ -452,7 +474,7 @@ t.test('config get key', t => { } config.exec(['get', 'foo'], (err) => { - t.ifError(err, 'npm config get key') + t.error(err, 'npm config get key') }) t.teardown(() => { @@ -480,7 +502,7 @@ t.test('config get multiple keys', t => { } config.exec(['get', 'foo', 'bar'], (err) => { - t.ifError(err, 'npm config get multiple keys') + t.error(err, 'npm config get multiple keys') t.equal(result, 'foo=asdf\nbar=asdf') }) @@ -538,20 +560,20 @@ sign-git-commit=true` }, }, } - const Config = requireInject('../../lib/config.js', editMocks) + const Config = t.mock('../../lib/config.js', editMocks) const config = new Config(npm) config.exec(['edit'], (err) => { - t.ifError(err, 'npm config edit') + t.error(err, 'npm config edit') // test no config file result editMocks.fs.readFile = (p, e, cb) => { cb(new Error('ERR')) } - const Config = requireInject('../../lib/config.js', editMocks) + const Config = t.mock('../../lib/config.js', editMocks) const config = new Config(npm) config.exec(['edit'], (err) => { - t.ifError(err, 'npm config edit') + t.error(err, 'npm config edit') }) }) @@ -561,10 +583,10 @@ sign-git-commit=true` }) }) -t.test('config edit --global', t => { +t.test('config edit --location=global', t => { t.plan(6) - flatOptions.global = true + cliConfig.location = 'global' const npmrc = 'init.author.name=Foo' npm.config.data.set('global', { source: '/etc/npmrc', @@ -597,14 +619,14 @@ t.test('config edit --global', t => { }, }, } - const Config = requireInject('../../lib/config.js', editMocks) + const Config = 
t.mock('../../lib/config.js', editMocks) const config = new Config(npm) config.exec(['edit'], (err) => { t.match(err, /exited with code: 137/, 'propagated exit code from editor') }) t.teardown(() => { - flatOptions.global = false + cliConfig.location = 'user' npm.config.data.delete('user') delete npm.config.save }) @@ -621,7 +643,7 @@ t.test('completion', t => { testComp(['npm', 'config'], ['get', 'set', 'delete', 'ls', 'rm', 'edit', 'list']) testComp(['npm', 'config', 'set', 'foo'], []) - const possibleConfigKeys = [...Object.keys(types)] + const possibleConfigKeys = [...Object.keys(definitions)] testComp(['npm', 'config', 'get'], possibleConfigKeys) testComp(['npm', 'config', 'set'], possibleConfigKeys) testComp(['npm', 'config', 'delete'], possibleConfigKeys) diff --git a/test/lib/dedupe.js b/test/lib/dedupe.js index 3e8b2f4c01347..30f8a380e8ea3 100644 --- a/test/lib/dedupe.js +++ b/test/lib/dedupe.js @@ -1,29 +1,20 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') +const { real: mockNpm } = require('../fixtures/mock-npm') -const npm = (base) => { - const config = base.config - return { - ...base, - flatOptions: { dryRun: false }, - config: { - get: (k) => config[k], - }, - } -} - -test('should throw in global mode', (t) => { - const Dedupe = requireInject('../../lib/dedupe.js') - const dedupe = new Dedupe(npm({ config: { global: true }})) - - dedupe.exec([], er => { - t.match(er, { code: 'EDEDUPEGLOBAL' }, 'throws EDEDUPEGLOBAL') - t.end() - }) +t.test('should throw in global mode', async (t) => { + const { npm, command } = mockNpm(t) + await npm.load() + npm.config.set('global', true) + t.rejects( + command('dedupe'), + { code: 'EDEDUPEGLOBAL' }, + 'throws EDEDUPEGLOBAL' + ) }) -test('should remove dupes using Arborist', (t) => { - const Dedupe = requireInject('../../lib/dedupe.js', { +t.test('should remove dupes using Arborist', async (t) => { + t.plan(5) + const { npm, command } = mockNpm(t, { '@npmcli/arborist': function (args) { t.ok(args, 'gets options object') t.ok(args.path, 'gets path option') @@ -36,35 +27,24 @@ test('should remove dupes using Arborist', (t) => { t.ok(arb, 'gets arborist tree') }, }) - const dedupe = new Dedupe(npm({ - prefix: 'foo', - config: { - 'dry-run': 'true', - }, - })) - dedupe.exec([], er => { - if (er) - throw er - t.ok(true, 'callback is called') - t.end() - }) + await npm.load() + npm.config.set('prefix', 'foo') + npm.config.set('dry-run', 'true') + await command('dedupe') }) -test('should remove dupes using Arborist - no arguments', (t) => { - const Dedupe = requireInject('../../lib/dedupe.js', { +t.test('should remove dupes using Arborist - no arguments', async (t) => { + t.plan(1) + const { npm, command } = mockNpm(t, { '@npmcli/arborist': function (args) { - t.ok(args.dryRun, 'gets dryRun from flatOptions') + t.ok(args.dryRun, 'gets dryRun from config') this.dedupe = () => {} }, '../../lib/utils/reify-output.js': () => {}, + '../../lib/utils/reify-finish.js': () => {}, }) - const dedupe = new Dedupe(npm({ - prefix: 'foo', - config: { - 'dry-run': true, - }, - })) - dedupe.exec(null, () => { - t.end() - }) + await npm.load() + npm.config.set('prefix', 'foo') + npm.config.set('dry-run', true) + await command('dedupe') }) diff --git a/test/lib/deprecate.js b/test/lib/deprecate.js index 03100166a012c..a69ef6c7796fc 100644 --- a/test/lib/deprecate.js +++ b/test/lib/deprecate.js @@ -1,5 +1,4 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = 
require('tap') let getIdentityImpl = () => 'someperson' let npmFetchBody = null @@ -18,7 +17,7 @@ npmFetch.json = async (uri, opts) => { } } -const Deprecate = requireInject('../../lib/deprecate.js', { +const Deprecate = t.mock('../../lib/deprecate.js', { '../../lib/utils/get-identity.js': async () => getIdentityImpl(), '../../lib/utils/otplease.js': async (opts, fn) => fn(opts), libnpmaccess: { @@ -31,7 +30,7 @@ const deprecate = new Deprecate({ flatOptions: { registry: 'https://registry.npmjs.org' }, }) -test('completion', async t => { +t.test('completion', async t => { const defaultIdentityImpl = getIdentityImpl t.teardown(() => { getIdentityImpl = defaultIdentityImpl @@ -58,28 +57,43 @@ test('completion', async t => { t.rejects(testComp([], []), { message: 'deprecate test failure' }) }) -test('no args', t => { +t.test('no args', t => { deprecate.exec([], (err) => { - t.match(err, /Usage: npm deprecate/, 'logs usage') + t.match(err, 'Usage:', 'logs usage') t.end() }) }) -test('only one arg', t => { +t.test('only one arg', t => { deprecate.exec(['foo'], (err) => { - t.match(err, /Usage: npm deprecate/, 'logs usage') + t.match(err, 'Usage:', 'logs usage') t.end() }) }) -test('invalid semver range', t => { +t.test('invalid semver range', t => { deprecate.exec(['foo@notaversion', 'this will fail'], (err) => { t.match(err, /invalid version range/, 'logs semver error') t.end() }) }) -test('deprecates given range', t => { +t.test('undeprecate', t => { + deprecate.exec(['foo', ''], (err) => { + if (err) + throw err + t.match(npmFetchBody, { + versions: { + '1.0.0': { deprecated: '' }, + '1.0.1': { deprecated: '' }, + '1.0.1-pre': { deprecated: '' }, + }, + }, 'undeprecates everything') + t.end() + }) +}) + +t.test('deprecates given range', t => { t.teardown(() => { npmFetchBody = null }) @@ -105,7 +119,7 @@ test('deprecates given range', t => { }) }) -test('deprecates all versions when no range is specified', t => { +t.test('deprecates all versions when no range is specified', t => { t.teardown(() => { npmFetchBody = null }) diff --git a/test/lib/diff.js b/test/lib/diff.js index 5e60f125cec3d..fcba802d93b87 100644 --- a/test/lib/diff.js +++ b/test/lib/diff.js @@ -1,70 +1,85 @@ -const { resolve } = require('path') const t = require('tap') -const requireInject = require('require-inject') +const { resolve, join } = require('path') +const { fake: mockNpm } = require('../fixtures/mock-npm') const noop = () => null let libnpmdiff = noop -let rlp = () => 'foo' -const defaultFlatOptions = { - defaultTag: 'latest', + +const config = { + global: false, + tag: 'latest', diff: [], +} +const flatOptions = { + global: false, diffUnified: null, diffIgnoreAllSpace: false, diffNoPrefix: false, diffSrcPrefix: '', diffDstPrefix: '', diffText: false, - prefix: '.', savePrefix: '^', } -const npm = { - globalDir: __dirname, - flatOptions: { ...defaultFlatOptions }, - get prefix () { - return this.flatOptions.prefix - }, -} +const fooPath = t.testdir({ + 'package.json': JSON.stringify({ name: 'foo', version: '1.0.0' }), +}) +const npm = mockNpm({ + prefix: fooPath, + config, + flatOptions, + output: noop, +}) + const mocks = { npmlog: { info: noop, verbose: noop }, libnpmdiff: (...args) => libnpmdiff(...args), 'npm-registry-fetch': async () => ({}), - '../../lib/utils/output.js': noop, - '../../lib/utils/read-local-package.js': async () => rlp(), '../../lib/utils/usage.js': () => 'usage instructions', } -t.afterEach(cb => { - npm.flatOptions = { ...defaultFlatOptions } +t.afterEach(() => { + config.global = false + 
config.tag = 'latest' + config.diff = [] + flatOptions.global = false + flatOptions.diffUnified = null + flatOptions.diffIgnoreAllSpace = false + flatOptions.diffNoPrefix = false + flatOptions.diffSrcPrefix = '' + flatOptions.diffDstPrefix = '' + flatOptions.diffText = false + flatOptions.savePrefix = '^' + npm.globalDir = fooPath + npm.prefix = fooPath libnpmdiff = noop - rlp = () => 'foo' - npm.globalDir = __dirname - cb() + diff.prefix = undefined + diff.top = undefined }) -const Diff = requireInject('../../lib/diff.js', mocks) +const Diff = t.mock('../../lib/diff.js', mocks) const diff = new Diff(npm) t.test('no args', t => { t.test('in a project dir', t => { t.plan(3) - const path = t.testdir({}) libnpmdiff = async ([a, b], opts) => { t.equal(a, 'foo@latest', 'should have default spec comparison') - t.equal(b, `file:${path}`, 'should compare to cwd') + t.equal(b, `file:${fooPath}`, 'should compare to cwd') t.match(opts, npm.flatOptions, 'should forward flat options') } - npm.flatOptions.prefix = path + npm.prefix = fooPath diff.exec([], err => { if (err) throw err + t.end() }) }) t.test('no args, missing package.json name in cwd', t => { - rlp = () => undefined - + const path = t.testdir({}) + npm.prefix = path diff.exec([], err => { t.match( err, @@ -75,10 +90,11 @@ t.test('no args', t => { }) }) - t.test('no args, missing package.json in cwd', t => { - rlp = () => { - throw new Error('ERR') - } + t.test('no args, bad package.json in cwd', t => { + const path = t.testdir({ + 'package.json': '{invalid"json', + }) + npm.prefix = path diff.exec([], err => { t.match( @@ -97,29 +113,26 @@ t.test('single arg', t => { t.test('spec using cwd package name', t => { t.plan(3) - const path = t.testdir({}) libnpmdiff = async ([a, b], opts) => { t.equal(a, 'foo@1.0.0', 'should forward single spec') - t.equal(b, `file:${path}`, 'should compare to cwd') + t.equal(b, `file:${fooPath}`, 'should compare to cwd') t.match(opts, npm.flatOptions, 'should forward flat options') } - npm.flatOptions.diff = ['foo@1.0.0'] - npm.flatOptions.prefix = path + config.diff = ['foo@1.0.0'] + npm.prefix = fooPath diff.exec([], err => { if (err) throw err + t.end() }) }) t.test('unknown spec, no package.json', t => { const path = t.testdir({}) - rlp = () => { - throw new Error('ERR') - } - npm.flatOptions.diff = ['foo@1.0.0'] - npm.flatOptions.prefix = path + config.diff = ['foo@1.0.0'] + npm.prefix = path diff.exec([], err => { t.match( err, @@ -133,15 +146,13 @@ t.test('single arg', t => { t.test('spec using semver range', t => { t.plan(3) - const path = t.testdir({}) libnpmdiff = async ([a, b], opts) => { t.equal(a, 'foo@~1.0.0', 'should forward single spec') - t.equal(b, `file:${path}`, 'should compare to cwd') + t.equal(b, `file:${fooPath}`, 'should compare to cwd') t.match(opts, npm.flatOptions, 'should forward flat options') } - npm.flatOptions.diff = ['foo@~1.0.0'] - npm.flatOptions.prefix = path + config.diff = ['foo@~1.0.0'] diff.exec([], err => { if (err) throw err @@ -151,15 +162,13 @@ t.test('single arg', t => { t.test('version', t => { t.plan(3) - const path = t.testdir({}) libnpmdiff = async ([a, b], opts) => { t.equal(a, 'foo@2.1.4', 'should convert to expected first spec') - t.equal(b, `file:${path}`, 'should compare to cwd') + t.equal(b, `file:${fooPath}`, 'should compare to cwd') t.match(opts, npm.flatOptions, 'should forward flat options') } - npm.flatOptions.diff = ['2.1.4'] - npm.flatOptions.prefix = path + config.diff = ['2.1.4'] diff.exec([], err => { if (err) throw err @@ -167,11 +176,9 @@ 
t.test('single arg', t => { }) t.test('version, no package.json', t => { - rlp = () => { - throw new Error('ERR') - } - - npm.flatOptions.diff = ['2.1.4'] + const path = t.testdir({}) + npm.prefix = path + config.diff = ['2.1.4'] diff.exec([], err => { t.match( err, @@ -185,10 +192,9 @@ t.test('single arg', t => { t.test('version, filtering by files', t => { t.plan(3) - const path = t.testdir({}) libnpmdiff = async ([a, b], opts) => { t.equal(a, 'foo@2.1.4', 'should use expected spec') - t.equal(b, `file:${path}`, 'should compare to cwd') + t.equal(b, `file:${fooPath}`, 'should compare to cwd') t.match(opts, { ...npm.flatOptions, diffFiles: [ @@ -198,8 +204,7 @@ t.test('single arg', t => { }, 'should forward flatOptions and diffFiles') } - npm.flatOptions.diff = ['2.1.4'] - npm.flatOptions.prefix = path + config.diff = ['2.1.4'] diff.exec(['./foo.js', './bar.js'], err => { if (err) throw err @@ -221,8 +226,8 @@ t.test('single arg', t => { t.equal(b, `file:${path}`, 'should compare to cwd') } - npm.flatOptions.diff = ['bar@1.0.0'] - npm.flatOptions.prefix = path + config.diff = ['bar@1.0.0'] + npm.prefix = path diff.exec([], err => { if (err) @@ -248,8 +253,8 @@ t.test('single arg', t => { t.match(opts, npm.flatOptions, 'should forward flat options') } - npm.flatOptions.diff = ['simple-output'] - npm.flatOptions.prefix = path + config.diff = ['simple-output'] + npm.prefix = path diff.exec([], err => { if (err) throw err @@ -258,12 +263,9 @@ t.test('single arg', t => { t.test('unknown package name, no package.json', t => { const path = t.testdir({}) - rlp = () => { - throw new Error('ERR') - } - npm.flatOptions.diff = ['bar'] - npm.flatOptions.prefix = path + config.diff = ['bar'] + npm.prefix = path diff.exec([], err => { t.match( err, @@ -294,10 +296,10 @@ t.test('single arg', t => { }), }) - npm.flatOptions.diff = ['bar'] - npm.flatOptions.prefix = path + config.diff = ['bar'] + npm.prefix = path - const Diff = requireInject('../../lib/diff.js', { + const Diff = t.mock('../../lib/diff.js', { ...mocks, pacote: { packument: (spec) => { @@ -355,12 +357,13 @@ t.test('single arg', t => { }, }) - npm.flatOptions.global = true - npm.flatOptions.diff = ['lorem'] - npm.flatOptions.prefix = resolve(path, 'project') + config.global = true + flatOptions.global = true + config.diff = ['lorem'] + npm.prefix = resolve(path, 'project') npm.globalDir = resolve(path, 'globalDir/lib/node_modules') - const Diff = requireInject('../../lib/diff.js', { + const Diff = t.mock('../../lib/diff.js', { ...mocks, pacote: { packument: (spec) => { @@ -409,8 +412,8 @@ t.test('single arg', t => { t.equal(b, 'bar@2.0.0', 'should have expected comparison spec') } - npm.flatOptions.diff = ['bar@2.0.0'] - npm.flatOptions.prefix = path + config.diff = ['bar@2.0.0'] + npm.prefix = path diff.exec([], err => { if (err) @@ -447,9 +450,9 @@ t.test('single arg', t => { }), }) - const Diff = requireInject('../../lib/diff.js', { + const Diff = t.mock('../../lib/diff.js', { ...mocks, - '../../lib/utils/read-local-package.js': async () => 'my-project', + '../../lib/utils/read-package-name.js': async () => 'my-project', pacote: { packument: (spec) => { t.equal(spec.name, 'lorem', 'should have expected spec name') @@ -466,8 +469,8 @@ t.test('single arg', t => { }) const diff = new Diff(npm) - npm.flatOptions.diff = ['lorem'] - npm.flatOptions.prefix = path + config.diff = ['lorem'] + npm.prefix = path diff.exec([], err => { if (err) @@ -484,9 +487,9 @@ t.test('single arg', t => { }), }) - const Diff = requireInject('../../lib/diff.js', { 
+ const Diff = t.mock('../../lib/diff.js', { ...mocks, - '../../lib/utils/read-local-package.js': async () => 'my-project', + '../../lib/utils/read-package-name.js': async () => 'my-project', '@npmcli/arborist': class { constructor () { throw new Error('ERR') @@ -499,8 +502,8 @@ t.test('single arg', t => { }) const diff = new Diff(npm) - npm.flatOptions.diff = ['lorem'] - npm.flatOptions.prefix = path + config.diff = ['lorem'] + npm.prefix = path diff.exec([], err => { if (err) @@ -511,15 +514,16 @@ t.test('single arg', t => { t.test('unknown package name', t => { t.plan(2) - const path = t.testdir({}) - rlp = async () => undefined + const path = t.testdir({ + 'package.json': JSON.stringify({ version: '1.0.0' }), + }) libnpmdiff = async ([a, b], opts) => { t.equal(a, 'bar@latest', 'should target latest tag of name') t.equal(b, `file:${path}`, 'should compare to cwd') } - npm.flatOptions.diff = ['bar'] - npm.flatOptions.prefix = path + config.diff = ['bar'] + npm.prefix = path diff.exec([], err => { if (err) @@ -530,15 +534,12 @@ t.test('single arg', t => { t.test('use project name in project dir', t => { t.plan(2) - const path = t.testdir({}) - rlp = async () => 'my-project' libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'my-project@latest', 'should target latest tag of name') - t.equal(b, `file:${path}`, 'should compare to cwd') + t.equal(a, 'foo@latest', 'should target latest tag of name') + t.equal(b, `file:${fooPath}`, 'should compare to cwd') } - npm.flatOptions.diff = ['my-project'] - npm.flatOptions.prefix = path + config.diff = ['foo'] diff.exec([], err => { if (err) throw err @@ -548,15 +549,13 @@ t.test('single arg', t => { t.test('dir spec type', t => { t.plan(2) - const path = t.testdir({}) - rlp = async () => 'my-project' + const otherPath = resolve('/path/to/other-dir') libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'file:/path/to/other-dir', 'should target dir') - t.equal(b, `file:${path}`, 'should compare to cwd') + t.equal(a, `file:${otherPath}`, 'should target dir') + t.equal(b, `file:${fooPath}`, 'should compare to cwd') } - npm.flatOptions.diff = ['/path/to/other-dir'] - npm.flatOptions.prefix = path + config.diff = [otherPath] diff.exec([], err => { if (err) throw err @@ -564,14 +563,11 @@ t.test('single arg', t => { }) t.test('unsupported spec type', t => { - rlp = async () => 'my-project' - - npm.flatOptions.diff = ['git+https://github.com/user/foo'] - + config.diff = ['git+https://github.com/user/foo'] diff.exec([], err => { t.match( err, - /Spec type not supported./, + /Spec type git not supported./, 'should throw spec type not supported error.' 
) t.end() @@ -591,7 +587,7 @@ t.test('first arg is a qualified spec', t => { t.match(opts, npm.flatOptions, 'should forward flat options') } - npm.flatOptions.diff = ['bar@1.0.0', 'bar@^2.0.0'] + config.diff = ['bar@1.0.0', 'bar@^2.0.0'] diff.exec([], err => { if (err) throw err @@ -618,14 +614,13 @@ t.test('first arg is a qualified spec', t => { }), }) - rlp = async () => 'my-project' libnpmdiff = async ([a, b], opts) => { t.equal(a, 'bar@2.0.0', 'should set expected first spec') t.equal(b, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should target local node_modules pkg') } - npm.flatOptions.prefix = path - npm.flatOptions.diff = ['bar@2.0.0', 'bar'] + npm.prefix = path + config.diff = ['bar@2.0.0', 'bar'] diff.exec([], err => { if (err) throw err @@ -635,7 +630,7 @@ t.test('first arg is a qualified spec', t => { t.test('second arg is a valid semver version', t => { t.plan(2) - npm.flatOptions.diff = ['bar@1.0.0', '2.0.0'] + config.diff = ['bar@1.0.0', '2.0.0'] libnpmdiff = async ([a, b], opts) => { t.equal(a, 'bar@1.0.0', 'should set expected first spec') @@ -656,7 +651,7 @@ t.test('first arg is a qualified spec', t => { t.equal(b, 'bar-fork@latest', 'should target latest tag if not a dep') } - npm.flatOptions.diff = ['bar@1.0.0', 'bar-fork'] + config.diff = ['bar@1.0.0', 'bar-fork'] diff.exec([], err => { if (err) throw err @@ -687,14 +682,13 @@ t.test('first arg is a known dependency name', t => { }), }) - rlp = async () => 'my-project' libnpmdiff = async ([a, b], opts) => { t.equal(a, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should target local node_modules pkg') t.equal(b, 'bar@2.0.0', 'should set expected second spec') } - npm.flatOptions.prefix = path - npm.flatOptions.diff = ['bar', 'bar@2.0.0'] + npm.prefix = path + config.diff = ['bar', 'bar@2.0.0'] diff.exec([], err => { if (err) throw err @@ -727,14 +721,13 @@ t.test('first arg is a known dependency name', t => { }), }) - rlp = async () => 'my-project' libnpmdiff = async ([a, b], opts) => { t.equal(a, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should target local node_modules pkg') t.equal(b, `bar-fork@file:${resolve(path, 'node_modules/bar-fork')}`, 'should target fork local node_modules pkg') } - npm.flatOptions.prefix = path - npm.flatOptions.diff = ['bar', 'bar-fork'] + npm.prefix = path + config.diff = ['bar', 'bar-fork'] diff.exec([], err => { if (err) throw err @@ -761,14 +754,13 @@ t.test('first arg is a known dependency name', t => { }), }) - rlp = async () => 'my-project' libnpmdiff = async ([a, b], opts) => { t.equal(a, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should target local node_modules pkg') t.equal(b, 'bar@2.0.0', 'should use package name from first arg') } - npm.flatOptions.prefix = path - npm.flatOptions.diff = ['bar', '2.0.0'] + npm.prefix = path + config.diff = ['bar', '2.0.0'] diff.exec([], err => { if (err) throw err @@ -795,14 +787,13 @@ t.test('first arg is a known dependency name', t => { }), }) - rlp = async () => 'my-project' libnpmdiff = async ([a, b], opts) => { t.equal(a, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should target local node_modules pkg') t.equal(b, 'bar-fork@latest', 'should set expected second spec') } - npm.flatOptions.prefix = path - npm.flatOptions.diff = ['bar', 'bar-fork'] + npm.prefix = path + config.diff = ['bar', 'bar-fork'] diff.exec([], err => { if (err) throw err @@ -816,7 +807,7 @@ t.test('first arg is a valid semver range', t => { t.test('second arg is a qualified spec', t => { t.plan(2) - npm.flatOptions.diff = ['1.0.0', 
'bar@2.0.0'] + config.diff = ['1.0.0', 'bar@2.0.0'] libnpmdiff = async ([a, b], opts) => { t.equal(a, 'bar@1.0.0', 'should use name from second arg') @@ -849,14 +840,13 @@ t.test('first arg is a valid semver range', t => { }), }) - rlp = async () => 'my-project' libnpmdiff = async ([a, b], opts) => { t.equal(a, 'bar@1.0.0', 'should use name from second arg') t.equal(b, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should set expected second spec from nm') } - npm.flatOptions.prefix = path - npm.flatOptions.diff = ['1.0.0', 'bar'] + npm.prefix = path + config.diff = ['1.0.0', 'bar'] diff.exec([], err => { if (err) throw err @@ -866,13 +856,12 @@ t.test('first arg is a valid semver range', t => { t.test('second arg is ALSO a semver version', t => { t.plan(2) - rlp = async () => 'my-project' libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'my-project@1.0.0', 'should use name from project dir') - t.equal(b, 'my-project@2.0.0', 'should use name from project dir') + t.equal(a, 'foo@1.0.0', 'should use name from project dir') + t.equal(b, 'foo@2.0.0', 'should use name from project dir') } - npm.flatOptions.diff = ['1.0.0', '2.0.0'] + config.diff = ['1.0.0', '2.0.0'] diff.exec([], err => { if (err) throw err @@ -881,12 +870,8 @@ t.test('first arg is a valid semver range', t => { t.test('second arg is ALSO a semver version BUT cwd not a project dir', t => { const path = t.testdir({}) - rlp = () => { - throw new Error('ERR') - } - - npm.flatOptions.diff = ['1.0.0', '2.0.0'] - npm.flatOptions.prefix = path + config.diff = ['1.0.0', '2.0.0'] + npm.prefix = path diff.exec([], err => { t.match( err, @@ -900,13 +885,12 @@ t.test('first arg is a valid semver range', t => { t.test('second arg is an unknown dependency name', t => { t.plan(2) - rlp = async () => 'my-project' libnpmdiff = async ([a, b], opts) => { t.equal(a, 'bar@1.0.0', 'should use name from second arg') t.equal(b, 'bar@latest', 'should compare against latest tag') } - npm.flatOptions.diff = ['1.0.0', 'bar'] + config.diff = ['1.0.0', 'bar'] diff.exec([], err => { if (err) throw err @@ -922,9 +906,8 @@ t.test('first arg is a valid semver range', t => { }), }) - const Diff = requireInject('../../lib/diff.js', { + const Diff = t.mock('../../lib/diff.js', { ...mocks, - '../../lib/utils/read-local-package.js': async () => 'my-project', '@npmcli/arborist': class { constructor () { throw new Error('ERR') @@ -937,8 +920,8 @@ t.test('first arg is a valid semver range', t => { }) const diff = new Diff(npm) - npm.flatOptions.diff = ['1.0.0', 'lorem@2.0.0'] - npm.flatOptions.prefix = path + config.diff = ['1.0.0', 'lorem@2.0.0'] + npm.prefix = path diff.exec([], err => { if (err) @@ -951,15 +934,16 @@ t.test('first arg is a valid semver range', t => { t.test('first arg is an unknown dependency name', t => { t.test('second arg is a qualified spec', t => { - t.plan(3) + t.plan(4) libnpmdiff = async ([a, b], opts) => { t.equal(a, 'bar@latest', 'should set expected first spec') t.equal(b, 'bar@2.0.0', 'should set expected second spec') t.match(opts, npm.flatOptions, 'should forward flat options') + t.match(opts, { where: fooPath }, 'should forward pacote options') } - npm.flatOptions.diff = ['bar', 'bar@2.0.0'] + config.diff = ['bar', 'bar@2.0.0'] diff.exec([], err => { if (err) throw err @@ -986,14 +970,13 @@ t.test('first arg is an unknown dependency name', t => { }), }) - rlp = async () => 'my-project' libnpmdiff = async ([a, b], opts) => { t.equal(a, 'bar-fork@latest', 'should use latest tag') t.equal(b, `bar@file:${resolve(path, 
'node_modules/bar')}`, 'should target local node_modules pkg') } - npm.flatOptions.prefix = path - npm.flatOptions.diff = ['bar-fork', 'bar'] + npm.prefix = path + config.diff = ['bar-fork', 'bar'] diff.exec([], err => { if (err) throw err @@ -1008,7 +991,7 @@ t.test('first arg is an unknown dependency name', t => { t.equal(b, 'bar@^1.0.0', 'should use name from first arg') } - npm.flatOptions.diff = ['bar', '^1.0.0'] + config.diff = ['bar', '^1.0.0'] diff.exec([], err => { if (err) throw err @@ -1023,7 +1006,7 @@ t.test('first arg is an unknown dependency name', t => { t.equal(b, 'bar-fork@latest', 'should use latest tag') } - npm.flatOptions.diff = ['bar', 'bar-fork'] + config.diff = ['bar', 'bar-fork'] diff.exec([], err => { if (err) throw err @@ -1034,16 +1017,13 @@ t.test('first arg is an unknown dependency name', t => { t.plan(2) const path = t.testdir({}) - rlp = () => { - throw new Error('ERR') - } libnpmdiff = async ([a, b], opts) => { t.equal(a, 'bar@latest', 'should use latest tag') t.equal(b, 'bar-fork@latest', 'should use latest tag') } - npm.flatOptions.diff = ['bar', 'bar-fork'] - npm.flatOptions.prefix = path + config.diff = ['bar', 'bar-fork'] + npm.prefix = path diff.exec([], err => { if (err) @@ -1058,7 +1038,7 @@ t.test('various options', t => { t.test('using --name-only option', t => { t.plan(1) - npm.flatOptions.diffNameOnly = true + flatOptions.diffNameOnly = true libnpmdiff = async ([a, b], opts) => { t.match(opts, { @@ -1076,7 +1056,7 @@ t.test('various options', t => { t.test('set files after both versions', t => { t.plan(3) - npm.flatOptions.diff = ['2.1.4', '3.0.0'] + config.diff = ['2.1.4', '3.0.0'] libnpmdiff = async ([a, b], opts) => { t.equal(a, 'foo@2.1.4', 'should use expected spec') @@ -1099,11 +1079,9 @@ t.test('various options', t => { t.test('set files no diff args', t => { t.plan(3) - const path = t.testdir({}) - rlp = async () => 'my-project' libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'my-project@latest', 'should have default spec') - t.equal(b, `file:${path}`, 'should compare to cwd') + t.equal(a, 'foo@latest', 'should have default spec') + t.equal(b, `file:${fooPath}`, 'should compare to cwd') t.match(opts, { ...npm.flatOptions, diffFiles: [ @@ -1113,7 +1091,6 @@ t.test('various options', t => { }, 'should forward all remaining items as filenames') } - npm.flatOptions.prefix = path diff.exec(['./foo.js', './bar.js'], err => { if (err) throw err @@ -1123,12 +1100,12 @@ t.test('various options', t => { t.test('using diff option', t => { t.plan(1) - npm.flatOptions.diffContext = 5 - npm.flatOptions.diffIgnoreWhitespace = true - npm.flatOptions.diffNoPrefix = false - npm.flatOptions.diffSrcPrefix = 'foo/' - npm.flatOptions.diffDstPrefix = 'bar/' - npm.flatOptions.diffText = true + flatOptions.diffContext = 5 + flatOptions.diffIgnoreWhitespace = true + flatOptions.diffNoPrefix = false + flatOptions.diffSrcPrefix = 'foo/' + flatOptions.diffDstPrefix = 'bar/' + flatOptions.diffText = true libnpmdiff = async ([a, b], opts) => { t.match(opts, { @@ -1152,7 +1129,7 @@ t.test('various options', t => { }) t.test('too many args', t => { - npm.flatOptions.diff = ['a', 'b', 'c'] + config.diff = ['a', 'b', 'c'] diff.exec([], err => { t.match( err, @@ -1162,3 +1139,80 @@ t.test('too many args', t => { t.end() }) }) + +t.test('workspaces', t => { + const path = t.testdir({ + 'package.json': JSON.stringify({ + name: 'workspaces-test', + version: '1.2.3-test', + workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], + }), + 'workspace-a': { + 
'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.2.3-a', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.2.3-b', + }), + }, + 'workspace-c': JSON.stringify({ + 'package.json': { + name: 'workspace-n', + version: '1.2.3-n', + }, + }), + }) + + t.test('all workspaces', t => { + const diffCalls = [] + libnpmdiff = async ([a, b]) => { + diffCalls.push([a, b]) + } + npm.prefix = path + npm.localPrefix = path + diff.execWorkspaces([], [], (err) => { + if (err) + throw err + t.same(diffCalls, [ + ['workspace-a@latest', join(`file:${path}`, 'workspace-a')], + ['workspace-b@latest', join(`file:${path}`, 'workspace-b')], + ], 'should call libnpmdiff with workspaces params') + t.end() + }) + }) + + t.test('one workspace', t => { + const diffCalls = [] + libnpmdiff = async ([a, b]) => { + diffCalls.push([a, b]) + } + npm.prefix = path + npm.localPrefix = path + diff.execWorkspaces([], ['workspace-a'], (err) => { + if (err) + throw err + t.same(diffCalls, [ + ['workspace-a@latest', join(`file:${path}`, 'workspace-a')], + ], 'should call libnpmdiff with workspaces params') + t.end() + }) + }) + + t.test('invalid workspace', t => { + libnpmdiff = () => { + t.fail('should not call libnpmdiff') + } + npm.prefix = path + npm.localPrefix = path + diff.execWorkspaces([], ['workspace-x'], (err) => { + t.match(err, /No workspaces found/) + t.match(err, /workspace-x/) + t.end() + }) + }) + t.end() +}) diff --git a/test/lib/dist-tag.js b/test/lib/dist-tag.js index b761fb103cda8..1fb5cb3b6ee62 100644 --- a/test/lib/dist-tag.js +++ b/test/lib/dist-tag.js @@ -1,17 +1,13 @@ -const requireInject = require('require-inject') -const { test } = require('tap') +const t = require('tap') +const { fake: mockNpm } = require('../fixtures/mock-npm') -let prefix let result = '' let log = '' -// these declared opts are used in ./utils/read-local-package.js -const _flatOptions = { - global: false, - get prefix () { - return prefix - }, -} +t.afterEach(() => { + result = '' + log = '' +}) const routeMap = { '/-/package/@scoped%2fpkg/dist-tags': { @@ -30,6 +26,18 @@ const routeMap = { b: '0.6.0', c: '7.7.7', }, + '/-/package/workspace-a/dist-tags': { + latest: '1.0.0', + 'latest-a': '1.0.0', + }, + '/-/package/workspace-b/dist-tags': { + latest: '2.0.0', + 'latest-b': '2.0.0', + }, + '/-/package/workspace-c/dist-tags': { + latest: '3.0.0', + 'latest-c': '3.0.0', + }, } let npmRegistryFetchMock = (url, opts) => { @@ -48,7 +56,7 @@ const logger = (...msgs) => { log += '\n' } -const DistTag = requireInject('../../lib/dist-tag.js', { +const DistTag = t.mock('../../lib/dist-tag.js', { npmlog: { error: logger, info: logger, @@ -58,82 +66,85 @@ const DistTag = requireInject('../../lib/dist-tag.js', { get 'npm-registry-fetch' () { return npmRegistryFetchMock }, - '../../lib/utils/output.js': msg => { - result = msg - }, }) -const distTag = new DistTag({ - flatOptions: _flatOptions, - config: { - get (key) { - return _flatOptions[key] - }, +const config = {} +const npm = mockNpm({ + config, + output: msg => { + result = result ? 
[result, msg].join('\n') : msg }, }) +const distTag = new DistTag(npm) -test('ls in current package', (t) => { - prefix = t.testdir({ +t.test('ls in current package', (t) => { + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: '@scoped/pkg', }), }) distTag.exec(['ls'], (err) => { - t.ifError(err, 'npm dist-tags ls') + t.error(err, 'npm dist-tags ls') t.matchSnapshot( result, 'should list available tags for current package' ) - result = '' - log = '' t.end() }) }) -test('no args in current package', (t) => { - prefix = t.testdir({ +t.test('ls global', (t) => { + t.teardown(() => { + config.global = false + }) + config.global = true + distTag.exec(['ls'], (err) => { + t.matchSnapshot( + err, + 'should throw basic usage' + ) + t.end() + }) +}) + +t.test('no args in current package', (t) => { + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: '@scoped/pkg', }), }) distTag.exec([], (err) => { - t.ifError(err, 'npm dist-tags ls') + t.error(err, 'npm dist-tags ls') t.matchSnapshot( result, 'should default to listing available tags for current package' ) - result = '' - log = '' t.end() }) }) -test('borked cmd usage', (t) => { - prefix = t.testdir({}) +t.test('borked cmd usage', (t) => { + npm.prefix = t.testdir({}) distTag.exec(['borked', '@scoped/pkg'], (err) => { t.matchSnapshot(err, 'should show usage error') - result = '' - log = '' t.end() }) }) -test('ls on named package', (t) => { - prefix = t.testdir({}) +t.test('ls on named package', (t) => { + npm.prefix = t.testdir({}) distTag.exec(['ls', '@scoped/another'], (err) => { - t.ifError(err, 'npm dist-tags ls') + t.error(err, 'npm dist-tags ls') t.matchSnapshot( result, 'should list tags for the specified package' ) - result = '' - log = '' t.end() }) }) -test('ls on missing package', (t) => { - prefix = t.testdir({}) +t.test('ls on missing package', (t) => { + npm.prefix = t.testdir({}) distTag.exec(['ls', 'foo'], (err) => { t.matchSnapshot( log, @@ -143,14 +154,12 @@ test('ls on missing package', (t) => { err, 'should throw error message' ) - result = '' - log = '' t.end() }) }) -test('ls on missing name in current package', (t) => { - prefix = t.testdir({ +t.test('ls on missing name in current package', (t) => { + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ version: '1.0.0', }), @@ -160,48 +169,183 @@ test('ls on missing name in current package', (t) => { err, 'should throw usage error message' ) - result = '' - log = '' t.end() }) }) -test('only named package arg', (t) => { - prefix = t.testdir({}) +t.test('only named package arg', (t) => { + npm.prefix = t.testdir({}) distTag.exec(['@scoped/another'], (err) => { - t.ifError(err, 'npm dist-tags ls') + t.error(err, 'npm dist-tags ls') t.matchSnapshot( result, 'should default to listing tags for the specified package' ) - result = '' - log = '' t.end() }) }) -test('add new tag', (t) => { +t.test('workspaces', (t) => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'root', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], + }), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.0.0', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.0.0', + }), + }, + 'workspace-c': { + 'package.json': JSON.stringify({ + name: 'workspace-c', + version: '1.0.0', + }), + }, + }) + + t.test('no args', t => { + distTag.execWorkspaces([], [], (err) => { + t.error(err) + t.matchSnapshot(result, 'printed the expected 
output') + t.end() + }) + }) + + t.test('no args, one workspace', t => { + distTag.execWorkspaces([], ['workspace-a'], (err) => { + t.error(err) + t.matchSnapshot(result, 'printed the expected output') + t.end() + }) + }) + + t.test('one arg -- .', t => { + distTag.execWorkspaces(['.'], [], (err) => { + t.error(err) + t.matchSnapshot(result, 'printed the expected output') + t.end() + }) + }) + + t.test('one arg -- .@1, ignores version spec', t => { + distTag.execWorkspaces(['.@'], [], (err) => { + t.error(err) + t.matchSnapshot(result, 'printed the expected output') + t.end() + }) + }) + + t.test('one arg -- list', t => { + distTag.execWorkspaces(['list'], [], (err) => { + t.error(err) + t.matchSnapshot(result, 'printed the expected output') + t.end() + }) + }) + + t.test('two args -- list, .', t => { + distTag.execWorkspaces(['list', '.'], [], (err) => { + t.error(err) + t.matchSnapshot(result, 'printed the expected output') + t.end() + }) + }) + + t.test('two args -- list, .@1, ignores version spec', t => { + distTag.execWorkspaces(['list', '.@'], [], (err) => { + t.error(err) + t.matchSnapshot(result, 'printed the expected output') + t.end() + }) + }) + + t.test('two args -- list, @scoped/pkg, logs a warning and ignores workspaces', t => { + distTag.execWorkspaces(['list', '@scoped/pkg'], [], (err) => { + t.error(err) + t.match(log, 'Ignoring workspaces for specified package', 'logs a warning') + t.matchSnapshot(result, 'printed the expected output') + t.end() + }) + }) + + t.test('no args, one failing workspace sets exitCode to 1', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'root', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b', 'workspace-c', 'workspace-d'], + }), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.0.0', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.0.0', + }), + }, + 'workspace-c': { + 'package.json': JSON.stringify({ + name: 'workspace-c', + version: '1.0.0', + }), + }, + 'workspace-d': { + 'package.json': JSON.stringify({ + name: 'workspace-d', + version: '1.0.0', + }), + }, + }) + + distTag.execWorkspaces([], [], (err) => { + t.error(err) + t.equal(process.exitCode, 1, 'set the error status') + process.exitCode = 0 + t.match(log, 'dist-tag ls Couldn\'t get dist-tag data for workspace-d@latest', 'logs the error') + t.matchSnapshot(result, 'printed the expected output') + t.end() + }) + }) + + t.end() +}) + +t.test('add new tag', (t) => { const _nrf = npmRegistryFetchMock + t.teardown(() => { + npmRegistryFetchMock = _nrf + }) + npmRegistryFetchMock = async (url, opts) => { t.equal(opts.method, 'PUT', 'should trigger request to add new tag') t.equal(opts.body, '7.7.7', 'should point to expected version') } - prefix = t.testdir({}) + npm.prefix = t.testdir({}) distTag.exec(['add', '@scoped/another@7.7.7', 'c'], (err) => { - t.ifError(err, 'npm dist-tags add') + t.error(err, 'npm dist-tags add') t.matchSnapshot( result, 'should return success msg' ) - result = '' - log = '' - npmRegistryFetchMock = _nrf t.end() }) }) -test('add using valid semver range as name', (t) => { - prefix = t.testdir({}) +t.test('add using valid semver range as name', (t) => { + npm.prefix = t.testdir({}) distTag.exec(['add', '@scoped/another@7.7.7', '1.0.0'], (err) => { t.match( err, @@ -212,64 +356,62 @@ test('add using valid semver range as name', (t) => { log, 'should return success msg' ) - result = '' - log = '' t.end() }) }) -test('add 
missing args', (t) => { - prefix = t.testdir({}) +t.test('add missing args', (t) => { + npm.prefix = t.testdir({}) + config.tag = '' + t.teardown(() => { + delete config.tag + }) distTag.exec(['add', '@scoped/another@7.7.7'], (err) => { t.matchSnapshot(err, 'should exit usage error message') - result = '' - log = '' t.end() }) }) -test('add missing pkg name', (t) => { - prefix = t.testdir({}) +t.test('add missing pkg name', (t) => { + npm.prefix = t.testdir({}) distTag.exec(['add', null], (err) => { t.matchSnapshot(err, 'should exit usage error message') - result = '' - log = '' t.end() }) }) -test('set existing version', (t) => { - prefix = t.testdir({}) +t.test('set existing version', (t) => { + npm.prefix = t.testdir({}) distTag.exec(['set', '@scoped/another@0.6.0', 'b'], (err) => { - t.ifError(err, 'npm dist-tags set') + t.error(err, 'npm dist-tags set') t.matchSnapshot( log, 'should log warn msg' ) - log = '' t.end() }) }) -test('remove existing tag', (t) => { +t.test('remove existing tag', (t) => { const _nrf = npmRegistryFetchMock + t.teardown(() => { + npmRegistryFetchMock = _nrf + }) + npmRegistryFetchMock = async (url, opts) => { t.equal(opts.method, 'DELETE', 'should trigger request to remove tag') } - prefix = t.testdir({}) + npm.prefix = t.testdir({}) distTag.exec(['rm', '@scoped/another', 'c'], (err) => { - t.ifError(err, 'npm dist-tags rm') + t.error(err, 'npm dist-tags rm') t.matchSnapshot(log, 'should log remove info') t.matchSnapshot(result, 'should return success msg') - result = '' - log = '' - npmRegistryFetchMock = _nrf t.end() }) }) -test('remove non-existing tag', (t) => { - prefix = t.testdir({}) +t.test('remove non-existing tag', (t) => { + npm.prefix = t.testdir({}) distTag.exec(['rm', '@scoped/another', 'nonexistent'], (err) => { t.match( err, @@ -277,23 +419,19 @@ test('remove non-existing tag', (t) => { 'should exit with error' ) t.matchSnapshot(log, 'should log error msg') - result = '' - log = '' t.end() }) }) -test('remove missing pkg name', (t) => { - prefix = t.testdir({}) +t.test('remove missing pkg name', (t) => { + npm.prefix = t.testdir({}) distTag.exec(['rm', null], (err) => { t.matchSnapshot(err, 'should exit usage error message') - result = '' - log = '' t.end() }) }) -test('completion', t => { +t.test('completion', t => { const { completion } = distTag t.plan(2) diff --git a/test/lib/docs.js b/test/lib/docs.js index a7325738ba43e..fbd7584201247 100644 --- a/test/lib/docs.js +++ b/test/lib/docs.js @@ -1,51 +1,89 @@ const t = require('tap') +const { fake: mockNpm } = require('../fixtures/mock-npm.js') +const { join, sep } = require('path') -const requireInject = require('require-inject') -const pacote = { - manifest: async (spec, options) => { - return spec === 'nodocs' ? { +const pkgDirs = t.testdir({ + 'package.json': JSON.stringify({ + name: 'thispkg', + version: '1.2.3', + homepage: 'https://example.com', + }), + nodocs: { + 'package.json': JSON.stringify({ name: 'nodocs', version: '1.2.3', - } - : spec === 'docsurl' ? { - name: 'docsurl', - version: '1.2.3', - homepage: 'https://bugzilla.localhost/docsurl', - } - : spec === 'repourl' ? { - name: 'repourl', - version: '1.2.3', - repository: 'https://github.com/foo/repourl', - } - : spec === 'repoobj' ? { - name: 'repoobj', - version: '1.2.3', - repository: { url: 'https://github.com/foo/repoobj' }, - } - : spec === '.' ? 
{ - name: 'thispkg', - version: '1.2.3', - homepage: 'https://example.com', - } - : null + }), }, -} + docsurl: { + 'package.json': JSON.stringify({ + name: 'docsurl', + version: '1.2.3', + homepage: 'https://bugzilla.localhost/docsurl', + }), + }, + repourl: { + 'package.json': JSON.stringify({ + name: 'repourl', + version: '1.2.3', + repository: 'https://github.com/foo/repourl', + }), + }, + repoobj: { + 'package.json': JSON.stringify({ + name: 'repoobj', + version: '1.2.3', + repository: { url: 'https://github.com/foo/repoobj' }, + }), + }, + workspaces: { + 'package.json': JSON.stringify({ + name: 'workspaces-test', + version: '1.2.3-test', + workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], + }), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.2.3-a', + homepage: 'http://docs.workspace-a/', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.2.3-n', + repository: 'https://github.com/npm/workspace-b', + }), + }, + 'workspace-c': JSON.stringify({ + 'package.json': { + name: 'workspace-n', + version: '1.2.3-n', + }, + }), + }, +}) // keep a tally of which urls got opened -const opened = {} +let opened = {} const openUrl = async (npm, url, errMsg) => { opened[url] = opened[url] || 0 opened[url]++ } -const Docs = requireInject('../../lib/docs.js', { - pacote, +const Docs = t.mock('../../lib/docs.js', { '../../lib/utils/open-url.js': openUrl, }) +const flatOptions = {} +const npm = mockNpm({ flatOptions }) +const docs = new Docs(npm) -const docs = new Docs({ flatOptions: {} }) +t.afterEach(() => opened = {}) t.test('open docs urls', t => { + // XXX It is very odd that `where` is how pacote knows to look anywhere other + // than the cwd. I would think npm.localPrefix would factor in somehow + flatOptions.where = pkgDirs const expect = { nodocs: 'https://www.npmjs.com/package/nodocs', docsurl: 'https://bugzilla.localhost/docsurl', @@ -57,11 +95,13 @@ t.test('open docs urls', t => { t.plan(keys.length) keys.forEach(pkg => { t.test(pkg, t => { - docs.exec([pkg], (er) => { - if (er) - throw er + docs.exec([['.', pkg].join(sep)], (err) => { + if (err) + throw err const url = expect[pkg] - t.equal(opened[url], 1, url, {opened}) + t.match({ + [url]: 1, + }, opened, `opened ${url}`, {opened}) t.end() }) }) @@ -72,7 +112,42 @@ t.test('open default package if none specified', t => { docs.exec([], (er) => { if (er) throw er - t.equal(opened['https://example.com'], 2, 'opened expected url', {opened}) + t.equal(opened['https://example.com'], 1, 'opened expected url', {opened}) t.end() }) }) + +t.test('workspaces', (t) => { + flatOptions.where = undefined + npm.localPrefix = join(pkgDirs, 'workspaces') + t.test('all workspaces', (t) => { + docs.execWorkspaces([], [], (err) => { + t.notOk(err) + t.match({ + 'http://docs.workspace-a/': 1, + 'https://github.com/npm/workspace-b#readme': 1, + }, opened, 'opened two valid docs urls') + t.end() + }) + }) + + t.test('one workspace', (t) => { + docs.execWorkspaces([], ['workspace-a'], (err) => { + t.notOk(err) + t.match({ + 'http://docs.workspace-a/': 1, + }, opened, 'opened one requested docs urls') + t.end() + }) + }) + + t.test('invalid workspace', (t) => { + docs.execWorkspaces([], ['workspace-x'], (err) => { + t.match(err, /No workspaces found/) + t.match(err, /workspace-x/) + t.match({}, opened, 'opened no docs urls') + t.end() + }) + }) + t.end() +}) diff --git a/test/lib/doctor.js b/test/lib/doctor.js index eaa7ad72df8a5..0ceb670c15054 100644 --- 
a/test/lib/doctor.js +++ b/test/lib/doctor.js @@ -1,5 +1,4 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') const { join } = require('path') const fs = require('fs') @@ -104,6 +103,9 @@ const npm = { }, }, version: '7.1.0', + output: (data) => { + output.push(data) + }, } let latestNpm = npm.version @@ -120,12 +122,9 @@ const cacache = { }, } -const Doctor = requireInject('../../lib/doctor.js', { +const Doctor = t.mock('../../lib/doctor.js', { '../../lib/utils/is-windows.js': false, '../../lib/utils/ping.js': ping, - '../../lib/utils/output.js': (data) => { - output.push(data) - }, cacache, pacote, 'make-fetch-happen': fetch, @@ -134,7 +133,7 @@ const Doctor = requireInject('../../lib/doctor.js', { const doctor = new Doctor(npm) const origVersion = process.version -test('node versions', t => { +t.test('node versions', t => { t.plan(nodeVersions.length) nodeVersions.forEach(({ version }) => { @@ -276,18 +275,15 @@ test('node versions', t => { st.match(output, /local bin folder.*ok/, 'local bin is ok') st.match(output, /global bin folder.*ok/, 'global bin is ok') st.match(output, /cache contents.*ok/, 'cache contents is ok') - st.notEqual(output[0], ansiTrim(output[0]), 'output should contain color codes') + st.not(output[0], ansiTrim(output[0]), 'output should contain color codes') st.end() }) }) vt.test('npm doctor skips some tests in windows', st => { - const WinDoctor = requireInject('../../lib/doctor.js', { + const WinDoctor = t.mock('../../lib/doctor.js', { '../../lib/utils/is-windows.js': true, '../../lib/utils/ping.js': ping, - '../../lib/utils/output.js': (data) => { - output.push(data) - }, cacache, pacote, 'make-fetch-happen': fetch, @@ -563,12 +559,9 @@ test('node versions', t => { } } - const Doctor = requireInject('../../lib/doctor.js', { + const Doctor = t.mock('../../lib/doctor.js', { '../../lib/utils/is-windows.js': false, '../../lib/utils/ping.js': ping, - '../../lib/utils/output.js': (data) => { - output.push(data) - }, cacache, pacote, 'make-fetch-happen': fetch, @@ -909,7 +902,7 @@ test('node versions', t => { }) }) -test('outdated node version', vt => { +t.test('outdated node version', vt => { vt.plan(1) const version = 'v10.0.0' diff --git a/test/lib/edit.js b/test/lib/edit.js index acf03fa438c3c..09908165d7722 100644 --- a/test/lib/edit.js +++ b/test/lib/edit.js @@ -1,6 +1,5 @@ -const { test } = require('tap') +const t = require('tap') const { resolve } = require('path') -const requireInject = require('require-inject') const { EventEmitter } = require('events') let editorBin = null @@ -39,13 +38,13 @@ const npm = { } const gracefulFs = require('graceful-fs') -const Edit = requireInject('../../lib/edit.js', { +const Edit = t.mock('../../lib/edit.js', { child_process: childProcess, 'graceful-fs': gracefulFs, }) const edit = new Edit(npm) -test('npm edit', t => { +t.test('npm edit', t => { t.teardown(() => { rebuildArgs = null editorBin = null @@ -66,7 +65,7 @@ test('npm edit', t => { }) }) -test('rebuild fails', t => { +t.test('rebuild fails', t => { t.teardown(() => { rebuildFail = null rebuildArgs = null @@ -87,7 +86,7 @@ test('rebuild fails', t => { }) }) -test('npm edit editor has flags', t => { +t.test('npm edit editor has flags', t => { EDITOR = 'code -w' t.teardown(() => { rebuildArgs = null @@ -110,14 +109,14 @@ test('npm edit editor has flags', t => { }) }) -test('npm edit no args', t => { +t.test('npm edit no args', t => { return edit.exec([], (err) => { t.match(err, /npm edit/, 'throws usage 
error') t.end() }) }) -test('npm edit lstat error propagates', t => { +t.test('npm edit lstat error propagates', t => { const _lstat = gracefulFs.lstat gracefulFs.lstat = (dir, cb) => { return cb(new Error('lstat failed')) @@ -132,7 +131,7 @@ test('npm edit lstat error propagates', t => { }) }) -test('npm edit editor exit code error propagates', t => { +t.test('npm edit editor exit code error propagates', t => { EDITOR_CODE = 137 t.teardown(() => { EDITOR_CODE = 0 diff --git a/test/lib/exec.js b/test/lib/exec.js index 4dc7f31cc31f4..03a1bedf97e50 100644 --- a/test/lib/exec.js +++ b/test/lib/exec.js @@ -1,8 +1,6 @@ const t = require('tap') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') const { resolve, delimiter } = require('path') -const OUTPUT = [] -const output = (...msg) => OUTPUT.push(msg) const ARB_CTOR = [] const ARB_ACTUAL_TREE = {} @@ -25,25 +23,27 @@ class Arborist { let PROGRESS_ENABLED = true const LOG_WARN = [] let PROGRESS_IGNORED = false -const npm = { - flatOptions: { - yes: true, - call: '', - package: [], - legacyPeerDeps: false, - shell: 'shell-cmd', - }, +const flatOptions = { + npxCache: 'npx-cache-dir', + color: false, + cache: 'cache-dir', + legacyPeerDeps: false, + package: [], +} +const config = { + cache: 'bad-cache-dir', // this should never show up passed into libnpmexec + yes: true, + call: '', + package: [], + 'script-shell': 'shell-cmd', +} + +const npm = mockNpm({ + flatOptions, + config, localPrefix: 'local-prefix', localBin: 'local-bin', globalBin: 'global-bin', - config: { - get: k => { - if (k !== 'cache') - throw new Error('unexpected config get') - - return 'cache-dir' - }, - }, log: { disableProgress: () => { PROGRESS_ENABLED = false @@ -55,7 +55,7 @@ const npm = { LOG_WARN.push(args) }, }, -} +}) const RUN_SCRIPTS = [] const runScript = async opt => { @@ -87,18 +87,19 @@ const PATH = require('../../lib/utils/path.js') let CI_NAME = 'travis-ci' const mocks = { - '@npmcli/arborist': Arborist, - '@npmcli/run-script': runScript, - '@npmcli/ci-detect': () => CI_NAME, - pacote, - read, - 'mkdirp-infer-owner': mkdirp, - '../../lib/utils/output.js': output, + libnpmexec: t.mock('libnpmexec', { + '@npmcli/arborist': Arborist, + '@npmcli/run-script': runScript, + '@npmcli/ci-detect': () => CI_NAME, + pacote, + read, + 'mkdirp-infer-owner': mkdirp, + }), } -const Exec = requireInject('../../lib/exec.js', mocks) +const Exec = t.mock('../../lib/exec.js', mocks) const exec = new Exec(npm) -t.afterEach(cb => { +t.afterEach(() => { MKDIRPS.length = 0 ARB_CTOR.length = 0 ARB_REIFY.length = 0 @@ -108,33 +109,43 @@ t.afterEach(cb => { READ_ERROR = null LOG_WARN.length = 0 PROGRESS_IGNORED = false - npm.flatOptions.legacyPeerDeps = false - npm.flatOptions.package = [] - npm.flatOptions.call = '' + flatOptions.legacyPeerDeps = false + flatOptions.color = false + config['script-shell'] = 'shell-cmd' + config.package = [] + flatOptions.package = [] + config.call = '' + config.yes = true + npm.color = false npm.localBin = 'local-bin' npm.globalBin = 'global-bin' - cb() }) t.test('npx foo, bin already exists locally', t => { const path = t.testdir({ - foo: 'just some file', + node_modules: { + '.bin': { + foo: 'just some file', + }, + }, }) PROGRESS_IGNORED = true - npm.localBin = path + npm.localBin = resolve(path, 'node_modules', '.bin') exec.exec(['foo', 'one arg', 'two arg'], er => { - t.ifError(er, 'npm exec') + t.error(er, 'npm exec') t.match(RUN_SCRIPTS, [{ pkg: { scripts: { npx: 'foo' }}, args: ['one arg', 
'two arg'], + cache: flatOptions.cache, + npxCache: flatOptions.npxCache, banner: false, path: process.cwd(), stdioString: true, event: 'npx', env: { - PATH: [path, ...PATH].join(delimiter), + PATH: [npm.localBin, ...PATH].join(delimiter), }, stdio: 'inherit', }]) @@ -144,14 +155,18 @@ t.test('npx foo, bin already exists locally', t => { t.test('npx foo, bin already exists globally', t => { const path = t.testdir({ - foo: 'just some file', + node_modules: { + '.bin': { + foo: 'just some file', + }, + }, }) PROGRESS_IGNORED = true - npm.globalBin = path + npm.globalBin = resolve(path, 'node_modules', '.bin') exec.exec(['foo', 'one arg', 'two arg'], er => { - t.ifError(er, 'npm exec') + t.error(er, 'npm exec') t.match(RUN_SCRIPTS, [{ pkg: { scripts: { npx: 'foo' }}, args: ['one arg', 'two arg'], @@ -160,7 +175,7 @@ t.test('npx foo, bin already exists globally', t => { stdioString: true, event: 'npx', env: { - PATH: [path, ...PATH].join(delimiter), + PATH: [npm.globalBin, ...PATH].join(delimiter), }, stdio: 'inherit', }]) @@ -186,7 +201,7 @@ t.test('npm exec foo, already present locally', t => { if (er) throw er t.strictSame(MKDIRPS, [], 'no need to make any dirs') - t.match(ARB_CTOR, [{ package: ['foo'], path }]) + t.match(ARB_CTOR, [{ path }]) t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.match(RUN_SCRIPTS, [{ @@ -214,7 +229,7 @@ t.test('npm exec <noargs>, run interactive shell', t => { ARB_CTOR.length = 0 MKDIRPS.length = 0 ARB_REIFY.length = 0 - OUTPUT.length = 0 + npm._mockOutputs.length = 0 exec.exec([], er => { if (er) throw er @@ -240,14 +255,28 @@ t.test('npm exec <noargs>, run interactive shell', t => { cb() }) } - t.test('print message when tty and not in CI', t => { CI_NAME = null process.stdin.isTTY = true run(t, true, () => { t.strictSame(LOG_WARN, []) - t.strictSame(OUTPUT, [ - ['\nEntering npm script environment\nType \'exit\' or ^D when finished\n'], + t.strictSame(npm._mockOutputs, [ + [`\nEntering npm script environment at location:\n${process.cwd()}\nType 'exit' or ^D when finished\n`], + ], 'printed message about interactive shell') + t.end() + }) + }) + + t.test('print message with color when tty and not in CI', t => { + CI_NAME = null + process.stdin.isTTY = true + npm.color = true + flatOptions.color = true + + run(t, true, () => { + t.strictSame(LOG_WARN, []) + t.strictSame(npm._mockOutputs, [ + [`\u001b[0m\u001b[0m\n\u001b[0mEntering npm script environment\u001b[0m\u001b[0m at location:\u001b[0m\n\u001b[0m\u001b[2m${process.cwd()}\u001b[22m\u001b[0m\u001b[1m\u001b[22m\n\u001b[1mType 'exit' or ^D when finished\u001b[22m\n\u001b[1m\u001b[22m`], ], 'printed message about interactive shell') t.end() }) @@ -258,7 +287,7 @@ t.test('npm exec <noargs>, run interactive shell', t => { process.stdin.isTTY = false run(t, true, () => { t.strictSame(LOG_WARN, []) - t.strictSame(OUTPUT, [], 'no message about interactive shell') + t.strictSame(npm._mockOutputs, [], 'no message about interactive shell') t.end() }) }) @@ -270,7 +299,30 @@ t.test('npm exec <noargs>, run interactive shell', t => { t.strictSame(LOG_WARN, [ ['exec', 'Interactive mode disabled in CI environment'], ]) - t.strictSame(OUTPUT, [], 'no message about interactive shell') + t.strictSame(npm._mockOutputs, [], 'no message about interactive shell') + t.end() + }) + }) + + t.test('not defined script-shell config value', t => { + CI_NAME = null + process.stdin.isTTY = true + config['script-shell'] = undefined + + exec.exec([], er => { + if (er) + throw 
er + + t.match(RUN_SCRIPTS, [{ + pkg: { scripts: { npx: /sh|cmd/ } }, + }]) + + LOG_WARN.length = 0 + ARB_CTOR.length = 0 + MKDIRPS.length = 0 + ARB_REIFY.length = 0 + npm._mockOutputs.length = 0 + RUN_SCRIPTS.length = 0 t.end() }) }) @@ -280,7 +332,7 @@ t.test('npm exec <noargs>, run interactive shell', t => { t.test('npm exec foo, not present locally or in central loc', t => { const path = t.testdir() - const installDir = resolve('cache-dir/_npx/f7fbba6e0636f890') + const installDir = resolve('npx-cache-dir/f7fbba6e0636f890') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { children: new Map(), @@ -300,7 +352,7 @@ t.test('npm exec foo, not present locally or in central loc', t => { if (er) throw er t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [{ package: ['foo'], path }]) + t.match(ARB_CTOR, [{ path }]) t.match(ARB_REIFY, [{add: ['foo@'], legacyPeerDeps: false}], 'need to install foo@') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` @@ -320,7 +372,7 @@ t.test('npm exec foo, not present locally or in central loc', t => { t.test('npm exec foo, not present locally but in central loc', t => { const path = t.testdir() - const installDir = resolve('cache-dir/_npx/f7fbba6e0636f890') + const installDir = resolve('npx-cache-dir/f7fbba6e0636f890') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { children: new Map(), @@ -340,7 +392,7 @@ t.test('npm exec foo, not present locally but in central loc', t => { if (er) throw er t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [{ package: ['foo'], path }]) + t.match(ARB_CTOR, [{ path }]) t.match(ARB_REIFY, [], 'no need to install again, already there') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` @@ -360,7 +412,7 @@ t.test('npm exec foo, not present locally but in central loc', t => { t.test('npm exec foo, present locally but wrong version', t => { const path = t.testdir() - const installDir = resolve('cache-dir/_npx/2badf4630f1cfaad') + const installDir = resolve('npx-cache-dir/2badf4630f1cfaad') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { children: new Map(), @@ -380,7 +432,7 @@ t.test('npm exec foo, present locally but wrong version', t => { if (er) throw er t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [{ package: ['foo'], path }]) + t.match(ARB_CTOR, [{ path }]) t.match(ARB_REIFY, [{ add: ['foo@2.x'], legacyPeerDeps: false }], 'need to add foo@2.x') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` @@ -412,12 +464,13 @@ t.test('npm exec --package=foo bar', t => { }, _from: 'foo@', } - npm.flatOptions.package = ['foo'] + config.package = ['foo'] + flatOptions.package = ['foo'] exec.exec(['bar', 'one arg', 'two arg'], er => { if (er) throw er t.strictSame(MKDIRPS, [], 'no need to make any dirs') - t.match(ARB_CTOR, [{ package: ['foo'], path }]) + t.match(ARB_CTOR, [{ path }]) t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.match(RUN_SCRIPTS, [{ @@ -459,7 +512,7 @@ t.test('npm exec @foo/bar -- --some=arg, locally installed', t => { if (er) throw er t.strictSame(MKDIRPS, [], 'no need to make any dirs') - t.match(ARB_CTOR, [{ package: ['@foo/bar'], path }]) + t.match(ARB_CTOR, [{ 
path }]) t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.match(RUN_SCRIPTS, [{ @@ -502,7 +555,7 @@ t.test('npm exec @foo/bar, with same bin alias and no unscoped named bin, locall if (er) throw er t.strictSame(MKDIRPS, [], 'no need to make any dirs') - t.match(ARB_CTOR, [{ package: ['@foo/bar'], path }]) + t.match(ARB_CTOR, [{ path }]) t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.match(RUN_SCRIPTS, [{ @@ -552,10 +605,10 @@ t.test('run command with 2 packages, need install, verify sort', t => { t.plan(cases.length) for (const packages of cases) { t.test(packages.join(', '), t => { - npm.flatOptions.package = packages - const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b)) + config.package = packages + const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b, 'en')) const path = t.testdir() - const installDir = resolve('cache-dir/_npx/07de77790e5f40f2') + const installDir = resolve('npx-cache-dir/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { children: new Map(), @@ -583,7 +636,7 @@ t.test('run command with 2 packages, need install, verify sort', t => { if (er) throw er t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [{ package: packages, path }]) + t.match(ARB_CTOR, [{ path }]) t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install both packages') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` @@ -652,8 +705,8 @@ t.test('npm exec foo, many bins in package, none named foo', t => { t.test('npm exec -p foo -c "ls -laF"', t => { const path = t.testdir() npm.localPrefix = path - npm.flatOptions.package = ['foo'] - npm.flatOptions.call = 'ls -laF' + config.package = ['foo'] + config.call = 'ls -laF' ARB_ACTUAL_TREE[path] = { children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), } @@ -666,7 +719,7 @@ t.test('npm exec -p foo -c "ls -laF"', t => { if (er) throw er t.strictSame(MKDIRPS, [], 'no need to make any dirs') - t.match(ARB_CTOR, [{ package: ['foo'], path }]) + t.match(ARB_CTOR, [{ path }]) t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.match(RUN_SCRIPTS, [{ @@ -683,7 +736,7 @@ t.test('npm exec -p foo -c "ls -laF"', t => { }) t.test('positional args and --call together is an error', t => { - npm.flatOptions.call = 'true' + config.call = 'true' exec.exec(['foo'], er => { t.equal(er, exec.usage) t.end() @@ -705,12 +758,12 @@ t.test('prompt when installs are needed if not already present and shell is a TT const packages = ['foo', 'bar'] READ_RESULT = 'yolo' - npm.flatOptions.package = packages - npm.flatOptions.yes = undefined + config.package = packages + config.yes = undefined - const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b)) + const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b, 'en')) const path = t.testdir() - const installDir = resolve('cache-dir/_npx/07de77790e5f40f2') + const installDir = resolve('npx-cache-dir/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { children: new Map(), @@ -738,7 +791,7 @@ t.test('prompt when installs are needed if not already present and shell is a TT if (er) throw er t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [{ package: packages, path }]) + 
t.match(ARB_CTOR, [{ path }]) t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install both packages') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` @@ -774,12 +827,12 @@ t.test('skip prompt when installs are needed if not already present and shell is const packages = ['foo', 'bar'] READ_RESULT = 'yolo' - npm.flatOptions.package = packages - npm.flatOptions.yes = undefined + config.package = packages + config.yes = undefined - const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b)) + const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b, 'en')) const path = t.testdir() - const installDir = resolve('cache-dir/_npx/07de77790e5f40f2') + const installDir = resolve('npx-cache-dir/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { children: new Map(), @@ -807,7 +860,7 @@ t.test('skip prompt when installs are needed if not already present and shell is if (er) throw er t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [{ package: packages, path }]) + t.match(ARB_CTOR, [{ path }]) t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install both packages') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` @@ -841,12 +894,12 @@ t.test('skip prompt when installs are needed if not already present and shell is const packages = ['foo'] READ_RESULT = 'yolo' - npm.flatOptions.package = packages - npm.flatOptions.yes = undefined + config.package = packages + config.yes = undefined - const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b)) + const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b, 'en')) const path = t.testdir() - const installDir = resolve('cache-dir/_npx/f7fbba6e0636f890') + const installDir = resolve('npx-cache-dir/f7fbba6e0636f890') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { children: new Map(), @@ -866,7 +919,7 @@ t.test('skip prompt when installs are needed if not already present and shell is if (er) throw er t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [{ package: packages, path }]) + t.match(ARB_CTOR, [{ path }]) t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install the package') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` @@ -900,11 +953,11 @@ t.test('abort if prompt rejected', t => { const packages = ['foo', 'bar'] READ_RESULT = 'no, why would I want such a thing??' 
- npm.flatOptions.package = packages - npm.flatOptions.yes = undefined + config.package = packages + config.yes = undefined const path = t.testdir() - const installDir = resolve('cache-dir/_npx/07de77790e5f40f2') + const installDir = resolve('npx-cache-dir/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { children: new Map(), @@ -929,9 +982,9 @@ t.test('abort if prompt rejected', t => { _from: 'bar@', } exec.exec(['foobar'], er => { - t.equal(er, 'canceled', 'should be canceled') + t.match(er, /canceled/, 'should be canceled') t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [{ package: packages, path }]) + t.match(ARB_CTOR, [{ path }]) t.strictSame(ARB_REIFY, [], 'no install performed') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.strictSame(RUN_SCRIPTS, []) @@ -958,11 +1011,11 @@ t.test('abort if prompt false', t => { const packages = ['foo', 'bar'] READ_ERROR = 'canceled' - npm.flatOptions.package = packages - npm.flatOptions.yes = undefined + config.package = packages + config.yes = undefined const path = t.testdir() - const installDir = resolve('cache-dir/_npx/07de77790e5f40f2') + const installDir = resolve('npx-cache-dir/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { children: new Map(), @@ -989,7 +1042,7 @@ t.test('abort if prompt false', t => { exec.exec(['foobar'], er => { t.equal(er, 'canceled', 'should be canceled') t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [{ package: packages, path }]) + t.match(ARB_CTOR, [{ path }]) t.strictSame(ARB_REIFY, [], 'no install performed') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.strictSame(RUN_SCRIPTS, []) @@ -1015,11 +1068,11 @@ t.test('abort if -n provided', t => { const packages = ['foo', 'bar'] - npm.flatOptions.package = packages - npm.flatOptions.yes = false + config.package = packages + config.yes = false const path = t.testdir() - const installDir = resolve('cache-dir/_npx/07de77790e5f40f2') + const installDir = resolve('npx-cache-dir/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { children: new Map(), @@ -1044,20 +1097,20 @@ t.test('abort if -n provided', t => { _from: 'bar@', } exec.exec(['foobar'], er => { - t.equal(er, 'canceled', 'should be canceled') + t.match(er, /canceled/, 'should be canceled') t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [{ package: packages, path }]) + t.match(ARB_CTOR, [{ path }]) t.strictSame(ARB_REIFY, [], 'no install performed') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.strictSame(RUN_SCRIPTS, []) t.strictSame(READ, []) - t.done() + t.end() }) }) t.test('forward legacyPeerDeps opt', t => { const path = t.testdir() - const installDir = resolve('cache-dir/_npx/f7fbba6e0636f890') + const installDir = resolve('npx-cache-dir/f7fbba6e0636f890') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { children: new Map(), @@ -1073,12 +1126,103 @@ t.test('forward legacyPeerDeps opt', t => { }, _from: 'foo@', } - npm.flatOptions.yes = true - npm.flatOptions.legacyPeerDeps = true + config.yes = true + flatOptions.legacyPeerDeps = true exec.exec(['foo'], er => { if (er) throw er t.match(ARB_REIFY, [{add: ['foo@'], legacyPeerDeps: true}], 'need to install foo@ using legacyPeerDeps opt') - t.done() + t.end() + }) +}) + +t.test('workspaces', t => { + npm.localPrefix = t.testdir({ + node_modules: { + '.bin': { + foo: '', + }, + }, + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: 
'1.0.0', + bin: 'cli.js', + }), + 'cli.js': '', + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + }), + }, + }, + 'package.json': JSON.stringify({ + name: 'root', + version: '1.0.0', + workspaces: ['packages/*'], + }), }) + + PROGRESS_IGNORED = true + npm.localBin = resolve(npm.localPrefix, 'node_modules/.bin') + + t.test('with args, run scripts in the context of a workspace', t => { + exec.execWorkspaces(['foo', 'one arg', 'two arg'], ['a', 'b'], er => { + if (er) + throw er + + t.match(RUN_SCRIPTS, [{ + pkg: { scripts: { npx: 'foo' }}, + args: ['one arg', 'two arg'], + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { + PATH: [npm.localBin, ...PATH].join(delimiter), + }, + stdio: 'inherit', + }]) + t.end() + }) + }) + + t.test('no args, spawn interactive shell', async t => { + CI_NAME = null + process.stdin.isTTY = true + + await new Promise((res, rej) => { + exec.execWorkspaces([], ['a'], er => { + if (er) + return rej(er) + + t.strictSame(LOG_WARN, []) + t.strictSame(npm._mockOutputs, [ + [`\nEntering npm script environment in workspace a@1.0.0 at location:\n${resolve(npm.localPrefix, 'packages/a')}\nType 'exit' or ^D when finished\n`], + ], 'printed message about interactive shell') + res() + }) + }) + + npm.color = true + flatOptions.color = true + npm._mockOutputs.length = 0 + await new Promise((res, rej) => { + exec.execWorkspaces([], ['a'], er => { + if (er) + return rej(er) + + t.strictSame(LOG_WARN, []) + t.strictSame(npm._mockOutputs, [ + [`\u001b[0m\u001b[0m\n\u001b[0mEntering npm script environment\u001b[0m\u001b[0m in workspace \u001b[32ma@1.0.0\u001b[39m at location:\u001b[0m\n\u001b[0m\u001b[2m${resolve(npm.localPrefix, 'packages/a')}\u001b[22m\u001b[0m\u001b[1m\u001b[22m\n\u001b[1mType 'exit' or ^D when finished\u001b[22m\n\u001b[1m\u001b[22m`], + ], 'printed message about interactive shell') + res() + }) + }) + }) + + t.end() }) diff --git a/test/lib/explain.js b/test/lib/explain.js index 22bfb8639ecff..f690aeb2c7b02 100644 --- a/test/lib/explain.js +++ b/test/lib/explain.js @@ -1,18 +1,17 @@ const t = require('tap') -const requireInject = require('require-inject') const npm = { prefix: null, color: true, flatOptions: {}, + output: (...args) => { + OUTPUT.push(args) + }, } const { resolve } = require('path') const OUTPUT = [] -const Explain = requireInject('../../lib/explain.js', { - '../../lib/utils/output.js': (...args) => { - OUTPUT.push(args) - }, +const Explain = t.mock('../../lib/explain.js', { // keep the snapshots pared down a bit, since this has its own tests. 
'../../lib/utils/explain-dep.js': { @@ -27,7 +26,7 @@ t.test('no args throws usage', t => { t.plan(1) explain.exec([], er => { t.equal(er, explain.usage) - t.done() + t.end() }) }) @@ -49,10 +48,9 @@ t.test('invalid package name throws not found', t => { }) t.test('explain some nodes', t => { - t.afterEach((cb) => { + t.afterEach(() => { OUTPUT.length = 0 npm.flatOptions.json = false - cb() }) npm.prefix = t.testdir({ @@ -172,8 +170,134 @@ t.test('explain some nodes', t => { t.plan(1) explain.exec(['asdf/foo/bar', 'quux@1.x'], er => { t.equal(er, 'No dependencies found matching asdf/foo/bar, quux@1.x') - t.done() + t.end() }) }) t.end() }) + +t.test('workspaces', async t => { + npm.localPrefix = npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'workspaces-project', + version: '1.0.0', + workspaces: ['packages/*'], + dependencies: { + abbrev: '^1.0.0', + }, + }), + node_modules: { + a: t.fixture('symlink', '../packages/a'), + b: t.fixture('symlink', '../packages/b'), + c: t.fixture('symlink', '../packages/c'), + once: { + 'package.json': JSON.stringify({ + name: 'once', + version: '1.0.0', + dependencies: { + wrappy: '2.0.0', + }, + }), + }, + abbrev: { + 'package.json': JSON.stringify({ + name: 'abbrev', + version: '1.0.0', + }), + }, + wrappy: { + 'package.json': JSON.stringify({ + name: 'wrappy', + version: '2.0.0', + }), + }, + }, + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + dependencies: { + once: '1.0.0', + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + dependencies: { + abbrev: '^1.0.0', + }, + }), + }, + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + }), + }, + }, + }) + + await new Promise((res, rej) => { + explain.exec(['wrappy'], err => { + if (err) + rej(err) + + t.strictSame( + OUTPUT, + [['wrappy@2.0.0 depth=Infinity color=true']], + 'should explain workspaces deps' + ) + OUTPUT.length = 0 + res() + }) + }) + + await new Promise((res, rej) => { + explain.execWorkspaces(['wrappy'], ['a'], err => { + if (err) + rej(err) + + t.strictSame( + OUTPUT, + [ + ['wrappy@2.0.0 depth=Infinity color=true'], + ], + 'should explain deps when filtering to a single ws' + ) + OUTPUT.length = 0 + res() + }) + }) + + await new Promise((res, rej) => { + explain.execWorkspaces(['abbrev'], [], err => { + if (err) + rej(err) + + t.strictSame( + OUTPUT, + [ + ['abbrev@1.0.0 depth=Infinity color=true'], + ], + 'should explain deps of workspaces only' + ) + OUTPUT.length = 0 + res() + }) + }) + + await new Promise((res, rej) => { + explain.execWorkspaces(['abbrev'], ['a'], err => { + t.equal( + err, + 'No dependencies found matching abbrev', + 'should throw usage if dep not found within filtered ws' + ) + + res() + }) + }) +}) diff --git a/test/lib/explore.js b/test/lib/explore.js index 6f1f3bb47f240..fd9949e73fc4c 100644 --- a/test/lib/explore.js +++ b/test/lib/explore.js @@ -1,5 +1,4 @@ const t = require('tap') -const requireInject = require('require-inject') let RPJ_ERROR = null let RPJ_CALLED = '' @@ -44,20 +43,13 @@ const mockRunScript = ({ pkg, banner, path, event, stdio }) => { } const output = [] -let ERROR_HANDLER_CALLED = null const logs = [] const getExplore = (windows) => { - const Explore = requireInject('../../lib/explore.js', { + const Explore = t.mock('../../lib/explore.js', { '../../lib/utils/is-windows.js': windows, path: require('path')[windows ? 
'win32' : 'posix'], - '../../lib/utils/error-handler.js': er => { - ERROR_HANDLER_CALLED = er - }, 'read-package-json-fast': mockRPJ, '@npmcli/run-script': mockRunScript, - '../../lib/utils/output.js': out => { - output.push(out) - }, }) const npm = { dir: windows ? 'c:\\npm\\dir' : '/npm/dir', @@ -69,6 +61,9 @@ const getExplore = (windows) => { flatOptions: { shell: 'shell-command', }, + output: out => { + output.push(out) + }, } return new Explore(npm) } @@ -77,21 +72,16 @@ const windowsExplore = getExplore(true) const posixExplore = getExplore(false) t.test('basic interactive', t => { - t.afterEach((cb) => { - output.length = 0 - cb() - }) + t.afterEach(() => output.length = 0) t.test('windows', t => windowsExplore.exec(['pkg'], er => { if (er) throw er t.strictSame({ - ERROR_HANDLER_CALLED, RPJ_CALLED, RUN_SCRIPT_EXEC, }, { - ERROR_HANDLER_CALLED: null, RPJ_CALLED: 'c:\\npm\\dir\\pkg\\package.json', RUN_SCRIPT_EXEC: 'shell-command', }) @@ -106,11 +96,9 @@ t.test('basic interactive', t => { throw er t.strictSame({ - ERROR_HANDLER_CALLED, RPJ_CALLED, RUN_SCRIPT_EXEC, }, { - ERROR_HANDLER_CALLED: null, RPJ_CALLED: '/npm/dir/pkg/package.json', RUN_SCRIPT_EXEC: 'shell-command', }) @@ -125,16 +113,14 @@ t.test('basic interactive', t => { t.test('interactive tracks exit code', t => { const { exitCode } = process - t.beforeEach((cb) => { + t.beforeEach(() => { process.exitCode = exitCode RUN_SCRIPT_EXIT_CODE = 99 - cb() }) - t.afterEach((cb) => { + t.afterEach(() => { RUN_SCRIPT_EXIT_CODE = 0 output.length = 0 process.exitCode = exitCode - cb() }) t.test('windows', t => windowsExplore.exec(['pkg'], er => { @@ -142,11 +128,9 @@ t.test('interactive tracks exit code', t => { throw er t.strictSame({ - ERROR_HANDLER_CALLED, RPJ_CALLED, RUN_SCRIPT_EXEC, }, { - ERROR_HANDLER_CALLED: null, RPJ_CALLED: 'c:\\npm\\dir\\pkg\\package.json', RUN_SCRIPT_EXEC: 'shell-command', }) @@ -162,11 +146,9 @@ t.test('interactive tracks exit code', t => { throw er t.strictSame({ - ERROR_HANDLER_CALLED, RPJ_CALLED, RUN_SCRIPT_EXEC, }, { - ERROR_HANDLER_CALLED: null, RPJ_CALLED: '/npm/dir/pkg/package.json', RUN_SCRIPT_EXEC: 'shell-command', }) @@ -223,21 +205,16 @@ t.test('interactive tracks exit code', t => { }) t.test('basic non-interactive', t => { - t.afterEach((cb) => { - output.length = 0 - cb() - }) + t.afterEach(() => output.length = 0) t.test('windows', t => windowsExplore.exec(['pkg', 'ls'], er => { if (er) throw er t.strictSame({ - ERROR_HANDLER_CALLED, RPJ_CALLED, RUN_SCRIPT_EXEC, }, { - ERROR_HANDLER_CALLED: null, RPJ_CALLED: 'c:\\npm\\dir\\pkg\\package.json', RUN_SCRIPT_EXEC: 'ls', }) @@ -250,11 +227,9 @@ t.test('basic non-interactive', t => { throw er t.strictSame({ - ERROR_HANDLER_CALLED, RPJ_CALLED, RUN_SCRIPT_EXEC, }, { - ERROR_HANDLER_CALLED: null, RPJ_CALLED: '/npm/dir/pkg/package.json', RUN_SCRIPT_EXEC: 'ls', }) @@ -267,22 +242,17 @@ t.test('basic non-interactive', t => { t.test('signal fails non-interactive', t => { const { exitCode } = process - t.afterEach((cb) => { + t.afterEach(() => { output.length = 0 logs.length = 0 - cb() }) - t.beforeEach(cb => { + t.beforeEach(() => { RUN_SCRIPT_SIGNAL = 'SIGPROBLEM' RUN_SCRIPT_EXIT_CODE = null process.exitCode = exitCode - cb() - }) - t.afterEach(cb => { - process.exitCode = exitCode - cb() }) + t.afterEach(() => process.exitCode = exitCode) t.test('windows', t => windowsExplore.exec(['pkg', 'ls'], er => { t.match(er, { @@ -324,7 +294,6 @@ t.test('signal fails non-interactive', t => { t.test('usage if no pkg provided', t => { t.teardown(() => { 
output.length = 0 - ERROR_HANDLER_CALLED = null }) const noPkg = [ [], @@ -338,13 +307,11 @@ t.test('usage if no pkg provided', t => { for (const args of noPkg) { t.test(JSON.stringify(args), t => { posixExplore.exec(args, er => { - t.equal(er, 'npm explore <pkg> [ -- <command>]') + t.match(er, 'Usage:') t.strictSame({ - ERROR_HANDLER_CALLED: null, RPJ_CALLED, RUN_SCRIPT_EXEC, }, { - ERROR_HANDLER_CALLED: null, RPJ_CALLED: '/npm/dir/pkg/package.json', RUN_SCRIPT_EXEC: 'ls', }) @@ -359,11 +326,9 @@ t.test('pkg not installed', t => { posixExplore.exec(['pkg', 'ls'], er => { t.strictSame({ - ERROR_HANDLER_CALLED, RPJ_CALLED, RUN_SCRIPT_EXEC, }, { - ERROR_HANDLER_CALLED: null, RPJ_CALLED: '/npm/dir/pkg/package.json', RUN_SCRIPT_EXEC: 'ls', }) diff --git a/test/lib/fund.js b/test/lib/fund.js index 831d76f151bb7..784989827edc1 100644 --- a/test/lib/fund.js +++ b/test/lib/fund.js @@ -1,5 +1,5 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') +const { fake: mockNpm } = require('../fixtures/mock-npm') const version = '1.0.0' const funding = { @@ -180,19 +180,18 @@ const conflictingFundingPackages = { let result = '' let printUrl = '' -const _flatOptions = { +const config = { color: false, json: false, global: false, - prefix: undefined, unicode: false, - which: undefined, + which: null, } const openUrl = async (npm, url, msg) => { if (url === 'http://npmjs.org') throw new Error('ERROR') - if (_flatOptions.json) { + if (config.json) { printUrl = JSON.stringify({ title: msg, url: url, @@ -200,11 +199,8 @@ const openUrl = async (npm, url, msg) => { } else printUrl = `${msg}:\n ${url}` } -const Fund = requireInject('../../lib/fund.js', { +const Fund = t.mock('../../lib/fund.js', { '../../lib/utils/open-url.js': openUrl, - '../../lib/utils/output.js': msg => { - result += msg + '\n' - }, pacote: { manifest: (arg) => arg.name === 'ntl' ? 
Promise.resolve({ @@ -213,15 +209,16 @@ const Fund = requireInject('../../lib/fund.js', { : Promise.reject(new Error('ERROR')), }, }) -const fund = new Fund({ - flatOptions: _flatOptions, - get prefix () { - return _flatOptions.prefix +const npm = mockNpm({ + config, + output: msg => { + result += msg + '\n' }, }) +const fund = new Fund(npm) -test('fund with no package containing funding', t => { - _flatOptions.prefix = t.testdir({ +t.test('fund with no package containing funding', t => { + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'no-funding-package', version: '0.0.0', @@ -229,31 +226,31 @@ test('fund with no package containing funding', t => { }) fund.exec([], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.matchSnapshot(result, 'should print empty funding info') result = '' t.end() }) }) -test('fund in which same maintainer owns all its deps', t => { - _flatOptions.prefix = t.testdir(maintainerOwnsAllDeps) +t.test('fund in which same maintainer owns all its deps', t => { + npm.prefix = t.testdir(maintainerOwnsAllDeps) fund.exec([], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.matchSnapshot(result, 'should print stack packages together') result = '' t.end() }) }) -test('fund in which same maintainer owns all its deps, using --json option', t => { - _flatOptions.json = true - _flatOptions.prefix = t.testdir(maintainerOwnsAllDeps) +t.test('fund in which same maintainer owns all its deps, using --json option', t => { + config.json = true + npm.prefix = t.testdir(maintainerOwnsAllDeps) fund.exec([], (err) => { - t.ifError(err, 'should not error out') - t.deepEqual( + t.error(err, 'should not error out') + t.same( JSON.parse(result), { length: 3, @@ -281,16 +278,16 @@ test('fund in which same maintainer owns all its deps, using --json option', t = ) result = '' - _flatOptions.json = false + config.json = false t.end() }) }) -test('fund containing multi-level nested deps with no funding', t => { - _flatOptions.prefix = t.testdir(nestedNoFundingPackages) +t.test('fund containing multi-level nested deps with no funding', t => { + npm.prefix = t.testdir(nestedNoFundingPackages) fund.exec([], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.matchSnapshot( result, 'should omit dependencies with no funding declared' @@ -301,13 +298,13 @@ test('fund containing multi-level nested deps with no funding', t => { }) }) -test('fund containing multi-level nested deps with no funding, using --json option', t => { - _flatOptions.prefix = t.testdir(nestedNoFundingPackages) - _flatOptions.json = true +t.test('fund containing multi-level nested deps with no funding, using --json option', t => { + npm.prefix = t.testdir(nestedNoFundingPackages) + config.json = true fund.exec([], (err) => { - t.ifError(err, 'should not error out') - t.deepEqual( + t.error(err, 'should not error out') + t.same( JSON.parse(result), { length: 2, @@ -328,18 +325,18 @@ test('fund containing multi-level nested deps with no funding, using --json opti ) result = '' - _flatOptions.json = false + config.json = false t.end() }) }) -test('fund containing multi-level nested deps with no funding, using --json option', t => { - _flatOptions.prefix = t.testdir(nestedMultipleFundingPackages) - _flatOptions.json = true +t.test('fund containing multi-level nested deps with no funding, using --json option', t => { + npm.prefix = t.testdir(nestedMultipleFundingPackages) + config.json = true 
fund.exec([], (err) => { - t.ifError(err, 'should not error out') - t.deepEqual( + t.error(err, 'should not error out') + t.same( JSON.parse(result), { length: 2, @@ -385,29 +382,29 @@ test('fund containing multi-level nested deps with no funding, using --json opti ) result = '' - _flatOptions.json = false + config.json = false t.end() }) }) -test('fund does not support global', t => { - _flatOptions.prefix = t.testdir({}) - _flatOptions.global = true +t.test('fund does not support global', t => { + npm.prefix = t.testdir({}) + config.global = true fund.exec([], (err) => { t.match(err.code, 'EFUNDGLOBAL', 'should throw EFUNDGLOBAL error') result = '' - _flatOptions.global = false + config.global = false t.end() }) }) -test('fund using package argument', t => { - _flatOptions.prefix = t.testdir(maintainerOwnsAllDeps) +t.test('fund using package argument', t => { + npm.prefix = t.testdir(maintainerOwnsAllDeps) fund.exec(['.'], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.matchSnapshot(printUrl, 'should open funding url') printUrl = '' @@ -415,10 +412,10 @@ test('fund using package argument', t => { }) }) -test('fund does not support global, using --json option', t => { - _flatOptions.prefix = t.testdir({}) - _flatOptions.global = true - _flatOptions.json = true +t.test('fund does not support global, using --json option', t => { + npm.prefix = t.testdir({}) + config.global = true + config.json = true fund.exec([], (err) => { t.equal(err.code, 'EFUNDGLOBAL', 'should use EFUNDGLOBAL error code') @@ -428,14 +425,14 @@ test('fund does not support global, using --json option', t => { 'should use expected error msg' ) - _flatOptions.global = false - _flatOptions.json = false + config.global = false + config.json = false t.end() }) }) -test('fund using string shorthand', t => { - _flatOptions.prefix = t.testdir({ +t.test('fund using string shorthand', t => { + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'funding-string-shorthand', version: '0.0.0', @@ -444,7 +441,7 @@ test('fund using string shorthand', t => { }) fund.exec(['.'], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.matchSnapshot(printUrl, 'should open string-only url') printUrl = '' @@ -452,11 +449,11 @@ test('fund using string shorthand', t => { }) }) -test('fund using nested packages with multiple sources', t => { - _flatOptions.prefix = t.testdir(nestedMultipleFundingPackages) +t.test('fund using nested packages with multiple sources', t => { + npm.prefix = t.testdir(nestedMultipleFundingPackages) fund.exec(['.'], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.matchSnapshot(result, 'should prompt with all available URLs') result = '' @@ -464,8 +461,8 @@ test('fund using nested packages with multiple sources', t => { }) }) -test('fund using symlink ref', t => { - _flatOptions.prefix = t.testdir({ +t.test('fund using symlink ref', t => { + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'using-symlink-ref', version: '1.0.0', @@ -484,7 +481,7 @@ test('fund using symlink ref', t => { // using symlinked ref fund.exec(['./node_modules/a'], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.match( printUrl, 'http://example.com/a', @@ -495,7 +492,7 @@ test('fund using symlink ref', t => { // using target ref fund.exec(['./a'], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.match( 
printUrl, @@ -510,8 +507,8 @@ test('fund using symlink ref', t => { }) }) -test('fund using data from actual tree', t => { - _flatOptions.prefix = t.testdir({ +t.test('fund using data from actual tree', t => { + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'using-actual-tree', version: '1.0.0', @@ -545,7 +542,7 @@ test('fund using data from actual tree', t => { // using symlinked ref fund.exec(['a'], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.match( printUrl, 'http://example.com/_AAA', @@ -557,26 +554,26 @@ test('fund using data from actual tree', t => { }) }) -test('fund using nested packages with multiple sources, with a source number', t => { - _flatOptions.prefix = t.testdir(nestedMultipleFundingPackages) - _flatOptions.which = '1' +t.test('fund using nested packages with multiple sources, with a source number', t => { + npm.prefix = t.testdir(nestedMultipleFundingPackages) + config.which = '1' fund.exec(['.'], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.matchSnapshot(printUrl, 'should open the numbered URL') - _flatOptions.which = undefined + config.which = null printUrl = '' t.end() }) }) -test('fund using pkg name while having conflicting versions', t => { - _flatOptions.prefix = t.testdir(conflictingFundingPackages) - _flatOptions.which = '1' +t.test('fund using pkg name while having conflicting versions', t => { + npm.prefix = t.testdir(conflictingFundingPackages) + config.which = '1' fund.exec(['foo'], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.matchSnapshot(printUrl, 'should open greatest version') printUrl = '' @@ -584,13 +581,13 @@ test('fund using pkg name while having conflicting versions', t => { }) }) -test('fund using package argument with no browser, using --json option', t => { - _flatOptions.prefix = t.testdir(maintainerOwnsAllDeps) - _flatOptions.json = true +t.test('fund using package argument with no browser, using --json option', t => { + npm.prefix = t.testdir(maintainerOwnsAllDeps) + config.json = true fund.exec(['.'], (err) => { - t.ifError(err, 'should not error out') - t.deepEqual( + t.error(err, 'should not error out') + t.same( JSON.parse(printUrl), { title: 'individual funding available at the following URL', @@ -599,17 +596,17 @@ test('fund using package argument with no browser, using --json option', t => { 'should open funding url using json output' ) - _flatOptions.json = false + config.json = false printUrl = '' t.end() }) }) -test('fund using package info fetch from registry', t => { - _flatOptions.prefix = t.testdir({}) +t.test('fund using package info fetch from registry', t => { + npm.prefix = t.testdir({}) fund.exec(['ntl'], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.match( printUrl, /http:\/\/example.com\/pacote/, @@ -621,8 +618,8 @@ test('fund using package info fetch from registry', t => { }) }) -test('fund tries to use package info fetch from registry but registry has nothing', t => { - _flatOptions.prefix = t.testdir({}) +t.test('fund tries to use package info fetch from registry but registry has nothing', t => { + npm.prefix = t.testdir({}) fund.exec(['foo'], (err) => { t.equal(err.code, 'ENOFUND', 'should have ENOFUND error code') @@ -637,8 +634,8 @@ test('fund tries to use package info fetch from registry but registry has nothin }) }) -test('fund but target module has no funding info', t => { - _flatOptions.prefix = 
t.testdir(nestedNoFundingPackages) +t.test('fund but target module has no funding info', t => { + npm.prefix = t.testdir(nestedNoFundingPackages) fund.exec(['foo'], (err) => { t.equal(err.code, 'ENOFUND', 'should have ENOFUND error code') @@ -653,9 +650,9 @@ test('fund but target module has no funding info', t => { }) }) -test('fund using bad which value', t => { - _flatOptions.prefix = t.testdir(nestedMultipleFundingPackages) - _flatOptions.which = 3 +t.test('fund using bad which value', t => { + npm.prefix = t.testdir(nestedMultipleFundingPackages) + config.which = 3 fund.exec(['bar'], (err) => { t.equal(err.code, 'EFUNDNUMBER', 'should have EFUNDNUMBER error code') @@ -665,14 +662,14 @@ test('fund using bad which value', t => { 'should have bad which option error message' ) - _flatOptions.which = undefined + config.which = null result = '' t.end() }) }) -test('fund pkg missing version number', t => { - _flatOptions.prefix = t.testdir({ +t.test('fund pkg missing version number', t => { + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'foo', funding: 'http://example.com/foo', @@ -680,15 +677,15 @@ test('fund pkg missing version number', t => { }) fund.exec([], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.matchSnapshot(result, 'should print name only') result = '' t.end() }) }) -test('fund a package throws on openUrl', t => { - _flatOptions.prefix = t.testdir({ +t.test('fund a package throws on openUrl', t => { + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'foo', version: '1.0.0', @@ -703,8 +700,8 @@ test('fund a package throws on openUrl', t => { }) }) -test('fund a package with type and multiple sources', t => { - _flatOptions.prefix = t.testdir({ +t.test('fund a package with type and multiple sources', t => { + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'foo', funding: [ @@ -721,7 +718,7 @@ test('fund a package with type and multiple sources', t => { }) fund.exec(['.'], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.matchSnapshot(result, 'should print prompt select message') result = '' @@ -729,8 +726,8 @@ test('fund a package with type and multiple sources', t => { }) }) -test('fund colors', t => { - _flatOptions.prefix = t.testdir({ +t.test('fund colors', t => { + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-fund-colors', version: '1.0.0', @@ -782,20 +779,20 @@ test('fund colors', t => { }, }, }) - _flatOptions.color = true + npm.color = true fund.exec([], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.matchSnapshot(result, 'should print output with color info') result = '' - _flatOptions.color = false + npm.color = false t.end() }) }) -test('sub dep with fund info and a parent with no funding info', t => { - _flatOptions.prefix = t.testdir({ +t.test('sub dep with fund info and a parent with no funding info', t => { + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-multiple-funding-sources', version: '1.0.0', @@ -835,10 +832,96 @@ test('sub dep with fund info and a parent with no funding info', t => { }) fund.exec([], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') t.matchSnapshot(result, 'should nest sub dep as child of root') result = '' t.end() }) }) + +t.test('workspaces', t => { + t.test('filter funding info by a specific workspace', async t => { + npm.localPrefix = npm.prefix = t.testdir({ + 
'package.json': JSON.stringify({ + name: 'workspaces-support', + version: '1.0.0', + workspaces: ['packages/*'], + dependencies: { + d: '^1.0.0', + }, + }), + node_modules: { + a: t.fixture('symlink', '../packages/a'), + b: t.fixture('symlink', '../packages/b'), + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + funding: [ + 'http://example.com/c', + 'http://example.com/c-other', + ], + }), + }, + d: { + 'package.json': JSON.stringify({ + name: 'd', + version: '1.0.0', + funding: 'http://example.com/d', + }), + }, + }, + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + funding: 'https://example.com/a', + dependencies: { + c: '^1.0.0', + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + funding: 'http://example.com/b', + dependencies: { + d: '^1.0.0', + }, + }), + }, + }, + }) + + await new Promise((res, rej) => { + fund.execWorkspaces([], ['a'], (err) => { + if (err) + rej(err) + + t.matchSnapshot(result, + 'should display only filtered workspace name and its deps') + + result = '' + res() + }) + }) + + await new Promise((res, rej) => { + fund.execWorkspaces([], ['./packages/a'], (err) => { + if (err) + rej(err) + + t.matchSnapshot(result, + 'should display only filtered workspace path and its deps') + + result = '' + res() + }) + }) + }) + + t.end() +}) diff --git a/test/lib/get.js b/test/lib/get.js index a11597d2682db..9b77fbba3e6f4 100644 --- a/test/lib/get.js +++ b/test/lib/get.js @@ -1,8 +1,7 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') -test('should retrieve values from npm.commands.config', (t) => { - const Get = requireInject('../../lib/get.js') +t.test('should retrieve values from npm.commands.config', (t) => { + const Get = t.mock('../../lib/get.js') const get = new Get({ commands: { config: ([action, arg]) => { diff --git a/test/lib/help-search.js b/test/lib/help-search.js index 8b1ecd46eb774..2df862d4fc570 100644 --- a/test/lib/help-search.js +++ b/test/lib/help-search.js @@ -1,6 +1,6 @@ -const { test } = require('tap') +const t = require('tap') const { join } = require('path') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') const ansicolors = require('ansicolors') const OUTPUT = [] @@ -8,25 +8,24 @@ const output = (msg) => { OUTPUT.push(msg) } -let npmHelpArgs = null -let npmHelpErr = null -const npm = { +const config = { + long: false, +} +const npmHelpErr = null +const npm = mockNpm({ color: false, + config, flatOptions: { long: false, }, + usage: 'npm test usage', commands: { help: (args, cb) => { - npmHelpArgs = args return cb(npmHelpErr) }, }, -} - -let npmUsageArg = null -const npmUsage = (npm, arg) => { - npmUsageArg = arg -} + output, +}) let globRoot = null const globDir = { @@ -43,14 +42,12 @@ const globDir = { const glob = (p, cb) => cb(null, Object.keys(globDir).map((file) => join(globRoot, file))) -const HelpSearch = requireInject('../../lib/help-search.js', { - '../../lib/utils/npm-usage.js': npmUsage, - '../../lib/utils/output.js': output, +const HelpSearch = t.mock('../../lib/help-search.js', { glob, }) const helpSearch = new HelpSearch(npm) -test('npm help-search', t => { +t.test('npm help-search', t => { globRoot = t.testdir(globDir) t.teardown(() => { OUTPUT.length = 0 @@ -61,13 +58,12 @@ test('npm help-search', t => { if (err) throw err - t.match(OUTPUT, /Top hits for/, 'outputs results') - t.match(OUTPUT, /Did you mean 
this\?\n\s+exec/, 'matched command, so suggest it') + t.match(OUTPUT, /Top hits for "exec"/, 'outputs results') t.end() }) }) -test('npm help-search multiple terms', t => { +t.test('npm help-search multiple terms', t => { globRoot = t.testdir(globDir) t.teardown(() => { OUTPUT.length = 0 @@ -84,46 +80,12 @@ test('npm help-search multiple terms', t => { }) }) -test('npm help-search single result prints full section', t => { - globRoot = t.testdir(globDir) - t.teardown(() => { - OUTPUT.length = 0 - npmHelpArgs = null - globRoot = null - }) - - return helpSearch.exec(['does not exist in'], (err) => { - if (err) - throw err - - t.strictSame(npmHelpArgs, ['npm-install'], 'identified the correct man page and called help with it') - t.end() - }) -}) - -test('npm help-search single result propagates error', t => { - globRoot = t.testdir(globDir) - npmHelpErr = new Error('help broke') - t.teardown(() => { - OUTPUT.length = 0 - npmHelpArgs = null - npmHelpErr = null - globRoot = null - }) - - return helpSearch.exec(['does not exist in'], (err) => { - t.strictSame(npmHelpArgs, ['npm-install'], 'identified the correct man page and called help with it') - t.match(err, /help broke/, 'propagated the error from help') - t.end() - }) -}) - -test('npm help-search long output', t => { +t.test('npm help-search long output', t => { globRoot = t.testdir(globDir) - npm.flatOptions.long = true + config.long = true t.teardown(() => { OUTPUT.length = 0 - npm.flatOptions.long = false + config.long = false globRoot = null }) @@ -136,13 +98,13 @@ test('npm help-search long output', t => { }) }) -test('npm help-search long output with color', t => { +t.test('npm help-search long output with color', t => { globRoot = t.testdir(globDir) - npm.flatOptions.long = true + config.long = true npm.color = true t.teardown(() => { OUTPUT.length = 0 - npm.flatOptions.long = false + config.long = false npm.color = false globRoot = null }) @@ -157,18 +119,18 @@ test('npm help-search long output with color', t => { }) }) -test('npm help-search no args', t => { +t.test('npm help-search no args', t => { return helpSearch.exec([], (err) => { - t.match(err, /npm help-search/, 'throws usage') + t.notOk(err) + t.match(OUTPUT, /npm help-search/, 'outputs usage') t.end() }) }) -test('npm help-search no matches', t => { +t.test('npm help-search no matches', t => { globRoot = t.testdir(globDir) t.teardown(() => { OUTPUT.length = 0 - npmUsageArg = null globRoot = null }) @@ -176,7 +138,7 @@ test('npm help-search no matches', t => { if (err) throw err - t.equal(npmUsageArg, false, 'called npmUsage for no matches') + t.match(OUTPUT, /No matches/) t.end() }) }) diff --git a/test/lib/help.js b/test/lib/help.js index addbe4dcc1f9b..44ba5b1cabc3c 100644 --- a/test/lib/help.js +++ b/test/lib/help.js @@ -1,12 +1,6 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') const { EventEmitter } = require('events') -let npmUsageArg = null -const npmUsage = (npm, arg) => { - npmUsageArg = arg -} - const npmConfig = { usage: false, viewer: undefined, @@ -14,7 +8,9 @@ const npmConfig = { } let helpSearchArgs = null +const OUTPUT = [] const npm = { + usage: 'test npm usage', config: { get: (key) => npmConfig[key], set: (key, value) => { @@ -34,11 +30,9 @@ const npm = { }, }, deref: (cmd) => {}, -} - -const OUTPUT = [] -const output = (msg) => { - OUTPUT.push(msg) + output: msg => { + OUTPUT.push(msg) + }, } const globDefaults = [ @@ -49,7 +43,9 @@ const globDefaults = [ let globErr = null let globResult = 
globDefaults +let globParam const glob = (p, cb) => { + globParam = p return cb(globErr, globResult) } @@ -71,10 +67,8 @@ const openUrl = async (npm, url, msg) => { openUrlArg = url } -const Help = requireInject('../../lib/help.js', { - '../../lib/utils/npm-usage.js': npmUsage, +const Help = t.mock('../../lib/help.js', { '../../lib/utils/open-url.js': openUrl, - '../../lib/utils/output.js': output, child_process: { spawn, }, @@ -82,21 +76,17 @@ const Help = requireInject('../../lib/help.js', { }) const help = new Help(npm) -test('npm help', t => { - t.teardown(() => { - npmUsageArg = null - }) - +t.test('npm help', t => { return help.exec([], (err) => { if (err) throw err - t.equal(npmUsageArg, false, 'called npmUsage') + t.match(OUTPUT, ['test npm usage'], 'showed npm usage') t.end() }) }) -test('npm help completion', async t => { +t.test('npm help completion', async t => { t.teardown(() => { globErr = null }) @@ -109,23 +99,7 @@ test('npm help completion', async t => { t.rejects(help.completion({ conf: { argv: { remain: [] } } }), /glob failed/, 'glob errors propagate') }) -test('npm help -h', t => { - npmConfig.usage = true - t.teardown(() => { - npmConfig.usage = false - OUTPUT.length = 0 - }) - - return help.exec(['help'], (err) => { - if (err) - throw err - - t.strictSame(OUTPUT, ['npm help <term>'], 'outputs usage information for command') - t.end() - }) -}) - -test('npm help multiple args calls search', t => { +t.test('npm help multiple args calls search', t => { t.teardown(() => { helpSearchArgs = null }) @@ -139,7 +113,7 @@ test('npm help multiple args calls search', t => { }) }) -test('npm help no matches calls search', t => { +t.test('npm help no matches calls search', t => { globResult = [] t.teardown(() => { helpSearchArgs = null @@ -155,7 +129,7 @@ test('npm help no matches calls search', t => { }) }) -test('npm help glob errors propagate', t => { +t.test('npm help glob errors propagate', t => { globErr = new Error('glob failed') t.teardown(() => { globErr = null @@ -169,7 +143,7 @@ test('npm help glob errors propagate', t => { }) }) -test('npm help whoami', t => { +t.test('npm help whoami', t => { globResult = ['/root/man/man1/npm-whoami.1.xz'] t.teardown(() => { globResult = globDefaults @@ -182,12 +156,12 @@ test('npm help whoami', t => { throw err t.equal(spawnBin, 'man', 'calls man by default') - t.strictSame(spawnArgs, ['1', 'npm-whoami'], 'passes the correct arguments') + t.strictSame(spawnArgs, [globResult[0]], 'passes the correct arguments') t.end() }) }) -test('npm help 1 install', t => { +t.test('npm help 1 install', t => { npmConfig.viewer = 'browser' globResult = [ '/root/man/man5/install.5', @@ -210,16 +184,16 @@ test('npm help 1 install', t => { }) }) -test('npm help 5 install', t => { +t.test('npm help 5 install', t => { npmConfig.viewer = 'browser' globResult = [ '/root/man/man5/install.5', - '/root/man/man1/npm-install.1', ] t.teardown(() => { npmConfig.viewer = undefined globResult = globDefaults + globParam = null spawnBin = null spawnArgs = null }) @@ -228,19 +202,20 @@ test('npm help 5 install', t => { if (err) throw err + t.match(globParam, /man5/, 'searches only in man5 folder') t.match(openUrlArg, /configuring-npm(\/|\\)install.html$/, 'attempts to open the correct url') t.end() }) }) -test('npm help 7 config', t => { +t.test('npm help 7 config', t => { npmConfig.viewer = 'browser' globResult = [ - '/root/man/man1/npm-config.1', '/root/man/man7/config.7', ] t.teardown(() => { npmConfig.viewer = undefined + globParam = null globResult = globDefaults 
spawnBin = null + spawnArgs = null @@ -250,50 +225,13 @@ return help.exec(['7', 'config'], (err) => { if (err) throw err + t.match(globParam, /man7/, 'searches only in man7 folder') t.match(openUrlArg, /using-npm(\/|\\)config.html$/, 'attempts to open the correct url') t.end() }) }) -test('npm help with browser viewer and invalid section throws', t => { - npmConfig.viewer = 'browser' - globResult = [ - '/root/man/man1/npm-config.1', - '/root/man/man7/config.7', - '/root/man/man9/config.9', - ] - t.teardown(() => { - npmConfig.viewer = undefined - globResult = globDefaults - spawnBin = null - spawnArgs = null - }) - - return help.exec(['9', 'config'], (err) => { - t.match(err, /invalid man section: 9/, 'throws appropriate error') - t.end() - }) -}) - -test('npm help global redirects to folders', t => { - globResult = ['/root/man/man5/folders.5'] - t.teardown(() => { - globResult = globDefaults - spawnBin = null - spawnArgs = null - }) - - return help.exec(['global'], (err) => { - if (err) - throw err - - t.equal(spawnBin, 'man', 'calls man by default') - t.strictSame(spawnArgs, ['5', 'folders'], 'passes the correct arguments') - t.end() - }) -}) - -test('npm help package.json redirects to package-json', t => { +t.test('npm help package.json redirects to package-json', t => { globResult = ['/root/man/man5/package-json.5'] t.teardown(() => { globResult = globDefaults @@ -306,12 +244,13 @@ test('npm help package.json redirects to package-json', t => { throw err t.equal(spawnBin, 'man', 'calls man by default') - t.strictSame(spawnArgs, ['5', 'package-json'], 'passes the correct arguments') + t.match(globParam, /package-json/, 'glob was asked to find package-json') + t.strictSame(spawnArgs, [globResult[0]], 'passes the correct arguments') t.end() }) }) -test('npm help ?(un)star', t => { +t.test('npm help ?(un)star', t => { npmConfig.viewer = 'woman' globResult = [ '/root/man/man1/npm-star.1', @@ -329,12 +268,12 @@ test('npm help ?(un)star', t => { throw err t.equal(spawnBin, 'emacsclient', 'maps woman to emacs correctly') - t.strictSame(spawnArgs, ['-e', `(woman-find-file '/root/man/man1/npm-unstar.1')`], 'passes the correct arguments') + t.strictSame(spawnArgs, ['-e', `(woman-find-file '/root/man/man1/npm-star.1')`], 'passes the correct arguments') t.end() }) }) -test('npm help - woman viewer propagates errors', t => { +t.test('npm help - woman viewer propagates errors', t => { npmConfig.viewer = 'woman' spawnCode = 1 globResult = [ @@ -352,12 +291,12 @@ test('npm help - woman viewer propagates errors', t => { return help.exec(['?(un)star'], (err) => { t.match(err, /help process exited with code: 1/, 'received the correct error') t.equal(spawnBin, 'emacsclient', 'maps woman to emacs correctly') - t.strictSame(spawnArgs, ['-e', `(woman-find-file '/root/man/man1/npm-unstar.1')`], 'passes the correct arguments') + t.strictSame(spawnArgs, ['-e', `(woman-find-file '/root/man/man1/npm-star.1')`], 'passes the correct arguments') t.end() }) }) -test('npm help un*', t => { +t.test('npm help un*', t => { globResult = [ '/root/man/man1/npm-unstar.1', '/root/man/man1/npm-uninstall.1', @@ -374,12 +313,12 @@ test('npm help un*', t => { throw err t.equal(spawnBin, 'man', 'calls man by default') - t.strictSame(spawnArgs, ['1', 'npm-unstar'], 'passes the correct arguments') + t.strictSame(spawnArgs, ['/root/man/man1/npm-uninstall.1'], 'passes the correct arguments') t.end() }) }) -test('npm help - man viewer propagates errors', t => { +t.test('npm help - man viewer propagates errors', t => { spawnCode = 1 globResult = [ 
'/root/man/man1/npm-unstar.1', @@ -396,7 +335,30 @@ test('npm help - man viewer propagates errors', t => { return help.exec(['un*'], (err) => { t.match(err, /help process exited with code: 1/, 'received correct error') t.equal(spawnBin, 'man', 'calls man by default') - t.strictSame(spawnArgs, ['1', 'npm-unstar'], 'passes the correct arguments') + t.strictSame(spawnArgs, ['/root/man/man1/npm-uninstall.1'], 'passes the correct arguments') + t.end() + }) +}) + +t.test('npm help with complex installation path finds proper help file', t => { + npmConfig.viewer = 'browser' + globResult = [ + 'C:/Program Files/node-v14.15.5-win-x64/node_modules/npm/man/man1/npm-install.1', + // glob always returns forward slashes, even on Windows + ] + + t.teardown(() => { + npmConfig.viewer = undefined + globResult = globDefaults + spawnBin = null + spawnArgs = null + }) + + return help.exec(['1', 'install'], (err) => { + if (err) + throw err + + t.match(openUrlArg, /commands(\/|\\)npm-install.html$/, 'attempts to open the correct url') t.end() }) }) diff --git a/test/lib/hook.js b/test/lib/hook.js index 923f86e81ddf3..2419f16041748 100644 --- a/test/lib/hook.js +++ b/test/lib/hook.js @@ -1,6 +1,6 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') +const output = [] const npm = { flatOptions: { json: false, @@ -9,6 +9,9 @@ const npm = { loglevel: 'info', unicode: false, }, + output: (msg) => { + output.push(msg) + }, } const pkgTypes = { @@ -51,24 +54,20 @@ const libnpmhook = { }, } -const output = [] -const Hook = requireInject('../../lib/hook.js', { +const Hook = t.mock('../../lib/hook.js', { '../../lib/utils/otplease.js': async (opts, fn) => fn(opts), - '../../lib/utils/output.js': (msg) => { - output.push(msg) - }, libnpmhook, }) const hook = new Hook(npm) -test('npm hook no args', t => { +t.test('npm hook no args', t => { return hook.exec([], (err) => { t.match(err, /npm hook add/, 'throws usage with no arguments') t.end() }) }) -test('npm hook add', t => { +t.test('npm hook add', t => { t.teardown(() => { hookArgs = null output.length = 0 @@ -89,7 +88,7 @@ test('npm hook add', t => { }) }) -test('npm hook add - unicode output', t => { +t.test('npm hook add - unicode output', t => { npm.flatOptions.unicode = true t.teardown(() => { npm.flatOptions.unicode = false @@ -112,7 +111,7 @@ test('npm hook add - unicode output', t => { }) }) -test('npm hook add - json output', t => { +t.test('npm hook add - json output', t => { npm.flatOptions.json = true t.teardown(() => { npm.flatOptions.json = false @@ -140,7 +139,7 @@ test('npm hook add - json output', t => { }) }) -test('npm hook add - parseable output', t => { +t.test('npm hook add - parseable output', t => { npm.flatOptions.parseable = true t.teardown(() => { npm.flatOptions.parseable = false @@ -168,7 +167,7 @@ test('npm hook add - parseable output', t => { }) }) -test('npm hook add - silent output', t => { +t.test('npm hook add - silent output', t => { npm.flatOptions.silent = true t.teardown(() => { npm.flatOptions.silent = false @@ -191,7 +190,7 @@ test('npm hook add - silent output', t => { }) }) -test('npm hook ls', t => { +t.test('npm hook ls', t => { t.teardown(() => { hookArgs = null output.length = 0 @@ -214,7 +213,7 @@ test('npm hook ls', t => { }) }) -test('npm hook ls, no results', t => { +t.test('npm hook ls, no results', t => { hookResponse = [] t.teardown(() => { hookResponse = null @@ -235,7 +234,7 @@ test('npm hook ls, no results', t => { }) }) -test('npm hook ls, single result', t 
=> { +t.test('npm hook ls, single result', t => { hookResponse = [{ id: 1, name: 'semver', @@ -264,7 +263,7 @@ test('npm hook ls, single result', t => { }) }) -test('npm hook ls - json output', t => { +t.test('npm hook ls - json output', t => { npm.flatOptions.json = true t.teardown(() => { npm.flatOptions.json = false @@ -301,7 +300,7 @@ test('npm hook ls - json output', t => { }) }) -test('npm hook ls - parseable output', t => { +t.test('npm hook ls - parseable output', t => { npm.flatOptions.parseable = true t.teardown(() => { npm.flatOptions.parseable = false @@ -327,7 +326,7 @@ test('npm hook ls - parseable output', t => { }) }) -test('npm hook ls - silent output', t => { +t.test('npm hook ls - silent output', t => { npm.flatOptions.silent = true t.teardown(() => { npm.flatOptions.silent = false @@ -348,7 +347,7 @@ test('npm hook ls - silent output', t => { }) }) -test('npm hook rm', t => { +t.test('npm hook rm', t => { t.teardown(() => { hookArgs = null output.length = 0 @@ -369,7 +368,7 @@ test('npm hook rm', t => { }) }) -test('npm hook rm - unicode output', t => { +t.test('npm hook rm - unicode output', t => { npm.flatOptions.unicode = true t.teardown(() => { npm.flatOptions.unicode = false @@ -392,7 +391,7 @@ test('npm hook rm - unicode output', t => { }) }) -test('npm hook rm - silent output', t => { +t.test('npm hook rm - silent output', t => { npm.flatOptions.silent = true t.teardown(() => { npm.flatOptions.silent = false @@ -413,7 +412,7 @@ test('npm hook rm - silent output', t => { }) }) -test('npm hook rm - json output', t => { +t.test('npm hook rm - json output', t => { npm.flatOptions.json = true t.teardown(() => { npm.flatOptions.json = false @@ -439,7 +438,7 @@ test('npm hook rm - json output', t => { }) }) -test('npm hook rm - parseable output', t => { +t.test('npm hook rm - parseable output', t => { npm.flatOptions.parseable = true t.teardown(() => { npm.flatOptions.parseable = false @@ -463,7 +462,7 @@ test('npm hook rm - parseable output', t => { }) }) -test('npm hook update', t => { +t.test('npm hook update', t => { t.teardown(() => { hookArgs = null output.length = 0 @@ -486,7 +485,7 @@ test('npm hook update', t => { }) }) -test('npm hook update - unicode', t => { +t.test('npm hook update - unicode', t => { npm.flatOptions.unicode = true t.teardown(() => { npm.flatOptions.unicode = false @@ -511,7 +510,7 @@ test('npm hook update - unicode', t => { }) }) -test('npm hook update - json output', t => { +t.test('npm hook update - json output', t => { npm.flatOptions.json = true t.teardown(() => { npm.flatOptions.json = false @@ -539,7 +538,7 @@ test('npm hook update - json output', t => { }) }) -test('npm hook update - parseable output', t => { +t.test('npm hook update - parseable output', t => { npm.flatOptions.parseable = true t.teardown(() => { npm.flatOptions.parseable = false @@ -565,7 +564,7 @@ test('npm hook update - parseable output', t => { }) }) -test('npm hook update - silent output', t => { +t.test('npm hook update - silent output', t => { npm.flatOptions.silent = true t.teardown(() => { npm.flatOptions.silent = false diff --git a/test/lib/init.js b/test/lib/init.js index db5411ba76bf8..1cefb1fc9c8fd 100644 --- a/test/lib/init.js +++ b/test/lib/init.js @@ -1,7 +1,8 @@ const t = require('tap') -const requireInject = require('require-inject') +const fs = require('fs') +const { resolve } = require('path') +const { fake: mockNpm } = require('../fixtures/mock-npm') -let result = '' const npmLog = { disableProgress: () => null, enableProgress: () => null, @@ 
-10,134 +11,201 @@ const npmLog = { resume: () => null, silly: () => null, } -const npm = { - config: { set () {} }, - flatOptions: {}, - log: npmLog, +const config = { + cache: 'bad-cache-dir', + 'init-module': '~/.npm-init.js', + yes: true, +} +const flatOptions = { + cache: 'test-config-dir/_cacache', + npxCache: 'test-config-dir/_npx', } +const npm = mockNpm({ + flatOptions, + config, + log: npmLog, +}) const mocks = { - 'init-package-json': (dir, initFile, config, cb) => cb(null, 'data'), '../../lib/utils/usage.js': () => 'usage instructions', - '../../lib/utils/output.js': (...msg) => { - result += msg.join('\n') - }, } -const Init = requireInject('../../lib/init.js', mocks) +const Init = t.mock('../../lib/init.js', mocks) const init = new Init(npm) +const _cwd = process.cwd() +const _consolelog = console.log +const noop = () => {} -t.afterEach(cb => { - result = '' - npm.config = { get: () => '', set () {} } - npm.commands = {} - Object.defineProperty(npm, 'flatOptions', { value: {} }) +t.afterEach(() => { + config.yes = true + config.package = undefined npm.log = npmLog - cb() + process.chdir(_cwd) + console.log = _consolelog }) -t.test('classic npm init no args', t => { - npm.config = { - get () { - return '~/.npm-init.js' - }, - } +t.test('classic npm init -y', t => { + npm.localPrefix = t.testdir({}) + + // init-package-json prints directly to console.log + // this avoids polluting test output with those logs + console.log = noop + + process.chdir(npm.localPrefix) init.exec([], err => { - t.ifError(err, 'npm init no args') - t.matchSnapshot(result, 'should print helper info') + if (err) + throw err + + const pkg = require(resolve(npm.localPrefix, 'package.json')) + t.equal(pkg.version, '1.0.0') + t.equal(pkg.license, 'ISC') t.end() }) }) -t.test('classic npm init -y', t => { - t.plan(7) - npm.config = { - get: () => '~/.npm-init.js', - } - Object.defineProperty(npm, 'flatOptions', { value: { yes: true} }) - npm.log = { ...npm.log } - npm.log.silly = (title, msg) => { - t.equal(title, 'package data', 'should print title') - t.equal(msg, 'data', 'should print pkg data info') - } - npm.log.resume = () => { - t.ok('should resume logs') - } - npm.log.info = (title, msg) => { - t.equal(title, 'init', 'should print title') - t.equal(msg, 'written successfully', 'should print done info') - } +t.test('classic interactive npm init', t => { + npm.localPrefix = t.testdir({}) + config.yes = undefined + + const Init = t.mock('../../lib/init.js', { + ...mocks, + 'init-package-json': (path, initFile, config, cb) => { + t.equal( + path, + resolve(npm.localPrefix), + 'should start init package.json in expected path' + ) + cb() + }, + }) + const init = new Init(npm) + + process.chdir(npm.localPrefix) init.exec([], err => { - t.ifError(err, 'npm init -y') - t.equal(result, '') + if (err) + throw err + + t.end() }) }) t.test('npm init <arg>', t => { - t.plan(4) - npm.config = { - set (key, val) { - t.equal(key, 'package', 'should set package key') - t.deepEqual(val, [], 'should set empty array value') + t.plan(3) + npm.localPrefix = t.testdir({}) + + const Init = t.mock('../../lib/init.js', { + libnpmexec: ({ args, cache, npxCache }) => { + t.same( + args, + ['create-react-app'], + 'should npx with listed packages' + ) + t.same(cache, flatOptions.cache) + t.same(npxCache, flatOptions.npxCache) }, - } - npm.commands.exec = (arr, cb) => { - t.deepEqual( - arr, - ['create-react-app'], - 'should npx with listed packages' - ) - cb() - } + }) + const init = new Init(npm) + + process.chdir(npm.localPrefix) 
init.exec(['react-app'], err => { - t.ifError(err, 'npm init react-app') + if (err) + throw err + }) +}) + +t.test('npm init <arg> -- other-args', t => { + t.plan(1) + npm.localPrefix = t.testdir({}) + + const Init = t.mock('../../lib/init.js', { + libnpmexec: ({ args }) => { + t.same( + args, + ['create-react-app', 'my-path', '--some-option', 'some-value'], + 'should npm exec with expected args' + ) + }, }) + const init = new Init(npm) + + process.chdir(npm.localPrefix) + init.exec( + ['react-app', 'my-path', '--some-option', 'some-value'], + err => { + if (err) + throw err + } + ) }) t.test('npm init @scope/name', t => { - t.plan(2) - npm.commands.exec = (arr, cb) => { - t.deepEqual( - arr, - ['@npmcli/create-something'], - 'should npx with scoped packages' - ) - cb() - } + t.plan(1) + npm.localPrefix = t.testdir({}) + + const Init = t.mock('../../lib/init.js', { + libnpmexec: ({ args }) => { + t.same( + args, + ['@npmcli/create-something'], + 'should npx with scoped packages' + ) + }, + }) + const init = new Init(npm) + + process.chdir(npm.localPrefix) init.exec(['@npmcli/something'], err => { - t.ifError(err, 'npm init init @scope/name') + if (err) + throw err }) }) t.test('npm init git spec', t => { - t.plan(2) - npm.commands.exec = (arr, cb) => { - t.deepEqual( - arr, - ['npm/create-something'], - 'should npx with git-spec packages' - ) - cb() - } + t.plan(1) + npm.localPrefix = t.testdir({}) + + const Init = t.mock('../../lib/init.js', { + libnpmexec: ({ args }) => { + t.same( + args, + ['npm/create-something'], + 'should npx with git-spec packages' + ) + }, + }) + const init = new Init(npm) + + process.chdir(npm.localPrefix) init.exec(['npm/something'], err => { - t.ifError(err, 'npm init init @scope/name') + if (err) + throw err }) }) t.test('npm init @scope', t => { - t.plan(2) - npm.commands.exec = (arr, cb) => { - t.deepEqual( - arr, - ['@npmcli/create'], - 'should npx with @scope/create pkgs' - ) - cb() - } + t.plan(1) + npm.localPrefix = t.testdir({}) + + const Init = t.mock('../../lib/init.js', { + libnpmexec: ({ args }) => { + t.same( + args, + ['@npmcli/create'], + 'should npx with @scope/create pkgs' + ) + }, + }) + const init = new Init(npm) + + process.chdir(npm.localPrefix) init.exec(['@npmcli'], err => { - t.ifError(err, 'npm init init @scope/create') + if (err) + throw err }) }) t.test('npm init tgz', t => { + npm.localPrefix = t.testdir({}) + + process.chdir(npm.localPrefix) init.exec(['something.tgz'], err => { t.match( err, @@ -149,24 +217,38 @@ t.test('npm init tgz', t => { }) t.test('npm init <arg>@next', t => { - t.plan(2) - npm.commands.exec = (arr, cb) => { - t.deepEqual( - arr, - ['create-something@next'], - 'should npx with something@next' - ) - cb() - } + t.plan(1) + npm.localPrefix = t.testdir({}) + + const Init = t.mock('../../lib/init.js', { + libnpmexec: ({ args }) => { + t.same( + args, + ['create-something@next'], + 'should npx with something@next' + ) + }, + }) + const init = new Init(npm) + + process.chdir(npm.localPrefix) init.exec(['something@next'], err => { - t.ifError(err, 'npm init init something@next') + if (err) + throw err }) }) t.test('npm init exec error', t => { - npm.commands.exec = (arr, cb) => { - cb(new Error('ERROR')) - } + npm.localPrefix = t.testdir({}) + + const Init = t.mock('../../lib/init.js', { + libnpmexec: async ({ args }) => { + throw new Error('ERROR') + }, + }) + const init = new Init(npm) + + process.chdir(npm.localPrefix) init.exec(['something@next'], err => { t.match( err, @@ -178,35 +260,32 @@ t.test('npm init exec 
error', t => { - t.plan(4) - Object.defineProperty(npm, 'flatOptions', { - get: () => ({}), - set () { - throw new Error('Should not set flatOptions') + t.plan(1) + npm.localPrefix = t.testdir({}) + + const Init = t.mock('../../lib/init.js', { + libnpmexec: async ({ args }) => { + t.same( + args, + ['create-react-app', 'my-app'], + 'should npx with extra args' + ) }, }) - npm.config = { - set (key, val) { - t.equal(key, 'package', 'should set package key') - t.deepEqual(val, [], 'should set empty array value') - }, - } - npm.commands.exec = (arr, cb) => { - t.deepEqual( - arr, - ['create-react-app', 'my-app'], - 'should npx with extra args' - ) - cb() - } + const init = new Init(npm) + + process.chdir(npm.localPrefix) init.exec(['react-app', 'my-app'], err => { - t.ifError(err, 'npm init react-app') + if (err) + throw err }) }) t.test('npm init cancel', t => { - t.plan(3) - const Init = requireInject('../../lib/init.js', { + t.plan(2) + npm.localPrefix = t.testdir({}) + + const Init = t.mock('../../lib/init.js', { ...mocks, 'init-package-json': (dir, initFile, config, cb) => cb( new Error('canceled') @@ -218,21 +297,219 @@ t.test('npm init cancel', t => { t.equal(title, 'init', 'should have init title') t.equal(msg, 'canceled', 'should log canceled') } + + process.chdir(npm.localPrefix) init.exec([], err => { - t.ifError(err, 'npm init cancel') + if (err) + throw err }) }) t.test('npm init error', t => { - const Init = requireInject('../../lib/init.js', { + npm.localPrefix = t.testdir({}) + + const Init = t.mock('../../lib/init.js', { ...mocks, 'init-package-json': (dir, initFile, config, cb) => cb( new Error('Unknown Error') ), }) const init = new Init(npm) + + process.chdir(npm.localPrefix) init.exec([], err => { t.match(err, /Unknown Error/, 'should throw error') t.end() }) }) + +t.test('workspaces', t => { + t.test('no args', t => { + t.teardown(() => { + npm._mockOutputs.length = 0 + }) + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'top-level', + }), + }) + + const Init = t.mock('../../lib/init.js', { + ...mocks, + 'init-package-json': (dir, initFile, config, cb) => { + t.equal(dir, resolve(npm.localPrefix, 'a'), 'should use the ws path') + cb() + }, + }) + const init = new Init(npm) + init.execWorkspaces([], ['a'], err => { + if (err) + throw err + + t.matchSnapshot(npm._mockOutputs, 'should print helper info') + t.end() + }) + }) + + t.test('no args, existing folder', t => { + t.teardown(() => { + npm._mockOutputs.length = 0 + }) + // init-package-json prints directly to console.log + // this avoids polluting test output with those logs + console.log = noop + + npm.localPrefix = t.testdir({ + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), + }, + }, + 'package.json': JSON.stringify({ + name: 'top-level', + workspaces: ['packages/a'], + }), + }) + + init.execWorkspaces([], ['packages/a'], err => { + if (err) + throw err + + t.matchSnapshot(npm._mockOutputs, 'should print helper info') + t.end() + }) + }) + + t.test('with arg but missing workspace folder', t => { + t.teardown(() => { + npm._mockOutputs.length = 0 + }) + // init-package-json prints directly to console.log + // this avoids polluting test output with those logs + console.log = noop + + npm.localPrefix = t.testdir({ + node_modules: { + a: t.fixture('symlink', '../a'), + 'create-index': { + 'index.js': ``, + }, + }, + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), + }, + 
'package.json': JSON.stringify({ + name: 'top-level', + }), + }) + + init.execWorkspaces([], ['packages/a'], err => { + if (err) + throw err + + t.matchSnapshot(npm._mockOutputs, 'should print helper info') + t.end() + }) + }) + + t.test('fail parsing top-level package.json to set workspace', t => { + // init-package-json prints directly to console.log + // this avoids polluting test output with those logs + console.log = noop + + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'top-level', + }), + }) + + const Init = t.mock('../../lib/init.js', { + ...mocks, + '@npmcli/package-json': { + async load () { + throw new Error('ERR') + }, + }, + }) + const init = new Init(npm) + + init.execWorkspaces([], ['a'], err => { + t.match( + err, + /ERR/, + 'should exit with error' + ) + t.end() + }) + }) + + t.test('missing top-level package.json when setting workspace', t => { + // init-package-json prints directly to console.log + // this avoids polluting test output with those logs + console.log = noop + + npm.localPrefix = t.testdir({}) + + const Init = require('../../lib/init.js') + const init = new Init(npm) + + init.execWorkspaces([], ['a'], err => { + t.match( + err, + { code: 'ENOENT' }, + 'should exit with missing package.json file error' + ) + t.end() + }) + }) + + t.test('using args', t => { + npm.localPrefix = t.testdir({ + b: { + 'package.json': JSON.stringify({ + name: 'b', + }), + }, + 'package.json': JSON.stringify({ + name: 'top-level', + workspaces: ['b'], + }), + }) + + const Init = t.mock('../../lib/init.js', { + ...mocks, + libnpmexec: ({ args, path }) => { + t.same( + args, + ['create-react-app'], + 'should npx with listed packages' + ) + t.same( + path, + resolve(npm.localPrefix, 'a'), + 'should use workspace path' + ) + fs.writeFileSync( + resolve(npm.localPrefix, 'a/package.json'), + JSON.stringify({ name: 'a' }) + ) + }, + }) + + const init = new Init(npm) + init.execWorkspaces(['react-app'], ['a'], err => { + if (err) + throw err + + t.end() + }) + }) + + t.end() +}) diff --git a/test/lib/install-ci-test.js b/test/lib/install-ci-test.js index 5f30efcabf259..2695e0f13decb 100644 --- a/test/lib/install-ci-test.js +++ b/test/lib/install-ci-test.js @@ -24,13 +24,12 @@ const installCITest = new InstallCITest({ }) t.test('the install-ci-test command', t => { - t.afterEach(cb => { + t.afterEach(() => { ciArgs = null ciCalled = false testArgs = null testCalled = false ciError = null - cb() }) t.test('ci and test', t => { diff --git a/test/lib/install-test.js b/test/lib/install-test.js index 0c52bd5e3c012..adec91b619923 100644 --- a/test/lib/install-test.js +++ b/test/lib/install-test.js @@ -24,13 +24,12 @@ const installTest = new InstallTest({ }) t.test('the install-test command', t => { - t.afterEach(cb => { + t.afterEach(() => { installArgs = null installCalled = false testArgs = null testCalled = false installError = null - cb() }) t.test('install and test', t => { diff --git a/test/lib/install.js b/test/lib/install.js index 8b7a968511136..6412b34c16f25 100644 --- a/test/lib/install.js +++ b/test/lib/install.js @@ -1,15 +1,15 @@ -const { test } = require('tap') +const t = require('tap') const Install = require('../../lib/install.js') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') -test('should install using Arborist', (t) => { +t.test('should install using Arborist', (t) => { const SCRIPTS = [] let ARB_ARGS = null let REIFY_CALLED = false let ARB_OBJ = null - const Install = 
requireInject('../../lib/install.js', { + const Install = t.mock('../../lib/install.js', { '@npmcli/run-script': ({ event }) => { SCRIPTS.push(event) }, @@ -28,22 +28,22 @@ test('should install using Arborist', (t) => { throw new Error('got wrong object passed to reify-finish') }, }) - const install = new Install({ + + const npm = mockNpm({ + config: { dev: true }, + flatOptions: { global: false, auditLevel: 'low' }, globalDir: 'path/to/node_modules/', prefix: 'foo', - flatOptions: { - global: false, - }, - config: { - get: () => true, - }, }) + const install = new Install(npm) t.test('with args', t => { install.exec(['fizzbuzz'], er => { if (er) throw er - t.match(ARB_ARGS, { global: false, path: 'foo' }) + t.match(ARB_ARGS, + { global: false, path: 'foo', auditLevel: null }, + 'Arborist gets correct args and ignores auditLevel') t.equal(REIFY_CALLED, true, 'called reify') t.strictSame(SCRIPTS, [], 'no scripts when adding dep') t.end() @@ -72,10 +72,10 @@ test('should install using Arborist', (t) => { t.end() }) -test('should ignore scripts with --ignore-scripts', (t) => { +t.test('should ignore scripts with --ignore-scripts', (t) => { const SCRIPTS = [] let REIFY_CALLED = false - const Install = requireInject('../../lib/install.js', { + const Install = t.mock('../../lib/install.js', { '../../lib/utils/reify-finish.js': async () => {}, '@npmcli/run-script': ({ event }) => { SCRIPTS.push(event) @@ -86,17 +86,16 @@ test('should ignore scripts with --ignore-scripts', (t) => { } }, }) - const install = new Install({ + const npm = mockNpm({ globalDir: 'path/to/node_modules/', prefix: 'foo', - flatOptions: { - global: false, - ignoreScripts: true, - }, + flatOptions: { global: false }, config: { - get: () => false, + global: false, + 'ignore-scripts': true, }, }) + const install = new Install(npm) install.exec([], er => { if (er) throw er @@ -106,23 +105,20 @@ test('should ignore scripts with --ignore-scripts', (t) => { }) }) -test('should install globally using Arborist', (t) => { - const Install = requireInject('../../lib/install.js', { +t.test('should install globally using Arborist', (t) => { + const Install = t.mock('../../lib/install.js', { '../../lib/utils/reify-finish.js': async () => {}, '@npmcli/arborist': function () { this.reify = () => {} }, }) - const install = new Install({ + const npm = mockNpm({ globalDir: 'path/to/node_modules/', prefix: 'foo', - flatOptions: { - global: true, - }, - config: { - get: () => false, - }, + config: { global: true }, + flatOptions: { global: true }, }) + const install = new Install(npm) install.exec([], er => { if (er) throw er @@ -130,8 +126,8 @@ test('should install globally using Arborist', (t) => { }) }) -test('completion to folder', async t => { - const Install = requireInject('../../lib/install.js', { +t.test('completion to folder', async t => { + const Install = t.mock('../../lib/install.js', { '../../lib/utils/reify-finish.js': async () => {}, util: { promisify: (fn) => fn, @@ -152,8 +148,8 @@ test('completion to folder', async t => { t.end() }) -test('completion to folder - invalid dir', async t => { - const Install = requireInject('../../lib/install.js', { +t.test('completion to folder - invalid dir', async t => { + const Install = t.mock('../../lib/install.js', { '../../lib/utils/reify-finish.js': async () => {}, util: { promisify: (fn) => fn, @@ -170,8 +166,8 @@ test('completion to folder - invalid dir', async t => { t.end() }) -test('completion to folder - no matches', async t => { - const Install = 
requireInject('../../lib/install.js', { +t.test('completion to folder - no matches', async t => { + const Install = t.mock('../../lib/install.js', { '../../lib/utils/reify-finish.js': async () => {}, util: { promisify: (fn) => fn, @@ -188,8 +184,8 @@ test('completion to folder - no matches', async t => { t.end() }) -test('completion to folder - match is not a package', async t => { - const Install = requireInject('../../lib/install.js', { +t.test('completion to folder - match is not a package', async t => { + const Install = t.mock('../../lib/install.js', { '../../lib/utils/reify-finish.js': async () => {}, util: { promisify: (fn) => fn, @@ -209,14 +205,14 @@ test('completion to folder - match is not a package', async t => { t.end() }) -test('completion to url', async t => { +t.test('completion to url', async t => { const install = new Install({}) const res = await install.completion({ partialWord: 'http://path/to/url' }) t.strictSame(res, []) t.end() }) -test('completion', async t => { +t.test('completion', async t => { const install = new Install({}) const res = await install.completion({ partialWord: 'toto' }) t.notOk(res) diff --git a/test/lib/link.js b/test/lib/link.js index be7af3f524019..96f689892ff83 100644 --- a/test/lib/link.js +++ b/test/lib/link.js @@ -1,8 +1,9 @@ +const t = require('tap') const { resolve } = require('path') +const fs = require('fs') const Arborist = require('@npmcli/arborist') -const t = require('tap') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') const redactCwd = (path) => { const normalizePath = p => p @@ -15,25 +16,21 @@ const redactCwd = (path) => { t.cleanSnapshot = (str) => redactCwd(str) let reifyOutput -const npm = { +const config = {} +const npm = mockNpm({ globalDir: null, prefix: null, - flatOptions: {}, - config: { - get () { - return false - }, - find () {}, - }, -} + config, +}) + const printLinks = async (opts) => { let res = '' const arb = new Arborist(opts) const tree = await arb.loadActual() const linkedItems = [...tree.inventory.values()] - .sort((a, b) => a.pkgid.localeCompare(b.pkgid)) + .sort((a, b) => a.pkgid.localeCompare(b.pkgid, 'en')) for (const item of linkedItems) { - if (item.target) + if (item.isLink) res += `${item.path} -> ${item.target.path}\n` } return res @@ -43,7 +40,7 @@ const mocks = { '../../lib/utils/reify-output.js': () => reifyOutput(), } -const Link = requireInject('../../lib/link.js', mocks) +const Link = t.mock('../../lib/link.js', mocks) const link = new Link(npm) t.test('link to globalDir when in current working dir of pkg and no args', (t) => { @@ -84,7 +81,61 @@ t.test('link to globalDir when in current working dir of pkg and no args', (t) = } link.exec([], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') + }) +}) + +t.test('link ws to globalDir when workspace specified and no args', (t) => { + t.plan(2) + + const testdir = t.testdir({ + 'global-prefix': { + lib: { + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), + }, + }, + }, + }, + 'test-pkg-link': { + 'package.json': JSON.stringify({ + name: 'test-pkg-link', + version: '1.0.0', + workspaces: ['packages/*'], + }), + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), + }, + }, + }, + }) + npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') + npm.prefix = resolve(testdir, 'test-pkg-link') + npm.localPrefix = resolve(testdir, 'test-pkg-link') + + 
reifyOutput = async () => { + reifyOutput = undefined + + const links = await printLinks({ + path: resolve(npm.globalDir, '..'), + global: true, + }) + + t.matchSnapshot(links, 'should create a global link to current pkg') + } + + // link.workspaces = ['a'] + // link.workspacePaths = [resolve(testdir, 'test-pkg-link/packages/a')] + link.execWorkspaces([], ['a'], (err) => { + t.error(err, 'should not error out') }) }) @@ -192,7 +243,125 @@ t.test('link global linked pkg to local nm when using args', (t) => { 'a', 'file:../link-me-too', ], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') + }) +}) + +t.test('link global linked pkg to local workspace using args', (t) => { + t.plan(2) + + const testdir = t.testdir({ + 'global-prefix': { + lib: { + node_modules: { + '@myscope': { + foo: { + 'package.json': JSON.stringify({ + name: '@myscope/foo', + version: '1.0.0', + }), + }, + bar: { + 'package.json': JSON.stringify({ + name: '@myscope/bar', + version: '1.0.0', + }), + }, + linked: t.fixture('symlink', '../../../../scoped-linked'), + }, + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + }), + }, + 'test-pkg-link': t.fixture('symlink', '../../../test-pkg-link'), + }, + }, + }, + 'test-pkg-link': { + 'package.json': JSON.stringify({ + name: 'test-pkg-link', + version: '1.0.0', + }), + }, + 'link-me-too': { + 'package.json': JSON.stringify({ + name: 'link-me-too', + version: '1.0.0', + }), + }, + 'scoped-linked': { + 'package.json': JSON.stringify({ + name: '@myscope/linked', + version: '1.0.0', + }), + }, + 'my-project': { + 'package.json': JSON.stringify({ + name: 'my-project', + version: '1.0.0', + workspaces: ['packages/*'], + }), + packages: { + x: { + 'package.json': JSON.stringify({ + name: 'x', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + }, + }), + }, + }, + node_modules: { + foo: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + }), + }, + }, + }, + }) + npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') + npm.prefix = resolve(testdir, 'my-project') + npm.localPrefix = resolve(testdir, 'my-project') + + const _cwd = process.cwd() + process.chdir(npm.prefix) + + reifyOutput = async () => { + reifyOutput = undefined + process.chdir(_cwd) + + const links = await printLinks({ + path: npm.prefix, + }) + + t.matchSnapshot(links, 'should create a local symlink to global pkg') + } + + // installs examples for: + // - test-pkg-link: pkg linked to globalDir from local fs + // - @myscope/linked: scoped pkg linked to globalDir from local fs + // - @myscope/bar: prev installed scoped package available in globalDir + // - a: prev installed package available in globalDir + // - file:./link-me-too: pkg that needs to be reified in globalDir first + link.execWorkspaces([ + 'test-pkg-link', + '@myscope/linked', + '@myscope/bar', + 'a', + 'file:../link-me-too', + ], ['x'], (err) => { + t.error(err, 'should not error out') }) }) @@ -255,7 +424,7 @@ t.test('link pkg already in global space', (t) => { // - a: prev installed package available in globalDir // - file:./link-me-too: pkg that needs to be reified in globalDir first link.exec(['@myscope/linked'], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') }) }) @@ -313,10 +482,59 @@ t.test('link pkg already in global space when prefix is a symlink', (t) => { } link.exec(['@myscope/linked'], (err) => { - 
t.ifError(err, 'should not error out') + t.error(err, 'should not error out') }) }) +t.test('should not prune dependencies when linking packages', async t => { + const testdir = t.testdir({ + 'global-prefix': { + lib: { + node_modules: { + linked: t.fixture('symlink', '../../../linked'), + }, + }, + }, + linked: { + 'package.json': JSON.stringify({ + name: 'linked', + version: '1.0.0', + }), + }, + 'my-project': { + node_modules: { + foo: { + 'package.json': JSON.stringify({ name: 'foo', version: '1.0.0' }), + }, + }, + 'package.json': JSON.stringify({ + name: 'my-project', + version: '1.0.0', + }), + }, + }) + npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') + npm.prefix = resolve(testdir, 'my-project') + reifyOutput = () => {} + + const _cwd = process.cwd() + process.chdir(npm.prefix) + + await new Promise((res, rej) => { + link.exec(['linked'], (err) => { + if (err) + rej(err) + res() + }) + }) + + t.ok( + fs.statSync(resolve(testdir, 'my-project/node_modules/foo')), + 'should not prune any extraneous dep when running npm link' + ) + process.chdir(_cwd) +}) + t.test('completion', async t => { const testdir = t.testdir({ 'global-prefix': { @@ -333,7 +551,7 @@ t.test('completion', async t => { npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') const words = await link.completion({}) - t.deepEqual( + t.same( words, ['bar', 'foo', 'ipsum', 'lorem'], 'should list all package names available in globalDir' diff --git a/test/lib/ll.js b/test/lib/ll.js index 45eb4ec95b88f..28a3ab12c6a5f 100644 --- a/test/lib/ll.js +++ b/test/lib/ll.js @@ -1,4 +1,3 @@ -const requireInject = require('require-inject') const t = require('tap') t.test('ll', t => { @@ -10,12 +9,12 @@ t.test('ll', t => { } exec (args, cb) { - t.deepEqual(args, ['pkg'], 'should forward args') + t.same(args, ['pkg'], 'should forward args') cb() } } - const LL = requireInject('../../lib/ll.js', { + const LL = t.mock('../../lib/ll.js', { '../../lib/ls.js': LS, }) const ll = new LL({ diff --git a/test/lib/load-all-commands.js b/test/lib/load-all-commands.js index f6d1ae9e1817d..e5f10099cf365 100644 --- a/test/lib/load-all-commands.js +++ b/test/lib/load-all-commands.js @@ -1,27 +1,38 @@ -// Thanks to nyc not working properly with proxies this -// doesn't affect coverage. but it does ensure that every command -// has a usage, and if it has completion it is a function -const npm = require('../../lib/npm.js') +// Our coverage mapping means that stuff like this doen't count for coverage. +// It does ensure that every command has a usage that renders, contains its +// name, a description, and if it has completion it is a function. That it +// renders also ensures that any params we've defined in our commands work. 
const t = require('tap') +const { real: mockNpm } = require('../fixtures/mock-npm.js') const { cmdList } = require('../../lib/utils/cmd-list.js') -t.test('load npm', t => npm.load(er => { - if (er) - throw er -})) +const { npm, outputs } = mockNpm(t) t.test('load each command', t => { t.plan(cmdList.length) - for (const cmd of cmdList.sort((a, b) => a.localeCompare(b))) { - t.test(cmd, t => { - const impl = npm.commands[cmd] - if (impl.completion) { - t.plan(3) - t.isa(impl.completion, 'function', 'completion, if present, is a function') - } else - t.plan(2) - t.isa(impl, 'function', 'implementation is a function') - t.match(impl.usage, cmd, 'usage contains the command') - }) - } + npm.load((er) => { + if (er) + throw er + npm.config.set('usage', true) + for (const cmd of cmdList.sort((a, b) => a.localeCompare(b, 'en'))) { + t.test(cmd, t => { + const impl = npm.commands[cmd] + if (impl.completion) + t.type(impl.completion, 'function', 'completion, if present, is a function') + t.type(impl, 'function', 'implementation is a function') + t.ok(impl.description, 'implementation has a description') + t.ok(impl.name, 'implementation has a name') + t.match(impl.usage, cmd, 'usage contains the command') + impl([], (err) => { + t.notOk(err) + t.match(outputs[0][0], impl.usage, 'usage is what is output') + // This ties usage to a snapshot so we have to re-run snap if usage + // changes, which rebuilds the man pages + t.matchSnapshot(outputs[0][0]) + t.end() + }) + }) + outputs.length = 0 + } + }) }) diff --git a/test/lib/load-all.js b/test/lib/load-all.js index 02736c18ccc38..e6e407805346d 100644 --- a/test/lib/load-all.js +++ b/test/lib/load-all.js @@ -22,9 +22,10 @@ else { t.end() }) - t.test('call the error handle so we dont freak out', t => { - const errorHandler = require('../../lib/utils/error-handler.js') - errorHandler() + t.test('call the exit handler so we dont freak out', t => { + const exitHandler = require('../../lib/utils/exit-handler.js') + exitHandler.setNpm(npm) + exitHandler() t.end() }) } diff --git a/test/lib/logout.js b/test/lib/logout.js index b00fa641d8c16..7cb5c2790d621 100644 --- a/test/lib/logout.js +++ b/test/lib/logout.js @@ -1,12 +1,16 @@ -const requireInject = require('require-inject') -const { test } = require('tap') +const t = require('tap') +const { fake: mockNpm } = require('../fixtures/mock-npm') -const _flatOptions = { +const config = { registry: 'https://registry.npmjs.org/', scope: '', } +const flatOptions = { + registry: 'https://registry.npmjs.org/', + scope: '', +} +const npm = mockNpm({ config, flatOptions }) -const config = {} const npmlog = {} let result = null @@ -19,16 +23,13 @@ const mocks = { 'npm-registry-fetch': npmFetch, } -const Logout = requireInject('../../lib/logout.js', mocks) -const logout = new Logout({ - flatOptions: _flatOptions, - config, -}) +const Logout = t.mock('../../lib/logout.js', mocks) +const logout = new Logout(npm) -test('token logout', async (t) => { +t.test('token logout', async (t) => { t.plan(6) - _flatOptions.token = '@foo/' + flatOptions['//registry.npmjs.org/:_authToken'] = '@foo/' npmlog.verbose = (title, msg) => { t.equal(title, 'logout', 'should have correcct log prefix') @@ -39,7 +40,7 @@ test('token logout', async (t) => { ) } - config.clearCredentialsByURI = (registry) => { + npm.config.clearCredentialsByURI = (registry) => { t.equal( registry, 'https://registry.npmjs.org/', @@ -47,22 +48,22 @@ test('token logout', async (t) => { ) } - config.save = (type) => { + npm.config.save = (type) => { t.equal(type, 'user', 
'should save to user config') } await new Promise((res, rej) => { logout.exec([], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') - t.deepEqual( + t.same( result, { url: '/-/user/token/%40foo%2F', opts: { registry: 'https://registry.npmjs.org/', scope: '', - token: '@foo/', + '//registry.npmjs.org/:_authToken': '@foo/', method: 'DELETE', ignoreBody: true, }, @@ -70,7 +71,7 @@ test('token logout', async (t) => { 'should call npm-registry-fetch with expected values' ) - delete _flatOptions.token + delete flatOptions.token result = null mocks['npm-registry-fetch'] = null config.clearCredentialsByURI = null @@ -83,12 +84,15 @@ test('token logout', async (t) => { }) }) -test('token scoped logout', async (t) => { +t.test('token scoped logout', async (t) => { t.plan(8) - _flatOptions.token = '@foo/' - _flatOptions.scope = '@myscope' - _flatOptions['@myscope:registry'] = 'https://diff-registry.npmjs.com/' + flatOptions['//diff-registry.npmjs.com/:_authToken'] = '@bar/' + flatOptions['//registry.npmjs.org/:_authToken'] = '@foo/' + config.scope = '@myscope' + config['@myscope:registry'] = 'https://diff-registry.npmjs.com/' + flatOptions.scope = '@myscope' + flatOptions['@myscope:registry'] = 'https://diff-registry.npmjs.com/' npmlog.verbose = (title, msg) => { t.equal(title, 'logout', 'should have correcct log prefix') @@ -99,7 +103,7 @@ test('token scoped logout', async (t) => { ) } - config.clearCredentialsByURI = (registry) => { + npm.config.clearCredentialsByURI = (registry) => { t.equal( registry, 'https://diff-registry.npmjs.com/', @@ -107,7 +111,7 @@ test('token scoped logout', async (t) => { ) } - config.delete = (ref, type) => { + npm.config.delete = (ref, type) => { t.equal( ref, '@myscope:registry', @@ -116,23 +120,24 @@ test('token scoped logout', async (t) => { t.equal(type, 'user', 'should delete from user config') } - config.save = (type) => { + npm.config.save = (type) => { t.equal(type, 'user', 'should save to user config') } await new Promise((res, rej) => { logout.exec([], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') - t.deepEqual( + t.same( result, { - url: '/-/user/token/%40foo%2F', + url: '/-/user/token/%40bar%2F', opts: { registry: 'https://registry.npmjs.org/', '@myscope:registry': 'https://diff-registry.npmjs.com/', scope: '@myscope', - token: '@foo/', + '//registry.npmjs.org/:_authToken': '@foo/', // <- removed by npm-registry-fetch + '//diff-registry.npmjs.com/:_authToken': '@bar/', method: 'DELETE', ignoreBody: true, }, @@ -140,9 +145,11 @@ test('token scoped logout', async (t) => { 'should call npm-registry-fetch with expected values' ) - _flatOptions.scope = '' - delete _flatOptions['@myscope:registry'] - delete _flatOptions.token + config.scope = '' + delete flatOptions['//diff-registry.npmjs.com/:_authToken'] + delete flatOptions['//registry.npmjs.org/:_authToken'] + delete config['@myscope:registry'] + delete flatOptions.scope result = null mocks['npm-registry-fetch'] = null config.clearCredentialsByURI = null @@ -155,14 +162,14 @@ test('token scoped logout', async (t) => { }) }) -test('user/pass logout', async (t) => { +t.test('user/pass logout', async (t) => { t.plan(3) - _flatOptions.username = 'foo' - _flatOptions.password = 'bar' + flatOptions['//registry.npmjs.org/:username'] = 'foo' + flatOptions['//registry.npmjs.org/:_password'] = 'bar' npmlog.verbose = (title, msg) => { - t.equal(title, 'logout', 'should have correcct log prefix') + t.equal(title, 'logout', 'should have 
correct log prefix') t.equal( msg, 'clearing user credentials for https://registry.npmjs.org/', @@ -170,17 +177,17 @@ test('user/pass logout', async (t) => { ) } - config.clearCredentialsByURI = () => null - config.save = () => null + npm.config.clearCredentialsByURI = () => null + npm.config.save = () => null await new Promise((res, rej) => { logout.exec([], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') - delete _flatOptions.username - delete _flatOptions.password - config.clearCredentialsByURI = null - config.save = null + delete flatOptions['//registry.npmjs.org/:username'] + delete flatOptions['//registry.npmjs.org/:_password'] + npm.config.clearCredentialsByURI = null + npm.config.save = null npmlog.verbose = null res() @@ -188,7 +195,7 @@ test('user/pass logout', async (t) => { }) }) -test('missing credentials', (t) => { +t.test('missing credentials', (t) => { logout.exec([], (err) => { t.match( err.message, @@ -200,12 +207,12 @@ test('missing credentials', (t) => { }) }) -test('ignore invalid scoped registry config', async (t) => { +t.test('ignore invalid scoped registry config', async (t) => { t.plan(5) - _flatOptions.token = '@foo/' - _flatOptions.scope = '@myscope' - _flatOptions['@myscope:registry'] = '' + flatOptions['//registry.npmjs.org/:_authToken'] = '@foo/' + config.scope = '@myscope' + flatOptions['@myscope:registry'] = '' npmlog.verbose = (title, msg) => { t.equal(title, 'logout', 'should have correcct log prefix') @@ -216,7 +223,7 @@ test('ignore invalid scoped registry config', async (t) => { ) } - config.clearCredentialsByURI = (registry) => { + npm.config.clearCredentialsByURI = (registry) => { t.equal( registry, 'https://registry.npmjs.org/', @@ -224,22 +231,21 @@ test('ignore invalid scoped registry config', async (t) => { ) } - config.delete = () => null - config.save = () => null + npm.config.delete = () => null + npm.config.save = () => null await new Promise((res, rej) => { logout.exec([], (err) => { - t.ifError(err, 'should not error out') + t.error(err, 'should not error out') - t.deepEqual( + t.same( result, { url: '/-/user/token/%40foo%2F', opts: { + '//registry.npmjs.org/:_authToken': '@foo/', registry: 'https://registry.npmjs.org/', - scope: '@myscope', '@myscope:registry': '', - token: '@foo/', method: 'DELETE', ignoreBody: true, }, @@ -247,7 +253,7 @@ test('ignore invalid scoped registry config', async (t) => { 'should call npm-registry-fetch with expected values' ) - delete _flatOptions.token + delete flatOptions.token result = null mocks['npm-registry-fetch'] = null config.clearCredentialsByURI = null diff --git a/test/lib/ls.js b/test/lib/ls.js index bd81776d5f3df..5f196501e55d1 100644 --- a/test/lib/ls.js +++ b/test/lib/ls.js @@ -1,8 +1,11 @@ -const { resolve } = require('path') - +// TODO(isaacs): This test has a lot of very large objects pasted inline. +// Consider using t.matchSnapshot on these instead, especially since many +// of them contain the tap testdir folders, which are auto-generated and +// may change when node-tap is updated. 
const t = require('tap') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm.js') +const { resolve } = require('path') const { utimesSync } = require('fs') const touchHiddenPackageLock = prefix => { const later = new Date(Date.now() + 10000) @@ -18,19 +21,19 @@ const simpleNmFixture = { name: 'foo', version: '1.0.0', dependencies: { - bar: '^1.0.0', + dog: '^1.0.0', }, }), }, - bar: { + dog: { 'package.json': JSON.stringify({ - name: 'bar', + name: 'dog', version: '1.0.0', }), }, - lorem: { + chai: { 'package.json': JSON.stringify({ - name: 'lorem', + name: 'chai', version: '1.0.0', }), }, @@ -55,13 +58,13 @@ const diffDepTypesNmFixture = { description: 'A PROD dep kind of dep', version: '1.0.0', dependencies: { - bar: '^2.0.0', + dog: '^2.0.0', }, }), node_modules: { - bar: { + dog: { 'package.json': JSON.stringify({ - name: 'bar', + name: 'dog', description: 'A dep that bars', version: '2.0.0', }), @@ -86,12 +89,14 @@ const diffDepTypesNmFixture = { }, } -let prefix -let globalDir = 'MISSING_GLOBAL_DIR' let result = '' -// note this _flatOptions representations is for tests-only and does not -// represent exactly the properties found in the actual flatOptions obj -const _flatOptions = { +const LS = t.mock('../../lib/ls.js', { + path: { + ...require('path'), + sep: '/', + }, +}) +const config = { all: true, color: false, dev: false, @@ -101,84 +106,79 @@ const _flatOptions = { link: false, only: null, parseable: false, - get prefix () { - return prefix - }, production: false, + 'package-lock-only': false, +} +const flatOptions = { } -const LS = requireInject('../../lib/ls.js', { - '../../lib/utils/output.js': msg => { +const npm = mockNpm({ + config, + flatOptions, + output: msg => { result = msg }, }) -const ls = new LS({ - flatOptions: _flatOptions, - limit: { - fetch: 3, - }, - get prefix () { - return _flatOptions.prefix - }, - get globalDir () { - return globalDir - }, - config: { - get (key) { - return _flatOptions[key] - }, - }, -}) +const ls = new LS(npm) const redactCwd = res => res && res.replace(/\\+/g, '/').replace(new RegExp(__dirname.replace(/\\+/g, '/'), 'gi'), '{CWD}') -const jsonParse = res => JSON.parse(redactCwd(res)) - -const cleanUpResult = (done, t) => { - result = '' - done() +const redactCwdObj = obj => { + if (Array.isArray(obj)) + return obj.map(o => redactCwdObj(o)) + else if (typeof obj === 'string') + return redactCwd(obj) + else if (!obj) + return obj + else if (typeof obj === 'object') { + return Object.keys(obj).reduce((o, k) => { + o[k] = redactCwdObj(obj[k]) + return o + }, {}) + } else + return obj } +const jsonParse = res => redactCwdObj(JSON.parse(res)) + +const cleanUpResult = () => result = '' + t.test('ls', (t) => { t.beforeEach(cleanUpResult) - _flatOptions.json = false - _flatOptions.unicode = false + config.json = false + config.unicode = false t.test('no args', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output tree representation of dependencies structure') t.end() }) }) t.test('missing package.json', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ ...simpleNmFixture, }) ls.exec([], (err) => { - t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code') - t.matchSnapshot( 
- redactCwd(err.message), - 'should log all extraneous deps on error msg' - ) + t.error(err) // should not error for extraneous t.matchSnapshot(redactCwd(result), 'should output tree missing name/version of top-level package') t.end() }) }) t.test('extraneous deps', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -189,42 +189,37 @@ t.test('ls', (t) => { ...simpleNmFixture, }) ls.exec([], (err) => { - t.equal(err.code, 'ELSPROBLEMS', 'should have error code') - t.equal( - redactCwd(err.message), - 'extraneous: lorem@1.0.0 {CWD}/ls-ls-extraneous-deps/node_modules/lorem', - 'should log extraneous dep as error' - ) + t.error(err) // should not error for extraneous t.matchSnapshot(redactCwd(result), 'should output containing problems info') t.end() }) }) t.test('with filter arg', (t) => { - _flatOptions.color = true - prefix = t.testdir({ + npm.color = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) - ls.exec(['lorem'], (err) => { - t.ifError(err, 'npm ls') + ls.exec(['chai'], (err) => { + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output tree contaning only occurrences of filtered by package and colored output') - _flatOptions.color = false + npm.color = false t.end() }) }) t.test('with dot filter arg', (t) => { - _flatOptions.all = false - _flatOptions.depth = 0 - prefix = t.testdir({ + config.all = false + config.depth = 0 + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -236,41 +231,41 @@ t.test('ls', (t) => { ...simpleNmFixture, }) ls.exec(['.'], (err) => { - t.ifError(err, 'should not throw on missing dep above current level') + t.error(err, 'should not throw on missing dep above current level') t.matchSnapshot(redactCwd(result), 'should output tree contaning only occurrences of filtered by package and colored output') - _flatOptions.all = true - _flatOptions.depth = Infinity + config.all = true + config.depth = Infinity t.end() }) }) t.test('with filter arg nested dep', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) - ls.exec(['bar'], (err) => { - t.ifError(err, 'npm ls') + ls.exec(['dog'], (err) => { + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output tree contaning only occurrences of filtered package and its ancestors') t.end() }) }) t.test('with multiple filter args', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', ipsum: '^1.0.0', }, }), @@ -284,27 +279,27 @@ t.test('ls', (t) => { }, }, }) - ls.exec(['bar@*', 'lorem@1.0.0'], (err) => { - t.ifError(err, 'npm ls') + ls.exec(['dog@*', 'chai@1.0.0'], (err) => { + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output tree contaning only occurrences of multiple filtered packages and their ancestors') t.end() }) }) t.test('with missing filter arg', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), 
...simpleNmFixture, }) ls.exec(['notadep'], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output tree containing no dependencies info') t.equal( process.exitCode, @@ -317,55 +312,55 @@ t.test('ls', (t) => { }) t.test('default --depth value should be 0', (t) => { - _flatOptions.all = false - _flatOptions.depth = undefined - prefix = t.testdir({ + config.all = false + config.depth = undefined + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output tree containing only top-level dependencies') - _flatOptions.all = true - _flatOptions.depth = Infinity + config.all = true + config.depth = Infinity t.end() }) }) t.test('--depth=0', (t) => { - _flatOptions.all = false - _flatOptions.depth = 0 - prefix = t.testdir({ + config.all = false + config.depth = 0 + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output tree containing only top-level dependencies') - _flatOptions.all = true - _flatOptions.depth = Infinity + config.all = true + config.depth = Infinity t.end() }) }) t.test('--depth=1', (t) => { - _flatOptions.all = false - _flatOptions.depth = 1 - prefix = t.testdir({ + config.all = false + config.depth = 1 + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -415,16 +410,16 @@ t.test('ls', (t) => { }, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output tree containing top-level deps and their deps only') - _flatOptions.all = true - _flatOptions.depth = Infinity + config.all = true + config.depth = Infinity t.end() }) }) t.test('missing/invalid/extraneous', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -439,9 +434,9 @@ t.test('ls', (t) => { t.equal(err.code, 'ELSPROBLEMS', 'should have error code') t.equal( redactCwd(err.message).replace(/\r\n/g, '\n'), - 'invalid: foo@1.0.0 {CWD}/ls-ls-missing-invalid-extraneous/node_modules/foo\n' + - 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0\n' + - 'extraneous: lorem@1.0.0 {CWD}/ls-ls-missing-invalid-extraneous/node_modules/lorem', + 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls-missing-invalid-extraneous/node_modules/chai\n' + + 'invalid: foo@1.0.0 {CWD}/tap-testdir-ls-ls-missing-invalid-extraneous/node_modules/foo\n' + + 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', 'should log missing/invalid/extraneous errors' ) t.matchSnapshot(redactCwd(result), 'should output tree containing missing, invalid, extraneous labels') @@ -450,8 +445,8 @@ t.test('ls', (t) => { }) t.test('colored output', (t) => { - _flatOptions.color = true - prefix = t.testdir({ + npm.color = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -465,20 +460,20 @@ t.test('ls', (t) => { ls.exec([], (err) => { t.equal(err.code, 'ELSPROBLEMS', 'should have error code') t.matchSnapshot(redactCwd(result), 'should output tree 
containing color info') - _flatOptions.color = false + npm.color = false t.end() }) }) t.test('--dev', (t) => { - _flatOptions.dev = true - prefix = t.testdir({ + config.dev = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -494,20 +489,20 @@ t.test('ls', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing dev deps') - _flatOptions.dev = false + config.dev = false t.end() }) }) t.test('--only=development', (t) => { - _flatOptions.only = 'development' - prefix = t.testdir({ + config.only = 'development' + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -523,20 +518,20 @@ t.test('ls', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing only development deps') - _flatOptions.only = null + config.only = null t.end() }) }) t.test('--link', (t) => { - _flatOptions.link = true - prefix = t.testdir({ + config.link = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', 'linked-dep': '^1.0.0', }, devDependencies: { @@ -562,13 +557,13 @@ t.test('ls', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing linked deps') - _flatOptions.link = false + config.link = false t.end() }) }) t.test('print deduped symlinks', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'print-deduped-symlinks', version: '1.0.0', @@ -598,20 +593,20 @@ t.test('ls', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing linked deps') - _flatOptions.link = false + config.link = false t.end() }) }) t.test('--production', (t) => { - _flatOptions.production = true - prefix = t.testdir({ + config.production = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -627,20 +622,20 @@ t.test('ls', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing production deps') - _flatOptions.production = false + config.production = false t.end() }) }) t.test('--only=prod', (t) => { - _flatOptions.only = 'prod' - prefix = t.testdir({ + config.only = 'prod' + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -656,20 +651,20 @@ t.test('ls', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing only prod deps') - _flatOptions.only = null + config.only = null t.end() }) }) t.test('--long', (t) => { - _flatOptions.long = true - prefix = t.testdir({ + config.long = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -685,22 +680,22 @@ t.test('ls', (t) => { }) ls.exec([], () => 
{ t.matchSnapshot(redactCwd(result), 'should output tree info with descriptions') - _flatOptions.long = true + config.long = true t.end() }) }) t.test('--long --depth=0', (t) => { - _flatOptions.all = false - _flatOptions.depth = 0 - _flatOptions.long = true - prefix = t.testdir({ + config.all = false + config.depth = 0 + config.long = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -716,15 +711,15 @@ t.test('ls', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing top-level deps with descriptions') - _flatOptions.all = true - _flatOptions.depth = Infinity - _flatOptions.long = false + config.all = true + config.depth = Infinity + config.long = false t.end() }) }) t.test('json read problems', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': '{broken json', }) ls.exec([], (err) => { @@ -735,22 +730,22 @@ t.test('ls', (t) => { }) t.test('empty location', (t) => { - prefix = t.testdir({}) + npm.prefix = t.testdir({}) ls.exec([], (err) => { - t.ifError(err, 'should not error out on empty locations') + t.error(err, 'should not error out on empty locations') t.matchSnapshot(redactCwd(result), 'should print empty result') t.end() }) }) t.test('invalid peer dep', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -771,8 +766,8 @@ t.test('ls', (t) => { }) t.test('invalid deduped dep', (t) => { - _flatOptions.color = true - prefix = t.testdir({ + npm.color = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'invalid-deduped-dep', version: '1.0.0', @@ -801,13 +796,13 @@ t.test('ls', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree signaling mismatching peer dep in problems') - _flatOptions.color = false + npm.color = false t.end() }) }) t.test('deduped missing dep', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -837,7 +832,7 @@ t.test('ls', (t) => { }) t.test('unmet peer dep', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -855,14 +850,14 @@ t.test('ls', (t) => { }) t.test('unmet optional dep', (t) => { - _flatOptions.color = true - prefix = t.testdir({ + npm.color = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -881,13 +876,13 @@ t.test('ls', (t) => { t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code') t.match(err.message, /invalid: optional-dep@1.0.0/, 'should have invalid dep error msg') t.matchSnapshot(redactCwd(result), 'should output tree with empty entry for missing optional deps') - _flatOptions.color = false + npm.color = false t.end() }) }) t.test('cycle deps', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -917,15 +912,15 @@ t.test('ls', (t) => { }, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') 
t.matchSnapshot(redactCwd(result), 'should print tree output containing deduped ref') t.end() }) }) t.test('cycle deps with filter args', (t) => { - _flatOptions.color = true - prefix = t.testdir({ + npm.color = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -955,15 +950,15 @@ t.test('ls', (t) => { }, }) ls.exec(['a'], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should print tree output containing deduped ref') - _flatOptions.color = false + npm.color = false t.end() }) }) t.test('with no args dedupe entries', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'dedupe-entries', version: '1.0.0', @@ -1003,16 +998,16 @@ t.test('ls', (t) => { }, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should print tree output containing deduped ref') t.end() }) }) t.test('with no args dedupe entries and not displaying all', (t) => { - _flatOptions.all = false - _flatOptions.depth = 0 - prefix = t.testdir({ + config.all = false + config.depth = 0 + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'dedupe-entries', version: '1.0.0', @@ -1052,17 +1047,17 @@ t.test('ls', (t) => { }, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should print tree output containing deduped ref') - _flatOptions.all = true - _flatOptions.depth = Infinity + config.all = true + config.depth = Infinity t.end() }) }) t.test('with args and dedupe entries', (t) => { - _flatOptions.color = true - prefix = t.testdir({ + npm.color = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'dedupe-entries', version: '1.0.0', @@ -1102,15 +1097,15 @@ t.test('ls', (t) => { }, }) ls.exec(['@npmcli/b'], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should print tree output containing deduped ref') - _flatOptions.color = false + npm.color = false t.end() }) }) t.test('with args and different order of items', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'dedupe-entries', version: '1.0.0', @@ -1150,14 +1145,14 @@ t.test('ls', (t) => { }, }) ls.exec(['@npmcli/c'], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should print tree output containing deduped ref') t.end() }) }) t.test('using aliases', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -1192,7 +1187,7 @@ t.test('ls', (t) => { }, }, }) - touchHiddenPackageLock(prefix) + touchHiddenPackageLock(npm.prefix) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing aliases') t.end() @@ -1200,7 +1195,7 @@ t.test('ls', (t) => { }) t.test('resolved points to git ref', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -1238,16 +1233,16 @@ t.test('ls', (t) => { }, }, }) - touchHiddenPackageLock(prefix) + touchHiddenPackageLock(npm.prefix) ls.exec([], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output tree containing git refs') t.end() }) }) t.test('broken resolved field', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ node_modules: { a: { 'package.json': 
JSON.stringify({ @@ -1270,7 +1265,7 @@ t.test('ls', (t) => { dependencies: { a: { version: '1.0.1', - resolved: 'foo@bar://b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', + resolved: 'foo@dog://b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', }, }, @@ -1284,14 +1279,14 @@ t.test('ls', (t) => { }), }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should NOT print git refs in output tree') t.end() }) }) t.test('from and resolved properties', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -1337,7 +1332,7 @@ t.test('ls', (t) => { }, }, }) - touchHiddenPackageLock(prefix) + touchHiddenPackageLock(npm.prefix) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should not be printed in tree output') t.end() @@ -1345,7 +1340,7 @@ t.test('ls', (t) => { }) t.test('global', (t) => { - _flatOptions.global = true + config.global = true const fixtures = t.testdir({ node_modules: { a: { @@ -1372,18 +1367,18 @@ t.test('ls', (t) => { }) // mimics lib/npm.js globalDir getter but pointing to fixtures - globalDir = resolve(fixtures, 'node_modules') + npm.globalDir = resolve(fixtures, 'node_modules') ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should print tree and not mark top-level items extraneous') - globalDir = 'MISSING_GLOBAL_DIR' - _flatOptions.global = false + npm.globalDir = 'MISSING_GLOBAL_DIR' + config.global = false t.end() }) }) t.test('filtering by child of missing dep', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'filter-by-child-of-missing-dep', version: '1.0.0', @@ -1428,20 +1423,22 @@ t.test('ls', (t) => { }) ls.exec(['c'], (err) => { - t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code') + t.error(err) // should not error for extraneous t.matchSnapshot(redactCwd(result), 'should print tree and not duplicate child of missing items') t.end() }) }) - t.test('loading a tree containing workspaces', (t) => { - prefix = t.testdir({ + t.test('loading a tree containing workspaces', async (t) => { + npm.localPrefix = npm.prefix = t.testdir({ 'package.json': JSON.stringify({ - name: 'filter-by-child-of-missing-dep', + name: 'workspaces-tree', version: '1.0.0', workspaces: [ './a', './b', + './d', + './group/*', ], }), node_modules: { @@ -1453,6 +1450,24 @@ t.test('ls', (t) => { version: '1.0.0', }), }, + d: t.fixture('symlink', '../d'), + e: t.fixture('symlink', '../group/e'), + f: t.fixture('symlink', '../group/f'), + foo: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + dependencies: { + bar: '^1.0.0', + }, + }), + }, + bar: { + 'package.json': JSON.stringify({ name: 'bar', version: '1.0.0' }), + }, + baz: { + 'package.json': JSON.stringify({ name: 'baz', version: '1.0.0' }), + }, }, a: { 'package.json': JSON.stringify({ @@ -1460,6 +1475,10 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { c: '^1.0.0', + d: '^1.0.0', + }, + devDependencies: { + baz: '^1.0.0', }, }), }, @@ -1469,25 +1488,126 @@ t.test('ls', (t) => { version: '1.0.0', }), }, + d: { + 'package.json': JSON.stringify({ + name: 'd', + version: '1.0.0', + dependencies: { + foo: '^1.1.1', + }, + }), + }, + group: { + e: { + 'package.json': JSON.stringify({ + name: 'e', + version: '1.0.0', + }), + }, + f: { + 'package.json': JSON.stringify({ + name: 'f', + version: '1.0.0', + 
}), + }, + }, }) - ls.exec([], (err) => { - t.ifError(err, 'should NOT have ELSPROBLEMS error code') - t.matchSnapshot(redactCwd(result), 'should list workspaces properly') + await new Promise((res, rej) => { + config.all = false + config.depth = 0 + npm.color = true + ls.exec([], (err) => { + if (err) + rej(err) + + t.matchSnapshot(redactCwd(result), + 'should list workspaces properly with default configs') + config.all = true + config.depth = Infinity + npm.color = false + res() + }) + }) + + // --all + await new Promise((res, rej) => { + ls.exec([], (err) => { + if (err) + rej(err) + + t.matchSnapshot(redactCwd(result), + 'should list --all workspaces properly') + res() + }) + }) + + // --production + await new Promise((res, rej) => { + config.production = true + ls.exec([], (err) => { + if (err) + rej(err) + + t.matchSnapshot(redactCwd(result), + 'should list only prod deps of workspaces') + + config.production = false + res() + }) + }) + + // filter out a single workspace using args + await new Promise((res, rej) => { + ls.exec(['d'], (err) => { + if (err) + rej(err) - // should also be able to filter out one of the workspaces - ls.exec(['a'], (err) => { - t.ifError(err, 'should NOT have ELSPROBLEMS error code when filter') t.matchSnapshot(redactCwd(result), 'should filter single workspace') + res() + }) + }) - t.end() + // filter out a single workspace and its deps using workspaces filters + await new Promise((res, rej) => { + ls.execWorkspaces([], ['a'], (err) => { + if (err) + rej(err) + + t.matchSnapshot(redactCwd(result), + 'should filter using workspace config') + res() + }) + }) + + // filter out a workspace by parent path + await new Promise((res, rej) => { + ls.execWorkspaces([], ['./group'], (err) => { + if (err) + rej(err) + + t.matchSnapshot(redactCwd(result), + 'should filter by parent folder workspace config') + res() + }) + }) + + // filter by a dep within a workspaces sub tree + await new Promise((res, rej) => { + ls.execWorkspaces(['bar'], ['d'], (err) => { + if (err) + rej(err) + + t.matchSnapshot(redactCwd(result), + 'should print all tree and filter by dep within only the ws subtree') + res() }) }) }) t.test('filter pkg arg using depth option', (t) => { - _flatOptions.depth = 0 - prefix = t.testdir({ + config.depth = 0 + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-pkg-arg-filter-with-depth-opt', version: '1.0.0', @@ -1535,17 +1655,17 @@ t.test('ls', (t) => { t.plan(6) ls.exec(['a'], (err) => { - t.ifError(err, 'should NOT have ELSPROBLEMS error code') + t.error(err, 'should NOT have ELSPROBLEMS error code') t.matchSnapshot(redactCwd(result), 'should list a in top-level only') ls.exec(['d'], (err) => { - t.ifError(err, 'should NOT have ELSPROBLEMS error code when filter') + t.error(err, 'should NOT have ELSPROBLEMS error code when filter') t.matchSnapshot(redactCwd(result), 'should print empty results msg') // if no --depth config is defined, should print path to dep - _flatOptions.depth = null // default config value + config.depth = null // default config value ls.exec(['d'], (err) => { - t.ifError(err, 'should NOT have ELSPROBLEMS error code when filter') + t.error(err, 'should NOT have ELSPROBLEMS error code when filter') t.matchSnapshot(redactCwd(result), 'should print expected result') }) }) @@ -1553,7 +1673,7 @@ t.test('ls', (t) => { }) t.teardown(() => { - _flatOptions.depth = Infinity + config.depth = Infinity }) t.end() @@ -1561,45 +1681,41 @@ t.test('ls', (t) => { t.test('ls --parseable', (t) => { t.beforeEach(cleanUpResult) - 
_flatOptions.json = false - _flatOptions.unicode = false - _flatOptions.parseable = true + config.json = false + config.unicode = false + config.parseable = true t.test('no args', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output parseable representation of dependencies structure') t.end() }) }) t.test('missing package.json', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ ...simpleNmFixture, }) ls.exec([], (err) => { - t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code') - t.matchSnapshot( - redactCwd(err.message), - 'should log all extraneous deps on error msg' - ) + t.error(err) // should not error for extraneous t.matchSnapshot(redactCwd(result), 'should output parseable missing name/version of top-level package') t.end() }) }) t.test('extraneous deps', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -1610,58 +1726,58 @@ t.test('ls --parseable', (t) => { ...simpleNmFixture, }) ls.exec([], (err) => { - t.equal(err.code, 'ELSPROBLEMS', 'should have error code') + t.error(err) // should not error for extraneous t.matchSnapshot(redactCwd(result), 'should output containing problems info') t.end() }) }) t.test('with filter arg', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) - ls.exec(['lorem'], (err) => { - t.ifError(err, 'npm ls') + ls.exec(['chai'], (err) => { + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output parseable contaning only occurrences of filtered by package') t.end() }) }) t.test('with filter arg nested dep', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) - ls.exec(['bar'], (err) => { - t.ifError(err, 'npm ls') + ls.exec(['dog'], (err) => { + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output parseable contaning only occurrences of filtered package') t.end() }) }) t.test('with multiple filter args', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', ipsum: '^1.0.0', }, }), @@ -1675,27 +1791,27 @@ t.test('ls --parseable', (t) => { }, }, }) - ls.exec(['bar@*', 'lorem@1.0.0'], (err) => { - t.ifError(err, 'npm ls') + ls.exec(['dog@*', 'chai@1.0.0'], (err) => { + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output parseable contaning only occurrences of multiple filtered packages and their ancestors') t.end() }) }) t.test('with missing filter arg', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) ls.exec(['notadep'], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should 
output parseable output containing no dependencies info') t.equal( process.exitCode, @@ -1708,76 +1824,76 @@ t.test('ls --parseable', (t) => { }) t.test('default --depth value should be 0', (t) => { - _flatOptions.all = false - _flatOptions.depth = undefined - prefix = t.testdir({ + config.all = false + config.depth = undefined + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output parseable output containing only top-level dependencies') - _flatOptions.all = true - _flatOptions.depth = Infinity + config.all = true + config.depth = Infinity t.end() }) }) t.test('--depth=0', (t) => { - _flatOptions.all = false - _flatOptions.depth = 0 - prefix = t.testdir({ + config.all = false + config.depth = 0 + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output tree containing only top-level dependencies') - _flatOptions.all = true - _flatOptions.depth = Infinity + config.all = true + config.depth = Infinity t.end() }) }) t.test('--depth=1', (t) => { - _flatOptions.all = false - _flatOptions.depth = 1 - prefix = t.testdir({ + config.all = false + config.depth = 1 + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output parseable containing top-level deps and their deps only') - _flatOptions.all = true - _flatOptions.depth = Infinity + config.all = true + config.depth = Infinity t.end() }) }) t.test('missing/invalid/extraneous', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -1796,14 +1912,14 @@ t.test('ls --parseable', (t) => { }) t.test('--dev', (t) => { - _flatOptions.dev = true - prefix = t.testdir({ + config.dev = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -1819,20 +1935,20 @@ t.test('ls --parseable', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing dev deps') - _flatOptions.dev = false + config.dev = false t.end() }) }) t.test('--only=development', (t) => { - _flatOptions.only = 'development' - prefix = t.testdir({ + config.only = 'development' + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -1848,20 +1964,20 @@ t.test('ls --parseable', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing only development deps') - _flatOptions.only = null + config.only = null t.end() }) }) t.test('--link', (t) => { - _flatOptions.link = true - prefix = t.testdir({ + config.link = true + npm.prefix = 
t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', 'linked-dep': '^1.0.0', }, devDependencies: { @@ -1887,20 +2003,20 @@ t.test('ls --parseable', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing linked deps') - _flatOptions.link = false + config.link = false t.end() }) }) t.test('--production', (t) => { - _flatOptions.production = true - prefix = t.testdir({ + config.production = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -1916,20 +2032,20 @@ t.test('ls --parseable', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing production deps') - _flatOptions.production = false + config.production = false t.end() }) }) t.test('--only=prod', (t) => { - _flatOptions.only = 'prod' - prefix = t.testdir({ + config.only = 'prod' + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -1945,20 +2061,20 @@ t.test('ls --parseable', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing only prod deps') - _flatOptions.only = null + config.only = null t.end() }) }) t.test('--long', (t) => { - _flatOptions.long = true - prefix = t.testdir({ + config.long = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -1974,13 +2090,13 @@ t.test('ls --parseable', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree info with descriptions') - _flatOptions.long = true + config.long = true t.end() }) }) t.test('--long with extraneous deps', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -1991,16 +2107,15 @@ t.test('ls --parseable', (t) => { ...simpleNmFixture, }) ls.exec([], (err) => { - t.equal(err.code, 'ELSPROBLEMS', 'should have error code') - t.match(redactCwd(err.message), 'extraneous: lorem@1.0.0 {CWD}/ls-ls-parseable--long-with-extraneous-deps/node_modules/lorem', 'should have error code') + t.error(err) // should not error for extraneous t.matchSnapshot(redactCwd(result), 'should output long parseable output with extraneous info') t.end() }) }) t.test('--long missing/invalid/extraneous', (t) => { - _flatOptions.long = true - prefix = t.testdir({ + config.long = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -2014,20 +2129,20 @@ t.test('ls --parseable', (t) => { ls.exec([], (err) => { t.match(err, { code: 'ELSPROBLEMS' }, 'should list dep problems') t.matchSnapshot(redactCwd(result), 'should output parseable result containing EXTRANEOUS/INVALID labels') - _flatOptions.long = false + config.long = false t.end() }) }) t.test('--long print symlink target location', (t) => { - _flatOptions.long = true - prefix = t.testdir({ + config.long = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: 
'^1.0.0', + chai: '^1.0.0', 'linked-dep': '^1.0.0', }, devDependencies: { @@ -2052,24 +2167,24 @@ t.test('ls --parseable', (t) => { }, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') + t.error(err, 'npm ls') t.matchSnapshot(redactCwd(result), 'should output parseable results with symlink targets') - _flatOptions.long = false + config.long = false t.end() }) }) t.test('--long --depth=0', (t) => { - _flatOptions.all = false - _flatOptions.depth = 0 - _flatOptions.long = true - prefix = t.testdir({ + config.all = false + config.depth = 0 + config.long = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -2085,15 +2200,15 @@ t.test('ls --parseable', (t) => { }) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing top-level deps with descriptions') - _flatOptions.all = true - _flatOptions.depth = Infinity - _flatOptions.long = false + config.all = true + config.depth = Infinity + config.long = false t.end() }) }) t.test('json read problems', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': '{broken json', }) ls.exec([], (err) => { @@ -2104,22 +2219,22 @@ t.test('ls --parseable', (t) => { }) t.test('empty location', (t) => { - prefix = t.testdir({}) + npm.prefix = t.testdir({}) ls.exec([], (err) => { - t.ifError(err, 'should not error out on empty locations') + t.error(err, 'should not error out on empty locations') t.matchSnapshot(redactCwd(result), 'should print empty result') t.end() }) }) t.test('unmet peer dep', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -2140,13 +2255,13 @@ t.test('ls --parseable', (t) => { }) t.test('unmet optional dep', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -2170,7 +2285,7 @@ t.test('ls --parseable', (t) => { }) t.test('cycle deps', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -2206,7 +2321,7 @@ t.test('ls --parseable', (t) => { }) t.test('using aliases', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -2237,7 +2352,7 @@ t.test('ls --parseable', (t) => { }, }, }) - touchHiddenPackageLock(prefix) + touchHiddenPackageLock(npm.prefix) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing aliases') t.end() @@ -2245,7 +2360,7 @@ t.test('ls --parseable', (t) => { }) t.test('resolved points to git ref', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -2282,7 +2397,7 @@ t.test('ls --parseable', (t) => { }, }, }) - touchHiddenPackageLock(prefix) + touchHiddenPackageLock(npm.prefix) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing git refs') t.end() @@ -2290,7 +2405,7 @@ t.test('ls --parseable', (t) => { }) t.test('from and resolved properties', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 
'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -2336,7 +2451,7 @@ t.test('ls --parseable', (t) => { }, }, }) - touchHiddenPackageLock(prefix) + touchHiddenPackageLock(npm.prefix) ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should not be printed in tree output') t.end() @@ -2344,7 +2459,7 @@ t.test('ls --parseable', (t) => { }) t.test('global', (t) => { - _flatOptions.global = true + config.global = true const fixtures = t.testdir({ node_modules: { a: { @@ -2371,12 +2486,12 @@ t.test('ls --parseable', (t) => { }) // mimics lib/npm.js globalDir getter but pointing to fixtures - globalDir = resolve(fixtures, 'node_modules') + npm.globalDir = resolve(fixtures, 'node_modules') ls.exec([], () => { t.matchSnapshot(redactCwd(result), 'should print parseable output for global deps') - globalDir = 'MISSING_GLOBAL_DIR' - _flatOptions.global = false + npm.globalDir = 'MISSING_GLOBAL_DIR' + config.global = false t.end() }) }) @@ -2384,25 +2499,125 @@ t.test('ls --parseable', (t) => { t.end() }) +t.test('ignore missing optional deps', async t => { + t.beforeEach(cleanUpResult) + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls-ignore-missing-optional', + version: '1.2.3', + peerDependencies: { + 'peer-ok': '1', + 'peer-missing': '1', + 'peer-wrong': '1', + 'peer-optional-ok': '1', + 'peer-optional-missing': '1', + 'peer-optional-wrong': '1', + }, + peerDependenciesMeta: { + 'peer-optional-ok': { + optional: true, + }, + 'peer-optional-missing': { + optional: true, + }, + 'peer-optional-wrong': { + optional: true, + }, + }, + optionalDependencies: { + 'optional-ok': '1', + 'optional-missing': '1', + 'optional-wrong': '1', + }, + dependencies: { + 'prod-ok': '1', + 'prod-missing': '1', + 'prod-wrong': '1', + }, + }), + node_modules: { + 'prod-ok': { + 'package.json': JSON.stringify({name: 'prod-ok', version: '1.2.3' }), + }, + 'prod-wrong': { + 'package.json': JSON.stringify({name: 'prod-wrong', version: '3.2.1' }), + }, + 'optional-ok': { + 'package.json': JSON.stringify({name: 'optional-ok', version: '1.2.3' }), + }, + 'optional-wrong': { + 'package.json': JSON.stringify({name: 'optional-wrong', version: '3.2.1' }), + }, + 'peer-optional-ok': { + 'package.json': JSON.stringify({name: 'peer-optional-ok', version: '1.2.3' }), + }, + 'peer-optional-wrong': { + 'package.json': JSON.stringify({name: 'peer-optional-wrong', version: '3.2.1' }), + }, + 'peer-ok': { + 'package.json': JSON.stringify({name: 'peer-ok', version: '1.2.3' }), + }, + 'peer-wrong': { + 'package.json': JSON.stringify({name: 'peer-wrong', version: '3.2.1' }), + }, + }, + }) + + config.all = true + const prefix = npm.prefix.toLowerCase().replace(/\\/g, '/') + const cleanupPaths = str => + str.toLowerCase().replace(/\\/g, '/').split(prefix).join('{project}') + + t.test('--json', t => { + config.json = true + config.parseable = false + ls.exec([], (err) => { + t.match(err, { code: 'ELSPROBLEMS' }) + result = JSON.parse(result) + const problems = result.problems.map(cleanupPaths) + t.matchSnapshot(problems, 'ls --json problems') + t.end() + }) + }) + t.test('--parseable', t => { + config.json = false + config.parseable = true + ls.exec([], (err) => { + t.match(err, { code: 'ELSPROBLEMS' }) + t.matchSnapshot(cleanupPaths(result), 'ls --parseable result') + t.end() + }) + }) + t.test('human output', t => { + config.json = false + config.parseable = false + ls.exec([], (err) => { + t.match(err, { code: 'ELSPROBLEMS' }) + t.matchSnapshot(cleanupPaths(result), 'ls result') + 
t.end() + }) + }) +}) + t.test('ls --json', (t) => { t.beforeEach(cleanUpResult) - _flatOptions.json = true - _flatOptions.parseable = false + config.json = true + config.parseable = false t.test('no args', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') - t.deepEqual( + t.error(err, 'npm ls') + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -2411,12 +2626,12 @@ t.test('ls --json', (t) => { foo: { version: '1.0.0', dependencies: { - bar: { + dog: { version: '1.0.0', }, }, }, - lorem: { + chai: { version: '1.0.0', }, }, @@ -2428,44 +2643,44 @@ t.test('ls --json', (t) => { }) t.test('missing package.json', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ ...simpleNmFixture, }) ls.exec([], (err) => { - t.match(err, { code: 'ELSPROBLEMS' }, 'should list dep problems') - t.deepEqual( + t.error(err) // should not error for extraneous + t.same( jsonParse(result), { problems: [ - 'extraneous: bar@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/bar', - 'extraneous: foo@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/foo', - 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/lorem', + 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/chai', + 'extraneous: dog@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/dog', + 'extraneous: foo@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/foo', ], dependencies: { - bar: { + dog: { version: '1.0.0', extraneous: true, problems: [ - 'extraneous: bar@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/bar', + 'extraneous: dog@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/dog', ], }, foo: { version: '1.0.0', extraneous: true, problems: [ - 'extraneous: foo@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/foo', + 'extraneous: foo@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/foo', ], dependencies: { - bar: { + dog: { version: '1.0.0', }, }, }, - lorem: { + chai: { version: '1.0.0', extraneous: true, problems: [ - 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/lorem', + 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/chai', ], }, }, @@ -2477,7 +2692,7 @@ t.test('ls --json', (t) => { }) t.test('extraneous deps', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -2488,38 +2703,29 @@ t.test('ls --json', (t) => { ...simpleNmFixture, }) ls.exec([], (err) => { - t.equal( - redactCwd(err.message), - 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-extraneous-deps/node_modules/lorem', - 'should log extraneous dep as error' - ) - t.equal( - err.code, - 'ELSPROBLEMS', - 'should have ELSPROBLEMS error code' - ) - t.deepEqual( + t.error(err) // should not error for extraneous + t.same( jsonParse(result), { name: 'test-npm-ls', version: '1.0.0', problems: [ - 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-extraneous-deps/node_modules/lorem', + 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-extraneous-deps/node_modules/chai', ], dependencies: { foo: { version: '1.0.0', dependencies: { - bar: { + dog: { version: '1.0.0', }, }, }, - lorem: { + chai: { version: '1.0.0', extraneous: true, problems: [ - 
'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-extraneous-deps/node_modules/lorem', + 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-extraneous-deps/node_modules/chai', ], }, }, @@ -2530,27 +2736,69 @@ t.test('ls --json', (t) => { }) }) + t.test('missing deps --long', (t) => { + config.long = true + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + dog: '^1.0.0', + chai: '^1.0.0', + ipsum: '^1.0.0', + }, + }), + ...simpleNmFixture, + }) + ls.exec([], (err) => { + t.equal( + redactCwd(err.message), + 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', + 'should log missing dep as error' + ) + t.equal( + err.code, + 'ELSPROBLEMS', + 'should have ELSPROBLEMS error code' + ) + t.match( + jsonParse(result), + { + name: 'test-npm-ls', + version: '1.0.0', + problems: [ + 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', + ], + }, + 'should output json containing problems info' + ) + config.long = false + t.end() + }) + }) + t.test('with filter arg', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) - ls.exec(['lorem'], (err) => { - t.ifError(err, 'npm ls') - t.deepEqual( + ls.exec(['chai'], (err) => { + t.error(err, 'npm ls') + t.same( jsonParse(result), { name: 'test-npm-ls', version: '1.0.0', dependencies: { - lorem: { + chai: { version: '1.0.0', }, }, @@ -2567,20 +2815,20 @@ t.test('ls --json', (t) => { }) t.test('with filter arg nested dep', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) - ls.exec(['bar'], (err) => { - t.ifError(err, 'npm ls') - t.deepEqual( + ls.exec(['dog'], (err) => { + t.error(err, 'npm ls') + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -2589,7 +2837,7 @@ t.test('ls --json', (t) => { foo: { version: '1.0.0', dependencies: { - bar: { + dog: { version: '1.0.0', }, }, @@ -2603,13 +2851,13 @@ t.test('ls --json', (t) => { }) t.test('with multiple filter args', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', ipsum: '^1.0.0', }, }), @@ -2623,9 +2871,9 @@ t.test('ls --json', (t) => { }, }, }) - ls.exec(['bar@*', 'lorem@1.0.0'], (err) => { - t.ifError(err, 'npm ls') - t.deepEqual( + ls.exec(['dog@*', 'chai@1.0.0'], (err) => { + t.error(err, 'npm ls') + t.same( jsonParse(result), { version: '1.0.0', @@ -2634,12 +2882,12 @@ t.test('ls --json', (t) => { foo: { version: '1.0.0', dependencies: { - bar: { + dog: { version: '1.0.0', }, }, }, - lorem: { + chai: { version: '1.0.0', }, }, @@ -2651,20 +2899,20 @@ t.test('ls --json', (t) => { }) t.test('with missing filter arg', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) ls.exec(['notadep'], (err) => { - t.ifError(err, 'npm ls') - t.deepEqual( + t.error(err, 'npm ls') + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -2683,22 +2931,22 @@ t.test('ls --json', (t) => { }) t.test('default --depth value should now be 0', (t) => { - _flatOptions.all 
= false - _flatOptions.depth = undefined - prefix = t.testdir({ + config.all = false + config.depth = undefined + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') - t.deepEqual( + t.error(err, 'npm ls') + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -2707,36 +2955,36 @@ t.test('ls --json', (t) => { foo: { version: '1.0.0', }, - lorem: { + chai: { version: '1.0.0', }, }, }, 'should output json containing only top-level dependencies' ) - _flatOptions.all = true - _flatOptions.depth = Infinity + config.all = true + config.depth = Infinity t.end() }) }) t.test('--depth=0', (t) => { - _flatOptions.all = false - _flatOptions.depth = 0 - prefix = t.testdir({ + config.all = false + config.depth = 0 + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') - t.deepEqual( + t.error(err, 'npm ls') + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -2745,36 +2993,36 @@ t.test('ls --json', (t) => { foo: { version: '1.0.0', }, - lorem: { + chai: { version: '1.0.0', }, }, }, 'should output json containing only top-level dependencies' ) - _flatOptions.all = true - _flatOptions.depth = Infinity + config.all = true + config.depth = Infinity t.end() }) }) t.test('--depth=1', (t) => { - _flatOptions.all = false - _flatOptions.depth = 1 - prefix = t.testdir({ + config.all = false + config.depth = 1 + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, }), ...simpleNmFixture, }) ls.exec([], (err) => { - t.ifError(err, 'npm ls') - t.deepEqual( + t.error(err, 'npm ls') + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -2783,26 +3031,26 @@ t.test('ls --json', (t) => { foo: { version: '1.0.0', dependencies: { - bar: { + dog: { version: '1.0.0', }, }, }, - lorem: { + chai: { version: '1.0.0', }, }, }, 'should output json containing top-level deps and their deps only' ) - _flatOptions.all = true - _flatOptions.depth = Infinity + config.all = true + config.depth = Infinity t.end() }) }) t.test('missing/invalid/extraneous', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -2815,34 +3063,34 @@ t.test('ls --json', (t) => { }) ls.exec([], (err) => { t.match(err, { code: 'ELSPROBLEMS' }, 'should list dep problems') - t.deepEqual( + t.same( jsonParse(result), { name: 'test-npm-ls', version: '1.0.0', problems: [ - 'invalid: foo@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/foo', + 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-invalid-extraneous/node_modules/chai', + 'invalid: foo@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-invalid-extraneous/node_modules/foo', 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', - 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/lorem', ], dependencies: { foo: { version: '1.0.0', - invalid: true, + invalid: '"^2.0.0" from the root project', problems: [ - 'invalid: foo@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/foo', + 'invalid: foo@1.0.0 
{CWD}/tap-testdir-ls-ls---json-missing-invalid-extraneous/node_modules/foo', ], dependencies: { - bar: { + dog: { version: '1.0.0', }, }, }, - lorem: { + chai: { version: '1.0.0', extraneous: true, problems: [ - 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/lorem', + 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-invalid-extraneous/node_modules/chai', ], }, ipsum: { @@ -2861,14 +3109,14 @@ t.test('ls --json', (t) => { }) t.test('--dev', (t) => { - _flatOptions.dev = true - prefix = t.testdir({ + config.dev = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -2883,7 +3131,7 @@ t.test('ls --json', (t) => { ...diffDepTypesNmFixture, }) ls.exec([], () => { - t.deepEqual( + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -2894,7 +3142,7 @@ t.test('ls --json', (t) => { dependencies: { foo: { version: '1.0.0', - dependencies: { bar: { version: '1.0.0' } }, + dependencies: { dog: { version: '1.0.0' } }, }, }, }, @@ -2902,20 +3150,20 @@ t.test('ls --json', (t) => { }, 'should output json containing dev deps' ) - _flatOptions.dev = false + config.dev = false t.end() }) }) t.test('--only=development', (t) => { - _flatOptions.only = 'development' - prefix = t.testdir({ + config.only = 'development' + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -2930,7 +3178,7 @@ t.test('ls --json', (t) => { ...diffDepTypesNmFixture, }) ls.exec([], () => { - t.deepEqual( + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -2941,7 +3189,7 @@ t.test('ls --json', (t) => { dependencies: { foo: { version: '1.0.0', - dependencies: { bar: { version: '1.0.0' } }, + dependencies: { dog: { version: '1.0.0' } }, }, }, }, @@ -2949,20 +3197,20 @@ t.test('ls --json', (t) => { }, 'should output json containing only development deps' ) - _flatOptions.only = null + config.only = null t.end() }) }) t.test('--link', (t) => { - _flatOptions.link = true - prefix = t.testdir({ + config.link = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', 'linked-dep': '^1.0.0', }, devDependencies: { @@ -2987,7 +3235,7 @@ t.test('ls --json', (t) => { }, }) ls.exec([], () => { - t.deepEqual( + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -3001,20 +3249,20 @@ t.test('ls --json', (t) => { }, 'should output json containing linked deps' ) - _flatOptions.link = false + config.link = false t.end() }) }) t.test('--production', (t) => { - _flatOptions.production = true - prefix = t.testdir({ + config.production = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -3029,33 +3277,33 @@ t.test('ls --json', (t) => { ...diffDepTypesNmFixture, }) ls.exec([], () => { - t.deepEqual( + t.same( jsonParse(result), { name: 'test-npm-ls', version: '1.0.0', dependencies: { - lorem: { version: '1.0.0' }, + chai: { version: '1.0.0' }, 'optional-dep': { version: '1.0.0' }, - 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } }, + 'prod-dep': { 
version: '1.0.0', dependencies: { dog: { version: '2.0.0' } } }, }, }, 'should output json containing production deps' ) - _flatOptions.production = false + config.production = false t.end() }) }) t.test('--only=prod', (t) => { - _flatOptions.only = 'prod' - prefix = t.testdir({ + config.only = 'prod' + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -3070,26 +3318,26 @@ t.test('ls --json', (t) => { ...diffDepTypesNmFixture, }) ls.exec([], () => { - t.deepEqual( + t.same( jsonParse(result), { name: 'test-npm-ls', version: '1.0.0', dependencies: { - lorem: { version: '1.0.0' }, + chai: { version: '1.0.0' }, 'optional-dep': { version: '1.0.0' }, - 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } }, + 'prod-dep': { version: '1.0.0', dependencies: { dog: { version: '2.0.0' } } }, }, }, 'should output json containing only prod deps' ) - _flatOptions.only = null + config.only = null t.end() }) }) t.test('from lockfile', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ node_modules: { '@isaacs': { 'dedupe-tests-a': { @@ -3183,7 +3431,7 @@ t.test('ls --json', (t) => { }), }) ls.exec([], () => { - t.deepEqual( + t.same( jsonParse(result), { version: '1.0.0', @@ -3197,7 +3445,7 @@ t.test('ls --json', (t) => { resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', extraneous: true, problems: [ - 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/ls-ls-json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b', + 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/tap-testdir-ls-ls---json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b', ], }, }, @@ -3208,7 +3456,7 @@ t.test('ls --json', (t) => { }, }, problems: [ - 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/ls-ls-json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b', + 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/tap-testdir-ls-ls---json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b', ], }, 'should output json containing only prod deps' @@ -3218,14 +3466,14 @@ t.test('ls --json', (t) => { }) t.test('--long', (t) => { - _flatOptions.long = true - prefix = t.testdir({ + config.long = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -3240,7 +3488,7 @@ t.test('ls --json', (t) => { ...diffDepTypesNmFixture, }) ls.exec([], () => { - t.deepEqual( + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -3254,7 +3502,7 @@ t.test('ls --json', (t) => { devDependencies: {}, peerDependencies: {}, _dependencies: {}, - path: '{CWD}/ls-ls-json--long/node_modules/peer-dep', + path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/peer-dep', extraneous: false, }, 'dev-dep': { @@ -3266,22 +3514,22 @@ t.test('ls --json', (t) => { name: 'foo', version: '1.0.0', dependencies: { - bar: { - name: 'bar', + dog: { + name: 'dog', version: '1.0.0', - _id: 'bar@1.0.0', + _id: 'dog@1.0.0', devDependencies: {}, peerDependencies: {}, _dependencies: {}, - path: '{CWD}/ls-ls-json--long/node_modules/bar', + path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/dog', extraneous: false, }, }, _id: 'foo@1.0.0', devDependencies: {}, 
peerDependencies: {}, - _dependencies: { bar: '^1.0.0' }, - path: '{CWD}/ls-ls-json--long/node_modules/foo', + _dependencies: { dog: '^1.0.0' }, + path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/foo', extraneous: false, }, }, @@ -3289,17 +3537,17 @@ t.test('ls --json', (t) => { devDependencies: {}, peerDependencies: {}, _dependencies: { foo: '^1.0.0' }, - path: '{CWD}/ls-ls-json--long/node_modules/dev-dep', + path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/dev-dep', extraneous: false, }, - lorem: { - name: 'lorem', + chai: { + name: 'chai', version: '1.0.0', - _id: 'lorem@1.0.0', + _id: 'chai@1.0.0', devDependencies: {}, peerDependencies: {}, _dependencies: {}, - path: '{CWD}/ls-ls-json--long/node_modules/lorem', + path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/chai', extraneous: false, }, 'optional-dep': { @@ -3310,7 +3558,7 @@ t.test('ls --json', (t) => { devDependencies: {}, peerDependencies: {}, _dependencies: {}, - path: '{CWD}/ls-ls-json--long/node_modules/optional-dep', + path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/optional-dep', extraneous: false, }, 'prod-dep': { @@ -3318,23 +3566,23 @@ t.test('ls --json', (t) => { description: 'A PROD dep kind of dep', version: '1.0.0', dependencies: { - bar: { - name: 'bar', + dog: { + name: 'dog', description: 'A dep that bars', version: '2.0.0', - _id: 'bar@2.0.0', + _id: 'dog@2.0.0', devDependencies: {}, peerDependencies: {}, _dependencies: {}, - path: '{CWD}/ls-ls-json--long/node_modules/prod-dep/node_modules/bar', + path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/prod-dep/node_modules/dog', extraneous: false, }, }, _id: 'prod-dep@1.0.0', devDependencies: {}, peerDependencies: {}, - _dependencies: { bar: '^2.0.0' }, - path: '{CWD}/ls-ls-json--long/node_modules/prod-dep', + _dependencies: { dog: '^2.0.0' }, + path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/prod-dep', extraneous: false, }, }, @@ -3342,28 +3590,28 @@ t.test('ls --json', (t) => { optionalDependencies: { 'optional-dep': '^1.0.0' }, peerDependencies: { 'peer-dep': '^1.0.0' }, _id: 'test-npm-ls@1.0.0', - _dependencies: { 'prod-dep': '^1.0.0', lorem: '^1.0.0', 'optional-dep': '^1.0.0' }, - path: '{CWD}/ls-ls-json--long', + _dependencies: { 'prod-dep': '^1.0.0', chai: '^1.0.0', 'optional-dep': '^1.0.0' }, + path: '{CWD}/tap-testdir-ls-ls---json---long', extraneous: false, }, 'should output long json info' ) - _flatOptions.long = true + config.long = true t.end() }) }) t.test('--long --depth=0', (t) => { - _flatOptions.all = false - _flatOptions.depth = 0 - _flatOptions.long = true - prefix = t.testdir({ + config.all = false + config.depth = 0 + config.long = true + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -3378,7 +3626,7 @@ t.test('ls --json', (t) => { ...diffDepTypesNmFixture, }) ls.exec([], () => { - t.deepEqual( + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -3392,7 +3640,7 @@ t.test('ls --json', (t) => { devDependencies: {}, peerDependencies: {}, _dependencies: {}, - path: '{CWD}/ls-ls-json--long-depth-0/node_modules/peer-dep', + path: '{CWD}/tap-testdir-ls-ls---json---long---depth-0/node_modules/peer-dep', extraneous: false, }, 'dev-dep': { @@ -3403,17 +3651,17 @@ t.test('ls --json', (t) => { devDependencies: {}, peerDependencies: {}, _dependencies: { foo: '^1.0.0' }, - path: '{CWD}/ls-ls-json--long-depth-0/node_modules/dev-dep', + 
path: '{CWD}/tap-testdir-ls-ls---json---long---depth-0/node_modules/dev-dep', extraneous: false, }, - lorem: { - name: 'lorem', + chai: { + name: 'chai', version: '1.0.0', - _id: 'lorem@1.0.0', + _id: 'chai@1.0.0', devDependencies: {}, peerDependencies: {}, _dependencies: {}, - path: '{CWD}/ls-ls-json--long-depth-0/node_modules/lorem', + path: '{CWD}/tap-testdir-ls-ls---json---long---depth-0/node_modules/chai', extraneous: false, }, 'optional-dep': { @@ -3424,7 +3672,7 @@ t.test('ls --json', (t) => { devDependencies: {}, peerDependencies: {}, _dependencies: {}, - path: '{CWD}/ls-ls-json--long-depth-0/node_modules/optional-dep', + path: '{CWD}/tap-testdir-ls-ls---json---long---depth-0/node_modules/optional-dep', extraneous: false, }, 'prod-dep': { @@ -3434,8 +3682,8 @@ t.test('ls --json', (t) => { _id: 'prod-dep@1.0.0', devDependencies: {}, peerDependencies: {}, - _dependencies: { bar: '^2.0.0' }, - path: '{CWD}/ls-ls-json--long-depth-0/node_modules/prod-dep', + _dependencies: { dog: '^2.0.0' }, + path: '{CWD}/tap-testdir-ls-ls---json---long---depth-0/node_modules/prod-dep', extraneous: false, }, }, @@ -3443,32 +3691,32 @@ t.test('ls --json', (t) => { optionalDependencies: { 'optional-dep': '^1.0.0' }, peerDependencies: { 'peer-dep': '^1.0.0' }, _id: 'test-npm-ls@1.0.0', - _dependencies: { 'prod-dep': '^1.0.0', lorem: '^1.0.0', 'optional-dep': '^1.0.0' }, - path: '{CWD}/ls-ls-json--long-depth-0', + _dependencies: { 'prod-dep': '^1.0.0', chai: '^1.0.0', 'optional-dep': '^1.0.0' }, + path: '{CWD}/tap-testdir-ls-ls---json---long---depth-0', extraneous: false, }, 'should output json containing top-level deps in long format' ) - _flatOptions.all = true - _flatOptions.depth = Infinity - _flatOptions.long = false + config.all = true + config.depth = Infinity + config.long = false t.end() }) }) t.test('json read problems', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': '{broken json', }) ls.exec([], (err) => { t.match(err.message, 'Failed to parse root package.json', 'should have missin root package.json msg') t.match(err.code, 'EJSONPARSE', 'should have EJSONPARSE error code') - t.deepEqual( + t.same( jsonParse(result), { invalid: true, problems: [ - 'error in {CWD}/ls-ls-json-json-read-problems: Failed to parse root package.json', + 'error in {CWD}/tap-testdir-ls-ls---json-json-read-problems: Failed to parse root package.json', ], }, 'should print empty json result' @@ -3478,10 +3726,10 @@ t.test('ls --json', (t) => { }) t.test('empty location', (t) => { - prefix = t.testdir({}) + npm.prefix = t.testdir({}) ls.exec([], (err) => { - t.ifError(err, 'should not error out on empty locations') - t.deepEqual( + t.error(err, 'should not error out on empty locations') + t.same( jsonParse(result), {}, 'should print empty json result' @@ -3491,13 +3739,13 @@ t.test('ls --json', (t) => { }) t.test('unmet peer dep', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -3513,20 +3761,20 @@ t.test('ls --json', (t) => { }) ls.exec([], (err) => { t.match(err.code, 'ELSPROBLEMS', 'Should have ELSPROBLEMS error code') - t.deepEqual( + t.same( jsonParse(result), { name: 'test-npm-ls', version: '1.0.0', problems: [ - 'invalid: peer-dep@1.0.0 {CWD}/ls-ls-json-unmet-peer-dep/node_modules/peer-dep', + 'invalid: peer-dep@1.0.0 {CWD}/tap-testdir-ls-ls---json-unmet-peer-dep/node_modules/peer-dep', 
], dependencies: { 'peer-dep': { version: '1.0.0', - invalid: true, + invalid: '"^2.0.0" from the root project', problems: [ - 'invalid: peer-dep@1.0.0 {CWD}/ls-ls-json-unmet-peer-dep/node_modules/peer-dep', + 'invalid: peer-dep@1.0.0 {CWD}/tap-testdir-ls-ls---json-unmet-peer-dep/node_modules/peer-dep', ], }, 'dev-dep': { @@ -3534,13 +3782,13 @@ t.test('ls --json', (t) => { dependencies: { foo: { version: '1.0.0', - dependencies: { bar: { version: '1.0.0' } }, + dependencies: { dog: { version: '1.0.0' } }, }, }, }, - lorem: { version: '1.0.0' }, + chai: { version: '1.0.0' }, 'optional-dep': { version: '1.0.0' }, - 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } }, + 'prod-dep': { version: '1.0.0', dependencies: { dog: { version: '2.0.0' } } }, }, }, 'should output json signaling missing peer dep in problems' @@ -3550,13 +3798,13 @@ t.test('ls --json', (t) => { }) t.test('unmet optional dep', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - lorem: '^1.0.0', + chai: '^1.0.0', }, devDependencies: { 'dev-dep': '^1.0.0', @@ -3574,20 +3822,20 @@ t.test('ls --json', (t) => { ls.exec([], (err) => { t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code') t.match(err.message, /invalid: optional-dep@1.0.0/, 'should have invalid dep error msg') - t.deepEqual( + t.same( jsonParse(result), { name: 'test-npm-ls', version: '1.0.0', problems: [ - 'invalid: optional-dep@1.0.0 {CWD}/ls-ls-json-unmet-optional-dep/node_modules/optional-dep', // mismatching optional deps get flagged in problems + 'invalid: optional-dep@1.0.0 {CWD}/tap-testdir-ls-ls---json-unmet-optional-dep/node_modules/optional-dep', // mismatching optional deps get flagged in problems ], dependencies: { 'optional-dep': { version: '1.0.0', - invalid: true, + invalid: '"^2.0.0" from the root project', problems: [ - 'invalid: optional-dep@1.0.0 {CWD}/ls-ls-json-unmet-optional-dep/node_modules/optional-dep', + 'invalid: optional-dep@1.0.0 {CWD}/tap-testdir-ls-ls---json-unmet-optional-dep/node_modules/optional-dep', ], }, 'peer-dep': { @@ -3598,12 +3846,12 @@ t.test('ls --json', (t) => { dependencies: { foo: { version: '1.0.0', - dependencies: { bar: { version: '1.0.0' } }, + dependencies: { dog: { version: '1.0.0' } }, }, }, }, - lorem: { version: '1.0.0' }, - 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } }, + chai: { version: '1.0.0' }, + 'prod-dep': { version: '1.0.0', dependencies: { dog: { version: '2.0.0' } } }, 'missing-optional-dep': {}, // missing optional dep has an empty entry in json output }, }, @@ -3614,7 +3862,7 @@ t.test('ls --json', (t) => { }) t.test('cycle deps', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -3644,7 +3892,7 @@ t.test('ls --json', (t) => { }, }) ls.exec([], () => { - t.deepEqual( + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -3670,7 +3918,7 @@ t.test('ls --json', (t) => { }) t.test('using aliases', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -3700,9 +3948,9 @@ t.test('ls --json', (t) => { }, }, }) - touchHiddenPackageLock(prefix) + touchHiddenPackageLock(npm.prefix) ls.exec([], () => { - t.deepEqual( + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -3721,7 +3969,7 @@ t.test('ls --json', (t) => { }) t.test('resolved points to git 
ref', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -3760,9 +4008,9 @@ t.test('ls --json', (t) => { }, }, }) - touchHiddenPackageLock(prefix) + touchHiddenPackageLock(npm.prefix) ls.exec([], () => { - t.deepEqual( + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -3781,7 +4029,7 @@ t.test('ls --json', (t) => { }) t.test('from and resolved properties', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'test-npm-ls', version: '1.0.0', @@ -3844,9 +4092,9 @@ t.test('ls --json', (t) => { }, }, }) - touchHiddenPackageLock(prefix) + touchHiddenPackageLock(npm.prefix) ls.exec([], () => { - t.deepEqual( + t.same( jsonParse(result), { name: 'test-npm-ls', @@ -3865,17 +4113,17 @@ t.test('ls --json', (t) => { }) t.test('node.name fallback if missing root package name', (t) => { - prefix = t.testdir({ + npm.prefix = t.testdir({ 'package.json': JSON.stringify({ version: '1.0.0', }), }) ls.exec([], () => { - t.deepEqual( + t.same( jsonParse(result), { version: '1.0.0', - name: 'ls-ls-json-node-name-fallback-if-missing-root-package-name', + name: 'tap-testdir-ls-ls---json-node.name-fallback-if-missing-root-package-name', }, 'should use node.name as key in json result obj' ) @@ -3884,7 +4132,7 @@ t.test('ls --json', (t) => { }) t.test('global', (t) => { - _flatOptions.global = true + config.global = true const fixtures = t.testdir({ node_modules: { a: { @@ -3911,13 +4159,13 @@ t.test('ls --json', (t) => { }) // mimics lib/npm.js globalDir getter but pointing to fixtures - globalDir = resolve(fixtures, 'node_modules') + npm.globalDir = resolve(fixtures, 'node_modules') ls.exec([], () => { - t.deepEqual( + t.same( jsonParse(result), { - name: 'ls-ls-json-global', + name: 'tap-testdir-ls-ls---json-global', dependencies: { a: { version: '1.0.0', @@ -3934,11 +4182,849 @@ t.test('ls --json', (t) => { }, 'should print json output for global deps' ) - globalDir = 'MISSING_GLOBAL_DIR' - _flatOptions.global = false + npm.globalDir = 'MISSING_GLOBAL_DIR' + config.global = false t.end() }) }) t.end() }) + +t.test('show multiple invalid reasons', (t) => { + config.json = false + config.all = true + config.depth = Infinity + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + cat: '^2.0.0', + dog: '^1.2.3', + }, + }), + node_modules: { + cat: { + 'package.json': JSON.stringify({ + name: 'cat', + version: '1.0.0', + dependencies: { + dog: '^2.0.0', + }, + }), + }, + dog: { + 'package.json': JSON.stringify({ + name: 'dog', + version: '1.0.0', + dependencies: { + cat: '', + }, + }), + }, + chai: { + 'package.json': JSON.stringify({ + name: 'chai', + version: '1.0.0', + dependencies: { + dog: '2.x', + }, + }), + }, + }, + }) + + const cleanupPaths = str => + redactCwd(str).toLowerCase().replace(/\\/g, '/') + ls.exec([], (err) => { + t.match(err, { code: 'ELSPROBLEMS' }, 'should list dep problems') + t.matchSnapshot(cleanupPaths(result), 'ls result') + t.end() + }) +}) + +t.test('ls --package-lock-only', (t) => { + config['package-lock-only'] = true + t.test('ls --package-lock-only --json', (t) => { + t.beforeEach(cleanUpResult) + config.json = true + config.parseable = false + t.test('no args', (t) => { + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + 'package-lock.json': JSON.stringify({ + 
dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + }, + }), + }) + ls.exec([], (err) => { + t.error(err, 'npm ls') + t.same( + jsonParse(result), + { + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: { + version: '1.0.0', + dependencies: { + dog: { + version: '1.0.0', + }, + }, + }, + chai: { + version: '1.0.0', + }, + }, + }, + 'should output json representation of dependencies structure' + ) + t.end() + }) + }) + + t.test('extraneous deps', (t) => { + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + }, + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + }, + }), + }) + ls.exec([], (err) => { + t.error(err) // should not error for extraneous + t.same( + jsonParse(result), + { + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: { + version: '1.0.0', + dependencies: { + dog: { + version: '1.0.0', + }, + }, + }, + }, + }, + 'should output json containing no problem info' + ) + t.end() + }) + }) + + t.test('missing deps --long', (t) => { + config.long = true + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + dog: '^1.0.0', + chai: '^1.0.0', + ipsum: '^1.0.0', + }, + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + ipsum: { + version: '1.0.0', + }, + }, + }), + }) + ls.exec([], (err) => { + t.error(err, 'npm ls') + t.match( + jsonParse(result), + { + name: 'test-npm-ls', + version: '1.0.0', + }, + 'should output json containing no problems info' + ) + config.long = false + t.end() + }) + }) + + t.test('with filter arg', (t) => { + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + ipsum: { + version: '1.0.0', + }, + }, + }), + }) + ls.exec(['chai'], (err) => { + t.error(err, 'npm ls') + t.same( + jsonParse(result), + { + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + chai: { + version: '1.0.0', + }, + }, + }, + 'should output json contaning only occurrences of filtered by package' + ) + t.equal( + process.exitCode, + 0, + 'should exit with error code 0' + ) + t.end() + }) + }) + + t.test('with filter arg nested dep', (t) => { + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + ipsum: { + version: '1.0.0', + }, + }, + }), + }) + ls.exec(['dog'], (err) => { + t.error(err, 'npm ls') + t.same( + jsonParse(result), + { + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: { + version: '1.0.0', + dependencies: { + dog: { + version: 
'1.0.0', + }, + }, + }, + }, + }, + 'should output json contaning only occurrences of filtered by package' + ) + t.end() + }) + }) + + t.test('with multiple filter args', (t) => { + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + ipsum: '^1.0.0', + }, + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + ipsum: { + version: '1.0.0', + }, + }, + }), + }) + ls.exec(['dog@*', 'chai@1.0.0'], (err) => { + t.error(err, 'npm ls') + t.same( + jsonParse(result), + { + version: '1.0.0', + name: 'test-npm-ls', + dependencies: { + foo: { + version: '1.0.0', + dependencies: { + dog: { + version: '1.0.0', + }, + }, + }, + chai: { + version: '1.0.0', + }, + }, + }, + 'should output json contaning only occurrences of multiple filtered packages and their ancestors' + ) + t.end() + }) + }) + + t.test('with missing filter arg', (t) => { + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + }, + }), + }) + ls.exec(['notadep'], (err) => { + t.error(err, 'npm ls') + t.same( + jsonParse(result), + { + name: 'test-npm-ls', + version: '1.0.0', + }, + 'should output json containing no dependencies info' + ) + t.equal( + process.exitCode, + 1, + 'should exit with error code 1' + ) + process.exitCode = 0 + t.end() + }) + }) + + t.test('default --depth value should now be 0', (t) => { + config.all = false + config.depth = undefined + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + }, + }), + }) + ls.exec([], (err) => { + t.error(err, 'npm ls') + t.same( + jsonParse(result), + { + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + }, + }, + 'should output json containing only top-level dependencies' + ) + config.all = true + config.depth = Infinity + t.end() + }) + }) + + t.test('--depth=0', (t) => { + config.all = false + config.depth = 0 + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + }, + }), + }) + ls.exec([], (err) => { + t.error(err, 'npm ls') + t.same( + jsonParse(result), + { + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + }, + }, + 'should output json containing only top-level dependencies' + ) + config.all = true + config.depth = Infinity + t.end() + }) + }) + + t.test('--depth=1', (t) => { + config.all = false + config.depth = 1 + npm.prefix = t.testdir({ + 
'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + }, + }), + }) + ls.exec([], (err) => { + t.error(err, 'npm ls') + t.same( + jsonParse(result), + { + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: { + version: '1.0.0', + dependencies: { + dog: { + version: '1.0.0', + }, + }, + }, + chai: { + version: '1.0.0', + }, + }, + }, + 'should output json containing top-level deps and their deps only' + ) + config.all = true + config.depth = Infinity + t.end() + }) + }) + + t.test('missing/invalid/extraneous', (t) => { + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^2.0.0', + ipsum: '^1.0.0', + }, + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + }, + }), + }) + ls.exec([], (err) => { + t.match(err, { code: 'ELSPROBLEMS' }, 'should list dep problems') + t.same( + jsonParse(result), + { + name: 'test-npm-ls', + version: '1.0.0', + problems: [ + 'invalid: foo@1.0.0 {CWD}/tap-testdir-ls-ls---package-lock-only-ls---package-lock-only---json-missing-invalid-extraneous/node_modules/foo', + 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', + ], + dependencies: { + foo: { + version: '1.0.0', + invalid: '"^2.0.0" from the root project', + problems: [ + 'invalid: foo@1.0.0 {CWD}/tap-testdir-ls-ls---package-lock-only-ls---package-lock-only---json-missing-invalid-extraneous/node_modules/foo', + ], + dependencies: { + dog: { + version: '1.0.0', + }, + }, + }, + ipsum: { + required: '^1.0.0', + missing: true, + problems: [ + 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', + ], + }, + }, + }, + 'should output json containing top-level deps and their deps only' + ) + t.end() + }) + }) + + t.test('from lockfile', (t) => { + npm.prefix = t.testdir({ + 'package-lock.json': JSON.stringify({ + name: 'dedupe-lockfile', + version: '1.0.0', + lockfileVersion: 2, + requires: true, + packages: { + '': { + name: 'dedupe-lockfile', + version: '1.0.0', + dependencies: { + '@isaacs/dedupe-tests-a': '1.0.1', + '@isaacs/dedupe-tests-b': '1||2', + }, + }, + 'node_modules/@isaacs/dedupe-tests-a': { + name: '@isaacs/dedupe-tests-a', + version: '1.0.1', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', + dependencies: { + '@isaacs/dedupe-tests-b': '1', + }, + }, + 'node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b': { + name: '@isaacs/dedupe-tests-b', + version: '1.0.0', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', + }, + 'node_modules/@isaacs/dedupe-tests-b': { + name: '@isaacs/dedupe-tests-b', + version: '2.0.0', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', + }, + }, + dependencies: { + 
'@isaacs/dedupe-tests-a': { + version: '1.0.1', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', + requires: { + '@isaacs/dedupe-tests-b': '1', + }, + dependencies: { + '@isaacs/dedupe-tests-b': { + version: '1.0.0', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', + }, + }, + }, + '@isaacs/dedupe-tests-b': { + version: '2.0.0', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', + }, + }, + }), + 'package.json': JSON.stringify({ + name: 'dedupe-lockfile', + version: '1.0.0', + dependencies: { + '@isaacs/dedupe-tests-a': '1.0.1', + '@isaacs/dedupe-tests-b': '1||2', + }, + }), + }) + ls.exec([], () => { + t.same( + jsonParse(result), + { + version: '1.0.0', + name: 'dedupe-lockfile', + dependencies: { + '@isaacs/dedupe-tests-a': { + version: '1.0.1', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + dependencies: { + '@isaacs/dedupe-tests-b': { + version: '1.0.0', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + }, + }, + }, + '@isaacs/dedupe-tests-b': { + version: '2.0.0', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + }, + }, + }, + 'should output json containing only prod deps' + ) + t.end() + }) + }) + + t.test('using aliases', (t) => { + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + a: 'npm:b@1.0.0', + }, + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + a: { + version: 'npm:b@1.0.0', + resolved: 'https://localhost:8080/abbrev/-/abbrev-1.0.0.tgz', + }, + }, + }), + }) + ls.exec([], () => { + t.same( + jsonParse(result), + { + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + a: { + version: '1.0.0', + resolved: 'https://localhost:8080/abbrev/-/abbrev-1.0.0.tgz', + }, + }, + }, + 'should output json containing aliases' + ) + t.end() + }) + }) + + t.test('resolved points to git ref', (t) => { + config.long = false + npm.prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + abbrev: 'git+https://github.com/isaacs/abbrev-js.git', + }, + }), + 'package-lock.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + lockfileVersion: 2, + requires: true, + dependencies: { + abbrev: { + version: 'git+ssh://git@github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', + from: 'abbrev@git+https://github.com/isaacs/abbrev-js.git', + }, + }, + } + ), + }) + ls.exec([], () => { + t.same( + jsonParse(result), + { + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + abbrev: { + resolved: 'git+ssh://git@github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', + }, + }, + }, + 'should output json containing git refs' + ) + t.end() + }) + }) + + t.end() + }) + + t.end() +}) diff --git a/test/lib/npm.js b/test/lib/npm.js index 1f7a54e228a0e..03bb46d8d8451 100644 --- a/test/lib/npm.js +++ b/test/lib/npm.js @@ -1,8 +1,12 @@ const t = require('tap') +const npmlog = require('npmlog') +const { real: 
mockNpm } = require('../fixtures/mock-npm.js') + // delete this so that we don't have configs from the fact that it // is being run by 'npm test' const event = process.env.npm_lifecycle_event + for (const env of Object.keys(process.env).filter(e => /^npm_/.test(e))) { if (env === 'npm_command') { // should only be running this in the 'test' or 'run-script' command! @@ -15,7 +19,7 @@ for (const env of Object.keys(process.env).filter(e => /^npm_/.test(e))) { 'should match "npm test" or "npm run test"' ) } else - t.match(process.env[env], /^(run)|(run-script)|(exec)$/) + t.match(process.env[env], /^(run-script|exec)$/) } delete process.env[env] } @@ -23,52 +27,36 @@ for (const env of Object.keys(process.env).filter(e => /^npm_/.test(e))) { const { resolve, dirname } = require('path') const actualPlatform = process.platform - const beWindows = () => { Object.defineProperty(process, 'platform', { value: 'win32', configurable: true, }) } - const bePosix = () => { Object.defineProperty(process, 'platform', { value: 'posix', configurable: true, }) } +const argv = [...process.argv] -const npmlog = require('npmlog') - -const npmPath = resolve(__dirname, '..', '..') -const Config = require('@npmcli/config') -const { types, defaults, shorthands } = require('../../lib/utils/config.js') -const freshConfig = (opts = {}) => { +t.afterEach(() => { for (const env of Object.keys(process.env).filter(e => /^npm_/.test(e))) delete process.env[env] - process.env.npm_config_cache = CACHE - - npm.config = new Config({ - types, - defaults, - shorthands, - npmPath, - log: npmlog, - ...opts, + process.argv = argv + Object.defineProperty(process, 'platform', { + value: actualPlatform, + configurable: true, }) -} - -const logs = [] -for (const level of ['silly', 'verbose', 'timing', 'notice', 'warn', 'error']) - npmlog[level] = (...msg) => logs.push([level, ...msg]) - -const npm = require('../../lib/npm.js') +}) const CACHE = t.testdir() process.env.npm_config_cache = CACHE t.test('not yet loaded', t => { + const { npm, logs } = mockNpm(t) t.match(npm, { started: Number, command: null, @@ -89,159 +77,125 @@ t.test('not yet loaded', t => { t.equal(npm.commands.asdfasdf, undefined) t.equal(npm.deref('list'), 'ls') t.same(logs, []) - logs.length = 0 t.end() }) t.test('npm.load', t => { - t.test('must be called with proper args', t => { - const er = new TypeError('must call as: npm.load(callback)') - t.throws(() => npm.load(), er) + t.test('callback must be a function', t => { + const { npm, logs } = mockNpm(t) + const er = new TypeError('callback must be a function if provided') t.throws(() => npm.load({}), er) t.same(logs, []) - logs.length = 0 t.end() }) - t.test('load error', t => { - const { load } = npm.config + t.test('callback style', t => { + const { npm } = mockNpm(t) + npm.load((err) => { + if (err) + throw err + t.ok(npm.loaded) + t.end() + }) + }) + + t.test('load error', async t => { + const { npm } = mockNpm(t) const loadError = new Error('load error') npm.config.load = async () => { throw loadError } - npm.load(er => { + await npm.load().catch(er => { t.equal(er, loadError) t.equal(npm.loadErr, loadError) - npm.config.load = load - // loading again just returns the same error - npm.load(er => { - t.equal(er, loadError) - t.equal(npm.loadErr, loadError) - npm.loadErr = null - t.end() - }) + }) + npm.config.load = async () => { + throw new Error('new load error') + } + await npm.load().catch(er => { + t.equal(er, loadError, 'loading again returns the original error') + t.equal(npm.loadErr, loadError) }) }) - 
t.test('basic loading', t => { + t.test('basic loading', async t => { + const { npm, logs } = mockNpm(t) const dir = t.testdir({ node_modules: {}, }) - let firstCalled = false - const first = (er) => { - if (er) - throw er - - firstCalled = true - t.equal(npm.loaded, true) - t.equal(npm.config.loaded, true) - t.equal(npm.config.get('force'), false) - } - - let secondCalled = false - const second = () => { - secondCalled = true - } - - t.equal(npm.loading, false, 'not loading yet') - const p = npm.load(first).then(() => { - npm.config.set('prefix', dir) - t.match(npm, { - loaded: true, - loading: false, - flatOptions: {}, - }) - t.equal(firstCalled, true, 'first callback got called') - t.equal(secondCalled, true, 'second callback got called') - let thirdCalled = false - const third = () => { - thirdCalled = true - } - npm.load(third) - t.equal(thirdCalled, true, 'third callbback got called') - t.match(logs, [ - ['timing', 'npm:load', /Completed in [0-9]+ms/], - ]) - logs.length = 0 - - bePosix() - t.equal(resolve(npm.cache), resolve(CACHE), 'cache is cache') - const newCache = t.testdir() - npm.cache = newCache - t.equal(npm.config.get('cache'), newCache, 'cache setter sets config') - t.equal(npm.cache, newCache, 'cache getter gets new config') - t.equal(npm.log, npmlog, 'npmlog getter') - t.equal(npm.lockfileVersion, 2, 'lockfileVersion getter') - t.equal(npm.prefix, npm.localPrefix, 'prefix is local prefix') - t.notEqual(npm.prefix, npm.globalPrefix, 'prefix is not global prefix') - npm.globalPrefix = npm.prefix - t.equal(npm.prefix, npm.globalPrefix, 'globalPrefix setter') - npm.localPrefix = dir + '/extra/prefix' - t.equal(npm.prefix, npm.localPrefix, 'prefix is local prefix after localPrefix setter') - t.notEqual(npm.prefix, npm.globalPrefix, 'prefix is not global prefix after localPrefix setter') - - npm.prefix = dir + '/some/prefix' - t.equal(npm.prefix, npm.localPrefix, 'prefix is local prefix after prefix setter') - t.notEqual(npm.prefix, npm.globalPrefix, 'prefix is not global prefix after prefix setter') - t.equal(npm.bin, npm.localBin, 'bin is local bin after prefix setter') - t.notEqual(npm.bin, npm.globalBin, 'bin is not global bin after prefix setter') - t.equal(npm.dir, npm.localDir, 'dir is local dir after prefix setter') - t.notEqual(npm.dir, npm.globalDir, 'dir is not global dir after prefix setter') - - npm.config.set('global', true) - t.equal(npm.prefix, npm.globalPrefix, 'prefix is global prefix after setting global') - t.notEqual(npm.prefix, npm.localPrefix, 'prefix is not local prefix after setting global') - t.equal(npm.bin, npm.globalBin, 'bin is global bin after setting global') - t.notEqual(npm.bin, npm.localBin, 'bin is not local bin after setting global') - t.equal(npm.dir, npm.globalDir, 'dir is global dir after setting global') - t.notEqual(npm.dir, npm.localDir, 'dir is not local dir after setting global') - - npm.prefix = dir + '/new/global/prefix' - t.equal(npm.prefix, npm.globalPrefix, 'prefix is global prefix after prefix setter') - t.notEqual(npm.prefix, npm.localPrefix, 'prefix is not local prefix after prefix setter') - t.equal(npm.bin, npm.globalBin, 'bin is global bin after prefix setter') - t.notEqual(npm.bin, npm.localBin, 'bin is not local bin after prefix setter') - - beWindows() - t.equal(npm.bin, npm.globalBin, 'bin is global bin in windows mode') - t.equal(npm.dir, npm.globalDir, 'dir is global dir in windows mode') - bePosix() - - const tmp = npm.tmp - t.match(tmp, String, 'npm.tmp is a string') - t.equal(tmp, npm.tmp, 'getter only generates 
it once') + await npm.load() + t.equal(npm.loaded, true) + t.equal(npm.config.loaded, true) + t.equal(npm.config.get('force'), false) + t.ok(npm.usage, 'has usage') + npm.config.set('prefix', dir) + + t.match(npm, { + flatOptions: {}, }) - - t.equal(npm.loaded, false, 'not loaded yet') - t.equal(npm.loading, true, 'working on it tho') - t.isa(p, Promise, 'npm.load() returned a Promise first time') - t.equal(npm.load(second), undefined, - 'npm.load() returns nothing second time') - - return p + t.match(logs, [ + ['timing', 'npm:load', /Completed in [0-9.]+ms/], + ]) + + bePosix() + t.equal(resolve(npm.cache), resolve(CACHE), 'cache is cache') + const newCache = t.testdir() + npm.cache = newCache + t.equal(npm.config.get('cache'), newCache, 'cache setter sets config') + t.equal(npm.cache, newCache, 'cache getter gets new config') + t.equal(npm.log, npmlog, 'npmlog getter') + t.equal(npm.lockfileVersion, 2, 'lockfileVersion getter') + t.equal(npm.prefix, npm.localPrefix, 'prefix is local prefix') + t.not(npm.prefix, npm.globalPrefix, 'prefix is not global prefix') + npm.globalPrefix = npm.prefix + t.equal(npm.prefix, npm.globalPrefix, 'globalPrefix setter') + npm.localPrefix = dir + '/extra/prefix' + t.equal(npm.prefix, npm.localPrefix, 'prefix is local prefix after localPrefix setter') + t.not(npm.prefix, npm.globalPrefix, 'prefix is not global prefix after localPrefix setter') + + npm.prefix = dir + '/some/prefix' + t.equal(npm.prefix, npm.localPrefix, 'prefix is local prefix after prefix setter') + t.not(npm.prefix, npm.globalPrefix, 'prefix is not global prefix after prefix setter') + t.equal(npm.bin, npm.localBin, 'bin is local bin after prefix setter') + t.not(npm.bin, npm.globalBin, 'bin is not global bin after prefix setter') + t.equal(npm.dir, npm.localDir, 'dir is local dir after prefix setter') + t.not(npm.dir, npm.globalDir, 'dir is not global dir after prefix setter') + + npm.config.set('global', true) + t.equal(npm.prefix, npm.globalPrefix, 'prefix is global prefix after setting global') + t.not(npm.prefix, npm.localPrefix, 'prefix is not local prefix after setting global') + t.equal(npm.bin, npm.globalBin, 'bin is global bin after setting global') + t.not(npm.bin, npm.localBin, 'bin is not local bin after setting global') + t.equal(npm.dir, npm.globalDir, 'dir is global dir after setting global') + t.not(npm.dir, npm.localDir, 'dir is not local dir after setting global') + + npm.prefix = dir + '/new/global/prefix' + t.equal(npm.prefix, npm.globalPrefix, 'prefix is global prefix after prefix setter') + t.not(npm.prefix, npm.localPrefix, 'prefix is not local prefix after prefix setter') + t.equal(npm.bin, npm.globalBin, 'bin is global bin after prefix setter') + t.not(npm.bin, npm.localBin, 'bin is not local bin after prefix setter') + + beWindows() + t.equal(npm.bin, npm.globalBin, 'bin is global bin in windows mode') + t.equal(npm.dir, npm.globalDir, 'dir is global dir in windows mode') + bePosix() + + const tmp = npm.tmp + t.match(tmp, String, 'npm.tmp is a string') + t.equal(tmp, npm.tmp, 'getter only generates it once') }) - t.test('forceful loading', t => { - // also, don't get thrown off if argv[0] isn't found for some reason - const [argv0] = process.argv - t.teardown(() => { - process.argv[0] = argv0 - }) - freshConfig({ argv: [...process.argv, '--force', '--color', 'always'] }) - process.argv[0] = 'this exe does not exist or else this test will fail' - return npm.load(er => { - if (er) - throw er - - t.match(logs.filter(l => l[0] !== 'timing'), [ - [ - 'warn', - 'using 
--force', - 'Recommended protections disabled.', - ], - ]) - logs.length = 0 - }) + t.test('forceful loading', async t => { + process.argv = [...process.argv, '--force', '--color', 'always'] + const { npm, logs } = mockNpm(t) + await npm.load() + t.match(logs.filter(l => l[0] !== 'timing'), [ + [ + 'warn', + 'using --force', + 'Recommended protections disabled.', + ], + ]) }) t.test('node is a symlink', async t => { @@ -253,7 +207,6 @@ t.test('npm.load', t => { const PATH = process.env.PATH || process.env.Path process.env.PATH = resolve(dir, 'bin') - const { execPath, argv: processArgv } = process process.argv = [ node, process.argv[1], @@ -266,58 +219,54 @@ t.test('npm.load', t => { 'blergggg', ] - freshConfig() - const { log } = console - const consoleLogs = [] - console.log = (...msg) => consoleLogs.push(msg) - t.teardown(() => { - console.log = log process.env.PATH = PATH - process.argv = processArgv - freshConfig() - logs.length = 0 - process.execPath = execPath }) - logs.length = 0 - - await npm.load(er => { - if (er) - throw er - - t.equal(npm.config.get('scope'), '@foo', 'added the @ sign to scope') - t.match(logs.filter(l => l[0] !== 'timing' || !/^config:/.test(l[1])), [ - [ - 'verbose', - 'node symlink', - resolve(dir, 'bin', node), - ], - [ - 'timing', - 'npm:load', - /Completed in [0-9]+ms/, - ], - ]) - logs.length = 0 - t.equal(process.execPath, resolve(dir, 'bin', node)) - }) + const { npm, logs, outputs } = mockNpm(t) + await npm.load() + t.equal(npm.config.get('scope'), '@foo', 'added the @ sign to scope') + t.match(logs.filter(l => l[0] !== 'timing' || !/^config:/.test(l[1])), [ + [ + 'timing', + 'npm:load:whichnode', + /Completed in [0-9.]+ms/, + ], + [ + 'verbose', + 'node symlink', + resolve(dir, 'bin', node), + ], + [ + 'timing', + 'npm:load', + /Completed in [0-9.]+ms/, + ], + ]) + t.equal(process.execPath, resolve(dir, 'bin', node)) + outputs.length = 0 await npm.commands.ll([], (er) => { if (er) throw er - t.same(consoleLogs, [[npm.commands.ll.usage]], 'print usage') - consoleLogs.length = 0 + t.equal(npm.command, 'll', 'command set to first npm command') + t.equal(npm.flatOptions.npmCommand, 'll', 'npmCommand flatOption set') + + t.same(outputs, [[npm.commands.ll.usage]], 'print usage') npm.config.set('usage', false) t.equal(npm.commands.ll, npm.commands.ll, 'same command, different name') - logs.length = 0 }) + outputs.length = 0 + logs.length = 0 await npm.commands.get(['scope', '\u2010not-a-dash'], (er) => { if (er) throw er + t.strictSame([npm.command, npm.flatOptions.npmCommand], ['ll', 'll'], + 'does not change npm.command when another command is called') + t.match(logs, [ [ 'error', @@ -328,21 +277,138 @@ t.test('npm.load', t => { [ 'timing', 'command:config', - /Completed in [0-9]+ms/, + /Completed in [0-9.]+ms/, ], [ 'timing', 'command:get', - /Completed in [0-9]+ms/, + /Completed in [0-9.]+ms/, ], ]) - t.same(consoleLogs, [['scope=@foo\n\u2010not-a-dash=undefined']]) + t.same(outputs, [['scope=@foo\n\u2010not-a-dash=undefined']]) }) // need this here or node 10 will improperly end the promise ahead of time await new Promise((res) => setTimeout(res)) }) + t.test('workspace-aware configs and commands', async t => { + const dir = t.testdir({ + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + scripts: { test: 'echo test a' }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + scripts: { test: 'echo test b' }, + }), + }, + }, + 'package.json': JSON.stringify({ + name: 'root', + 
version: '1.0.0', + workspaces: ['./packages/*'], + }), + '.npmrc': '', + }) + + process.argv = [ + process.execPath, + process.argv[1], + '--userconfig', + resolve(dir, '.npmrc'), + '--color', + 'false', + '--workspaces', + 'true', + ] + + const { npm, outputs } = mockNpm(t) + await npm.load() + npm.localPrefix = dir + + await new Promise((res, rej) => { + // verify that calling the command with a short name still sets + // the npm.command property to the full canonical name of the cmd. + npm.command = null + npm.commands.run([], er => { + if (er) + rej(er) + + t.equal(npm.command, 'run-script', 'npm.command set to canonical name') + + t.match( + outputs, + [ + ['Lifecycle scripts included in a@1.0.0:'], + [' test\n echo test a'], + [''], + ['Lifecycle scripts included in b@1.0.0:'], + [' test\n echo test b'], + [''], + ], + 'should exec workspaces version of commands' + ) + + res() + }) + }) + }) + + t.test('workspaces in global mode', async t => { + const dir = t.testdir({ + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + scripts: { test: 'echo test a' }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + scripts: { test: 'echo test b' }, + }), + }, + }, + 'package.json': JSON.stringify({ + name: 'root', + version: '1.0.0', + workspaces: ['./packages/*'], + }), + }) + process.argv = [ + process.execPath, + process.argv[1], + '--userconfig', + resolve(dir, '.npmrc'), + '--color', + 'false', + '--workspaces', + '--global', + 'true', + ] + const { npm } = mockNpm(t) + await npm.load() + npm.localPrefix = dir + await new Promise((res, rej) => { + // verify that calling the command with a short name still sets + // the npm.command property to the full canonical name of the cmd. 
+ npm.command = null + npm.commands.run([], er => { + t.match(er, /Workspaces not supported for global packages/) + res() + }) + }) + }) t.end() }) @@ -357,92 +423,81 @@ t.test('loading as main will load the cli', t => { p.on('close', (code, signal) => { t.equal(code, 0) t.equal(signal, null) - t.equal(Buffer.concat(out).toString().trim(), ls.usage) + t.match(Buffer.concat(out).toString(), ls.usage) t.end() }) }) t.test('set process.title', t => { - const { argv: processArgv } = process - const { log } = console - const titleDesc = Object.getOwnPropertyDescriptor(process, 'title') - Object.defineProperty(process, 'title', { - value: '', - settable: true, - enumerable: true, - configurable: true, - }) - const consoleLogs = [] - console.log = (...msg) => consoleLogs.push(msg) - - t.teardown(() => { - console.log = log - process.argv = processArgv - Object.defineProperty(process, 'title', titleDesc) - freshConfig() - }) - - t.afterEach(cb => { - consoleLogs.length = 0 - cb() - }) - t.test('basic title setting', async t => { - freshConfig({ - argv: [ - process.execPath, - process.argv[1], - '--usage', - '--scope=foo', - 'ls', - ], - }) - await npm.load(er => { - if (er) - throw er - t.equal(npm.title, 'npm ls') - t.equal(process.title, 'npm ls') - }) + process.argv = [ + process.execPath, + process.argv[1], + '--usage', + '--scope=foo', + 'ls', + ] + const { npm } = mockNpm(t) + await npm.load() + t.equal(npm.title, 'npm ls') + t.equal(process.title, 'npm ls') }) t.test('do not expose token being revoked', async t => { - freshConfig({ - argv: [ - process.execPath, - process.argv[1], - '--usage', - '--scope=foo', - 'token', - 'revoke', - 'deadbeefcafebad', - ], - }) - await npm.load(er => { - if (er) - throw er - t.equal(npm.title, 'npm token revoke ***') - t.equal(process.title, 'npm token revoke ***') - }) + process.argv = [ + process.execPath, + process.argv[1], + '--usage', + '--scope=foo', + 'token', + 'revoke', + 'deadbeefcafebad', + ] + const { npm } = mockNpm(t) + await npm.load() + t.equal(npm.title, 'npm token revoke ***') + t.equal(process.title, 'npm token revoke ***') }) t.test('do show *** unless a token is actually being revoked', async t => { - freshConfig({ - argv: [ - process.execPath, - process.argv[1], - '--usage', - '--scope=foo', - 'token', - 'revoke', - ], - }) - await npm.load(er => { - if (er) - throw er - t.equal(npm.title, 'npm token revoke') - t.equal(process.title, 'npm token revoke') - }) + process.argv = [ + process.execPath, + process.argv[1], + '--usage', + '--scope=foo', + 'token', + 'revoke', + ] + const { npm } = mockNpm(t) + await npm.load() + t.equal(npm.title, 'npm token revoke') + t.equal(process.title, 'npm token revoke') }) t.end() }) + +t.test('timings', t => { + const { npm, logs } = mockNpm(t) + process.emit('time', 'foo') + process.emit('time', 'bar') + t.match(npm.timers.get('foo'), Number, 'foo timer is a number') + t.match(npm.timers.get('bar'), Number, 'foo timer is a number') + process.emit('timeEnd', 'foo') + process.emit('timeEnd', 'bar') + process.emit('timeEnd', 'baz') + t.match(logs, [ + ['timing', 'foo', /Completed in [0-9]+ms/], + ['timing', 'bar', /Completed in [0-9]+ms/], + [ + 'silly', + 'timing', + "Tried to end timer that doesn't exist:", + 'baz', + ], + ]) + t.notOk(npm.timers.has('foo'), 'foo timer is gone') + t.notOk(npm.timers.has('bar'), 'bar timer is gone') + t.match(npm.timings, { foo: Number, bar: Number }) + t.end() +}) diff --git a/test/lib/org.js b/test/lib/org.js index d21df85d64312..156232ac22a5d 100644 --- 
a/test/lib/org.js +++ b/test/lib/org.js @@ -1,7 +1,7 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') const ansiTrim = require('../../lib/utils/ansi-trim.js') +const output = [] const npm = { flatOptions: { json: false, @@ -9,10 +9,11 @@ const npm = { silent: false, loglevel: 'info', }, + output: (msg) => { + output.push(msg) + }, } -const output = [] - let orgSize = 1 let orgSetArgs = null let orgRmArgs = null @@ -39,16 +40,13 @@ const libnpmorg = { }, } -const Org = requireInject('../../lib/org.js', { +const Org = t.mock('../../lib/org.js', { '../../lib/utils/otplease.js': async (opts, fn) => fn(opts), - '../../lib/utils/output.js': (msg) => { - output.push(msg) - }, libnpmorg, }) const org = new Org(npm) -test('completion', async t => { +t.test('completion', async t => { const completion = (argv) => org.completion({ conf: { argv: { remain: argv } } }) @@ -66,14 +64,14 @@ test('completion', async t => { t.rejects(completion(['npm', 'org', 'flurb']), /flurb not recognized/, 'errors for unknown subcommand') }) -test('npm org - invalid subcommand', t => { +t.test('npm org - invalid subcommand', t => { org.exec(['foo'], (err) => { t.match(err, /npm org set/, 'prints usage information') t.end() }) }) -test('npm org add', t => { +t.test('npm org add', t => { t.teardown(() => { orgSetArgs = null output.length = 0 @@ -94,7 +92,7 @@ test('npm org add', t => { }) }) -test('npm org add - no org', t => { +t.test('npm org add - no org', t => { t.teardown(() => { orgSetArgs = null output.length = 0 @@ -106,7 +104,7 @@ test('npm org add - no org', t => { }) }) -test('npm org add - no user', t => { +t.test('npm org add - no user', t => { t.teardown(() => { orgSetArgs = null output.length = 0 @@ -118,7 +116,7 @@ test('npm org add - no user', t => { }) }) -test('npm org add - invalid role', t => { +t.test('npm org add - invalid role', t => { t.teardown(() => { orgSetArgs = null output.length = 0 @@ -130,7 +128,7 @@ test('npm org add - invalid role', t => { }) }) -test('npm org add - more users', t => { +t.test('npm org add - more users', t => { orgSize = 5 t.teardown(() => { orgSize = 1 @@ -153,7 +151,7 @@ test('npm org add - more users', t => { }) }) -test('npm org add - json output', t => { +t.test('npm org add - json output', t => { npm.flatOptions.json = true t.teardown(() => { npm.flatOptions.json = false @@ -183,7 +181,7 @@ test('npm org add - json output', t => { }) }) -test('npm org add - parseable output', t => { +t.test('npm org add - parseable output', t => { npm.flatOptions.parseable = true t.teardown(() => { npm.flatOptions.parseable = false @@ -209,7 +207,7 @@ test('npm org add - parseable output', t => { }) }) -test('npm org add - silent output', t => { +t.test('npm org add - silent output', t => { npm.flatOptions.silent = true t.teardown(() => { npm.flatOptions.silent = false @@ -232,7 +230,7 @@ test('npm org add - silent output', t => { }) }) -test('npm org rm', t => { +t.test('npm org rm', t => { t.teardown(() => { orgRmArgs = null orgLsArgs = null @@ -257,7 +255,7 @@ test('npm org rm', t => { }) }) -test('npm org rm - no org', t => { +t.test('npm org rm - no org', t => { t.teardown(() => { orgRmArgs = null orgLsArgs = null @@ -270,7 +268,7 @@ test('npm org rm - no org', t => { }) }) -test('npm org rm - no user', t => { +t.test('npm org rm - no user', t => { t.teardown(() => { orgRmArgs = null orgLsArgs = null @@ -283,7 +281,7 @@ test('npm org rm - no user', t => { }) }) -test('npm org rm - one user left', t => { +t.test('npm 
org rm - one user left', t => { orgList = { one: 'developer', } @@ -313,7 +311,7 @@ test('npm org rm - one user left', t => { }) }) -test('npm org rm - json output', t => { +t.test('npm org rm - json output', t => { npm.flatOptions.json = true t.teardown(() => { npm.flatOptions.json = false @@ -345,7 +343,7 @@ test('npm org rm - json output', t => { }) }) -test('npm org rm - parseable output', t => { +t.test('npm org rm - parseable output', t => { npm.flatOptions.parseable = true t.teardown(() => { npm.flatOptions.parseable = false @@ -375,7 +373,7 @@ test('npm org rm - parseable output', t => { }) }) -test('npm org rm - silent output', t => { +t.test('npm org rm - silent output', t => { npm.flatOptions.silent = true t.teardown(() => { npm.flatOptions.silent = false @@ -402,7 +400,7 @@ test('npm org rm - silent output', t => { }) }) -test('npm org ls', t => { +t.test('npm org ls', t => { orgList = { one: 'developer', two: 'admin', @@ -430,7 +428,7 @@ test('npm org ls', t => { }) }) -test('npm org ls - user filter', t => { +t.test('npm org ls - user filter', t => { orgList = { username: 'admin', missing: 'admin', @@ -456,7 +454,7 @@ test('npm org ls - user filter', t => { }) }) -test('npm org ls - user filter, missing user', t => { +t.test('npm org ls - user filter, missing user', t => { orgList = { missing: 'admin', } @@ -481,7 +479,7 @@ test('npm org ls - user filter, missing user', t => { }) }) -test('npm org ls - no org', t => { +t.test('npm org ls - no org', t => { t.teardown(() => { orgLsArgs = null output.length = 0 @@ -493,7 +491,7 @@ test('npm org ls - no org', t => { }) }) -test('npm org ls - json output', t => { +t.test('npm org ls - json output', t => { npm.flatOptions.json = true orgList = { one: 'developer', @@ -520,7 +518,7 @@ test('npm org ls - json output', t => { }) }) -test('npm org ls - parseable output', t => { +t.test('npm org ls - parseable output', t => { npm.flatOptions.parseable = true orgList = { one: 'developer', @@ -552,7 +550,7 @@ test('npm org ls - parseable output', t => { }) }) -test('npm org ls - silent output', t => { +t.test('npm org ls - silent output', t => { npm.flatOptions.silent = true orgList = { one: 'developer', diff --git a/test/lib/outdated.js b/test/lib/outdated.js index aa8a1bcb6b3a5..34a0aa6c9e03e 100644 --- a/test/lib/outdated.js +++ b/test/lib/outdated.js @@ -1,10 +1,10 @@ const t = require('tap') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') const packument = spec => { const mocks = { - alpha: { - name: 'alpha', + cat: { + name: 'cat', 'dist-tags': { latest: '1.0.1', }, @@ -12,13 +12,13 @@ const packument = spec => { '1.0.1': { version: '1.0.1', dependencies: { - gamma: '2.0.0', + dog: '2.0.0', }, }, }, }, - beta: { - name: 'beta', + chai: { + name: 'chai', 'dist-tags': { latest: '1.0.1', }, @@ -28,8 +28,8 @@ const packument = spec => { }, }, }, - gamma: { - name: 'gamma', + dog: { + name: 'dog', 'dist-tags': { latest: '2.0.0', }, @@ -68,24 +68,15 @@ const packument = spec => { } let logs -const cleanLogs = (done) => { - logs = '' - const fn = (...args) => { - logs += '\n' - args.map(el => { - logs += el - return logs - }) - } - console.log = fn - done() +const output = (msg) => { + logs = `${logs}\n${msg}` } const globalDir = t.testdir({ node_modules: { - alpha: { + cat: { 'package.json': JSON.stringify({ - name: 'alpha', + name: 'cat', version: '1.0.0', }, null, 2), }, @@ -93,19 +84,23 @@ const globalDir = t.testdir({ }) const outdated = (dir, opts) => { - const Outdated = 
requireInject('../../lib/outdated.js', { + logs = '' + const Outdated = t.mock('../../lib/outdated.js', { pacote: { packument, }, }) - return new Outdated({ + const npm = mockNpm({ + ...opts, + localPrefix: dir, prefix: dir, globalDir: `${globalDir}/node_modules`, - flatOptions: opts, + output, }) + return new Outdated(npm) } -t.beforeEach(cleanLogs) +t.beforeEach(() => logs = '') const redactCwd = (path) => { const normalizePath = p => p @@ -123,8 +118,8 @@ t.test('should display outdated deps', t => { name: 'delta', version: '1.0.0', dependencies: { - alpha: '^1.0.0', - gamma: '^1.0.0', + cat: '^1.0.0', + dog: '^1.0.0', theta: '^1.0.0', }, devDependencies: { @@ -134,36 +129,36 @@ t.test('should display outdated deps', t => { lorem: '^1.0.0', }, peerDependencies: { - beta: '^1.0.0', + chai: '^1.0.0', }, }, null, 2), node_modules: { - alpha: { + cat: { 'package.json': JSON.stringify({ - name: 'alpha', + name: 'cat', version: '1.0.0', dependencies: { - gamma: '2.0.0', + dog: '2.0.0', }, }, null, 2), node_modules: { - gamma: { + dog: { 'package.json': JSON.stringify({ - name: 'gamma', + name: 'dog', version: '2.0.0', }, null, 2), }, }, }, - beta: { + chai: { 'package.json': JSON.stringify({ - name: 'beta', + name: 'chai', version: '1.0.0', }, null, 2), }, - gamma: { + dog: { 'package.json': JSON.stringify({ - name: 'gamma', + name: 'dog', version: '1.0.1', }, null, 2), }, @@ -178,7 +173,7 @@ t.test('should display outdated deps', t => { t.test('outdated global', t => { outdated(null, { - global: true, + config: { global: true }, }).exec([], () => { t.matchSnapshot(logs) t.end() @@ -187,7 +182,9 @@ t.test('should display outdated deps', t => { t.test('outdated', t => { outdated(testDir, { - global: false, + config: { + global: false, + }, color: true, }).exec([], () => { t.matchSnapshot(logs) @@ -197,9 +194,11 @@ t.test('should display outdated deps', t => { t.test('outdated --omit=dev', t => { outdated(testDir, { - global: false, + config: { + global: false, + omit: ['dev'], + }, color: true, - omit: ['dev'], }).exec([], () => { t.matchSnapshot(logs) t.end() @@ -208,9 +207,11 @@ t.test('should display outdated deps', t => { t.test('outdated --omit=dev --omit=peer', t => { outdated(testDir, { - global: false, + config: { + global: false, + omit: ['dev', 'peer'], + }, color: true, - omit: ['dev', 'peer'], }).exec([], () => { t.matchSnapshot(logs) t.end() @@ -219,9 +220,11 @@ t.test('should display outdated deps', t => { t.test('outdated --omit=prod', t => { outdated(testDir, { - global: false, + config: { + global: false, + omit: ['prod'], + }, color: true, - omit: ['prod'], }).exec([], () => { t.matchSnapshot(logs) t.end() @@ -230,8 +233,10 @@ t.test('should display outdated deps', t => { t.test('outdated --long', t => { outdated(testDir, { - global: false, - long: true, + config: { + global: false, + long: true, + }, }).exec([], () => { t.matchSnapshot(logs) t.end() @@ -240,8 +245,10 @@ t.test('should display outdated deps', t => { t.test('outdated --json', t => { outdated(testDir, { - global: false, - json: true, + config: { + global: false, + json: true, + }, }).exec([], () => { t.matchSnapshot(logs) t.end() @@ -250,9 +257,11 @@ t.test('should display outdated deps', t => { t.test('outdated --json --long', t => { outdated(testDir, { - global: false, - json: true, - long: true, + config: { + global: false, + json: true, + long: true, + }, }).exec([], () => { t.matchSnapshot(logs) t.end() @@ -261,8 +270,10 @@ t.test('should display outdated deps', t => { t.test('outdated --parseable', t => { 
outdated(testDir, { - global: false, - parseable: true, + config: { + global: false, + parseable: true, + }, }).exec([], () => { t.matchSnapshot(logs) t.end() @@ -271,9 +282,11 @@ t.test('should display outdated deps', t => { t.test('outdated --parseable --long', t => { outdated(testDir, { - global: false, - parseable: true, - long: true, + config: { + global: false, + parseable: true, + long: true, + }, }).exec([], () => { t.matchSnapshot(logs) t.end() @@ -282,7 +295,9 @@ t.test('should display outdated deps', t => { t.test('outdated --all', t => { outdated(testDir, { - all: true, + config: { + all: true, + }, }).exec([], () => { t.matchSnapshot(logs) t.end() @@ -291,8 +306,10 @@ t.test('should display outdated deps', t => { t.test('outdated specific dep', t => { outdated(testDir, { - global: false, - }).exec(['alpha'], () => { + config: { + global: false, + }, + }).exec(['cat'], () => { t.matchSnapshot(logs) t.end() }) @@ -307,13 +324,13 @@ t.test('should return if no outdated deps', t => { name: 'delta', version: '1.0.0', dependencies: { - alpha: '^1.0.0', + cat: '^1.0.0', }, }, null, 2), node_modules: { - alpha: { + cat: { 'package.json': JSON.stringify({ - name: 'alpha', + name: 'cat', version: '1.0.1', }, null, 2), }, @@ -323,7 +340,7 @@ t.test('should return if no outdated deps', t => { outdated(testDir, { global: false, }).exec([], () => { - t.equals(logs.length, 0, 'no logs') + t.equal(logs.length, 0, 'no logs') t.end() }) }) @@ -350,7 +367,7 @@ t.test('throws if error with a dep', t => { outdated(testDir, { global: false, }).exec([], (err) => { - t.equals(err.message, 'There is an error with this package.') + t.equal(err.message, 'There is an error with this package.') t.end() }) }) @@ -361,7 +378,7 @@ t.test('should skip missing non-prod deps', t => { name: 'delta', version: '1.0.0', devDependencies: { - beta: '^1.0.0', + chai: '^1.0.0', }, }, null, 2), node_modules: {}, @@ -370,7 +387,7 @@ t.test('should skip missing non-prod deps', t => { outdated(testDir, { global: false, }).exec([], () => { - t.equals(logs.length, 0, 'no logs') + t.equal(logs.length, 0, 'no logs') t.end() }) }) @@ -381,13 +398,13 @@ t.test('should skip invalid pkg ranges', t => { name: 'delta', version: '1.0.0', dependencies: { - alpha: '>=^2', + cat: '>=^2', }, }, null, 2), node_modules: { - alpha: { + cat: { 'package.json': JSON.stringify({ - name: 'alpha', + name: 'cat', version: '1.0.0', }, null, 2), }, @@ -395,7 +412,7 @@ t.test('should skip invalid pkg ranges', t => { }) outdated(testDir, {}).exec([], () => { - t.equals(logs.length, 0, 'no logs') + t.equal(logs.length, 0, 'no logs') t.end() }) }) @@ -406,13 +423,13 @@ t.test('should skip git specs', t => { name: 'delta', version: '1.0.0', dependencies: { - alpha: 'github:username/foo', + cat: 'github:username/foo', }, }, null, 2), node_modules: { - alpha: { + cat: { 'package.json': JSON.stringify({ - name: 'alpha', + name: 'cat', version: '1.0.0', }, null, 2), }, @@ -420,7 +437,239 @@ t.test('should skip git specs', t => { }) outdated(testDir, {}).exec([], () => { - t.equals(logs.length, 0, 'no logs') + t.equal(logs.length, 0, 'no logs') t.end() }) }) + +t.test('workspaces', async t => { + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'workspaces-project', + version: '1.0.0', + workspaces: ['packages/*'], + dependencies: { + dog: '^1.0.0', + }, + }), + node_modules: { + a: t.fixture('symlink', '../packages/a'), + b: t.fixture('symlink', '../packages/b'), + c: t.fixture('symlink', '../packages/c'), + cat: { + 'package.json': 
JSON.stringify({ + name: 'cat', + version: '1.0.0', + dependencies: { + dog: '2.0.0', + }, + }), + node_modules: { + dog: { + 'package.json': JSON.stringify({ + name: 'dog', + version: '2.0.0', + }), + }, + }, + }, + chai: { + 'package.json': JSON.stringify({ + name: 'chai', + version: '1.0.0', + }), + }, + dog: { + 'package.json': JSON.stringify({ + name: 'dog', + version: '1.0.1', + }), + }, + foo: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + dependencies: { + chai: '^1.0.0', + }, + }), + }, + zeta: { + 'package.json': JSON.stringify({ + name: 'zeta', + version: '1.0.0', + }), + }, + }, + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + dependencies: { + b: '^1.0.0', + cat: '^1.0.0', + foo: '^1.0.0', + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + dependencies: { + zeta: '^1.0.0', + }, + }), + }, + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + dependencies: { + theta: '^1.0.0', + }, + }), + }, + }, + }) + + await new Promise((res, rej) => { + outdated(testDir, {}).exec([], err => { + if (err) + rej(err) + + t.matchSnapshot(logs, 'should display ws outdated deps human output') + res() + }) + }) + + await new Promise((res, rej) => { + outdated(testDir, { + config: { + json: true, + }, + }).exec([], err => { + if (err) + rej(err) + + t.matchSnapshot(logs, 'should display ws outdated deps json output') + res() + }) + }) + + await new Promise((res, rej) => { + outdated(testDir, { + config: { + parseable: true, + }, + }).exec([], err => { + if (err) + rej(err) + + t.matchSnapshot(logs, 'should display ws outdated deps parseable output') + res() + }) + }) + + await new Promise((res, rej) => { + outdated(testDir, { + config: { + all: true, + }, + }).exec([], err => { + if (err) + rej(err) + + t.matchSnapshot(logs, 'should display all dependencies') + res() + }) + }) + + await new Promise((res, rej) => { + outdated(testDir, { + color: true, + }).exec([], err => { + if (err) + rej(err) + + t.matchSnapshot(logs, 'should highlight ws in dependend by section') + res() + }) + }) + + await new Promise((res, rej) => { + outdated(testDir, {}).execWorkspaces([], ['a'], err => { + if (err) + rej(err) + + t.matchSnapshot(logs, 'should display results filtered by ws') + res() + }) + }) + + await new Promise((res, rej) => { + outdated(testDir, { + config: { + json: true, + }, + }).execWorkspaces([], ['a'], err => { + if (err) + rej(err) + + t.matchSnapshot(logs, 'should display json results filtered by ws') + res() + }) + }) + + await new Promise((res, rej) => { + outdated(testDir, { + config: { + parseable: true, + }, + }).execWorkspaces([], ['a'], err => { + if (err) + rej(err) + + t.matchSnapshot(logs, 'should display parseable results filtered by ws') + res() + }) + }) + + await new Promise((res, rej) => { + outdated(testDir, { + config: { + all: true, + }, + }).execWorkspaces([], ['a'], err => { + if (err) + rej(err) + + t.matchSnapshot(logs, + 'should display nested deps when filtering by ws and using --all') + res() + }) + }) + + await new Promise((res, rej) => { + outdated(testDir, {}).execWorkspaces([], ['b'], err => { + if (err) + rej(err) + + t.matchSnapshot(logs, + 'should display no results if ws has no deps to display') + res() + }) + }) + + await new Promise((res, rej) => { + outdated(testDir, {}).execWorkspaces([], ['c'], err => { + if (err) + rej(err) + + t.matchSnapshot(logs, + 'should display missing deps when filtering by ws') + res() + }) + }) +}) 
diff --git a/test/lib/owner.js b/test/lib/owner.js index 4f8f430886b7e..32944a84edbc4 100644 --- a/test/lib/owner.js +++ b/test/lib/owner.js @@ -1,12 +1,18 @@ -const requireInject = require('require-inject') const t = require('tap') +const { fake: mockNpm } = require('../fixtures/mock-npm.js') let result = '' -let readLocalPkgResponse = null +let readPackageNamePrefix = null +let readPackageNameResponse = null const noop = () => null -const npm = { flatOptions: {} } +const npm = mockNpm({ + output: (msg) => { + result = result ? `${result}\n${msg}` : msg + }, +}) + const npmFetch = { json: noop } const npmlog = { error: noop, info: noop, verbose: noop } const pacote = { packument: noop } @@ -15,11 +21,11 @@ const mocks = { npmlog, 'npm-registry-fetch': npmFetch, pacote, - '../../lib/utils/output.js': (...msg) => { - result += msg.join('\n') - }, '../../lib/utils/otplease.js': async (opts, fn) => fn({ otp: '123456', opts }), - '../../lib/utils/read-local-package.js': async () => readLocalPkgResponse, + '../../lib/utils/read-package-name.js': async (prefix) => { + readPackageNamePrefix = prefix + return readPackageNameResponse + }, '../../lib/utils/usage.js': () => 'usage instructions', } @@ -30,7 +36,7 @@ const npmcliMaintainers = [ { email: 'i@izs.me', name: 'isaacs' }, ] -const Owner = requireInject('../../lib/owner.js', mocks) +const Owner = t.mock('../../lib/owner.js', mocks) const owner = new Owner(npm) t.test('owner no args', t => { @@ -40,21 +46,17 @@ t.test('owner no args', t => { }) owner.exec([], err => { - t.equal( - err.message, - 'usage instructions', - 'should throw usage instructions' - ) + t.match(err, /usage instructions/, 'should not error out on empty locations') t.end() }) }) t.test('owner ls no args', t => { - t.plan(4) + t.plan(5) result = '' - readLocalPkgResponse = '@npmcli/map-workspaces' + readPackageNameResponse = '@npmcli/map-workspaces' pacote.packument = async (spec, opts) => { t.equal(spec.name, '@npmcli/map-workspaces', 'should use expect pkg name') t.match( @@ -68,14 +70,29 @@ t.test('owner ls no args', t => { return { maintainers: npmcliMaintainers } } t.teardown(() => { + npm.prefix = null result = '' pacote.packument = noop - readLocalPkgResponse = null + readPackageNameResponse = null }) + npm.prefix = 'test-npm-prefix' owner.exec(['ls'], err => { - t.ifError(err, 'npm owner ls no args') + t.error(err, 'npm owner ls no args') t.matchSnapshot(result, 'should output owners of cwd package') + t.equal(readPackageNamePrefix, 'test-npm-prefix', 'read-package-name gets npm.prefix') + }) +}) + +t.test('owner ls global', t => { + t.teardown(() => { + npm.config.set('global', false) + }) + npm.config.set('global', true) + + owner.exec(['ls'], err => { + t.match(err, /usage instructions/, 'should throw usage instructions if no cwd package available') + t.end() }) }) @@ -87,11 +104,7 @@ t.test('owner ls no args no cwd package', t => { }) owner.exec(['ls'], err => { - t.equal( - err.message, - 'usage instructions', - 'should throw usage instructions if no cwd package available' - ) + t.match(err, /usage instructions/, 'should throw usage instructions if no cwd package available') t.end() }) }) @@ -100,7 +113,7 @@ t.test('owner ls fails to retrieve packument', t => { t.plan(4) result = '' - readLocalPkgResponse = '@npmcli/map-workspaces' + readPackageNameResponse = '@npmcli/map-workspaces' pacote.packument = () => { throw new Error('ERR') } @@ -146,7 +159,7 @@ t.test('owner ls <pkg>', t => { }) owner.exec(['ls', '@npmcli/map-workspaces'], err => { - t.ifError(err, 
'npm owner ls <pkg>') + t.error(err, 'npm owner ls <pkg>') t.matchSnapshot(result, 'should output owners of <pkg>') }) }) @@ -162,7 +175,7 @@ t.test('owner ls <pkg> no maintainers', t => { }) owner.exec(['ls', '@npmcli/map-workspaces'], err => { - t.ifError(err, 'npm owner ls <pkg> no maintainers') + t.error(err, 'npm owner ls <pkg> no maintainers') t.equal(result, 'no admin found', 'should output no admint found msg') t.end() }) @@ -196,7 +209,7 @@ t.test('owner add <user> <pkg>', t => { name: '@npmcli/map-workspaces', }, }, 'should use expected opts') - t.deepEqual( + t.same( opts.body.maintainers, [ ...npmcliMaintainers, @@ -233,14 +246,14 @@ t.test('owner add <user> <pkg>', t => { }) owner.exec(['add', 'foo', '@npmcli/map-workspaces'], err => { - t.ifError(err, 'npm owner add <user> <pkg>') + t.error(err, 'npm owner add <user> <pkg>') t.equal(result, '+ foo (@npmcli/map-workspaces)', 'should output add result') }) }) t.test('owner add <user> cwd package', t => { result = '' - readLocalPkgResponse = '@npmcli/map-workspaces' + readPackageNameResponse = '@npmcli/map-workspaces' npmFetch.json = async (uri, opts) => { // retrieve user info from couchdb request if (uri === '/-/user/org.couchdb.user:foo') { @@ -260,13 +273,13 @@ t.test('owner add <user> cwd package', t => { }) t.teardown(() => { result = '' - readLocalPkgResponse = null + readPackageNameResponse = null npmFetch.json = noop pacote.packument = noop }) owner.exec(['add', 'foo'], err => { - t.ifError(err, 'npm owner add <user> cwd package') + t.error(err, 'npm owner add <user> cwd package') t.equal(result, '+ foo (@npmcli/map-workspaces)', 'should output add result') t.end() }) @@ -309,13 +322,13 @@ t.test('owner add <user> <pkg> already an owner', t => { }) owner.exec(['add', 'ruyadorno', '@npmcli/map-workspaces'], err => { - t.ifError(err, 'npm owner add <user> <pkg> already an owner') + t.error(err, 'npm owner add <user> <pkg> already an owner') }) }) t.test('owner add <user> <pkg> fails to retrieve user', t => { result = '' - readLocalPkgResponse = + readPackageNameResponse = npmFetch.json = async (uri, opts) => { // retrieve borked user info from couchdb request if (uri === '/-/user/org.couchdb.user:foo') @@ -331,7 +344,7 @@ t.test('owner add <user> <pkg> fails to retrieve user', t => { }) t.teardown(() => { result = '' - readLocalPkgResponse = null + readPackageNameResponse = null npmFetch.json = noop pacote.packument = noop }) @@ -454,7 +467,7 @@ t.test('owner add <user> <pkg> no previous maintainers property from server', t }) owner.exec(['add', 'foo', '@npmcli/no-owners-pkg'], err => { - t.ifError(err, 'npm owner add <user> <pkg>') + t.error(err, 'npm owner add <user> <pkg>') t.equal(result, '+ foo (@npmcli/no-owners-pkg)', 'should output add result') t.end() }) @@ -467,11 +480,19 @@ t.test('owner add no user', t => { }) owner.exec(['add'], err => { - t.equal( - err.message, - 'usage instructions', - 'should throw usage instructions if no user provided' - ) + t.match(err, /usage instructions/, 'should throw usage instructions if user provided') + t.end() + }) +}) + +t.test('owner add no pkg global', t => { + t.teardown(() => { + npm.config.set('global', false) + }) + npm.config.set('global', true) + + owner.exec(['add', 'gar'], err => { + t.match(err, /usage instructions/, 'should throw usage instructions if user provided') t.end() }) }) @@ -483,11 +504,7 @@ t.test('owner add <user> no cwd package', t => { }) owner.exec(['add', 'foo'], err => { - t.equal( - err.message, - 'usage instructions', - 'should throw usage 
instructions if no user provided' - ) + t.match(err, /usage instructions/, 'should throw usage instructions if no user provided') t.end() }) }) @@ -519,7 +536,7 @@ t.test('owner rm <user> <pkg>', t => { name: '@npmcli/map-workspaces', }, }, 'should use expected opts') - t.deepEqual( + t.same( opts.body.maintainers, npmcliMaintainers.filter(m => m.name !== 'ruyadorno'), 'should contain expected new owners, removing requested user' @@ -550,7 +567,7 @@ t.test('owner rm <user> <pkg>', t => { }) owner.exec(['rm', 'ruyadorno', '@npmcli/map-workspaces'], err => { - t.ifError(err, 'npm owner rm <user> <pkg>') + t.error(err, 'npm owner rm <user> <pkg>') t.equal(result, '- ruyadorno (@npmcli/map-workspaces)', 'should output rm result') }) }) @@ -590,13 +607,13 @@ t.test('owner rm <user> <pkg> not a current owner', t => { }) owner.exec(['rm', 'foo', '@npmcli/map-workspaces'], err => { - t.ifError(err, 'npm owner rm <user> <pkg> not a current owner') + t.error(err, 'npm owner rm <user> <pkg> not a current owner') }) }) t.test('owner rm <user> cwd package', t => { result = '' - readLocalPkgResponse = '@npmcli/map-workspaces' + readPackageNameResponse = '@npmcli/map-workspaces' npmFetch.json = async (uri, opts) => { // retrieve user info from couchdb request if (uri === '/-/user/org.couchdb.user:ruyadorno') { @@ -616,13 +633,13 @@ t.test('owner rm <user> cwd package', t => { }) t.teardown(() => { result = '' - readLocalPkgResponse = null + readPackageNameResponse = null npmFetch.json = noop pacote.packument = noop }) owner.exec(['rm', 'ruyadorno'], err => { - t.ifError(err, 'npm owner rm <user> cwd package') + t.error(err, 'npm owner rm <user> cwd package') t.equal(result, '- ruyadorno (@npmcli/map-workspaces)', 'should output rm result') t.end() }) @@ -630,7 +647,7 @@ t.test('owner rm <user> cwd package', t => { t.test('owner rm <user> only user', t => { result = '' - readLocalPkgResponse = 'ipt' + readPackageNameResponse = 'ipt' npmFetch.json = async (uri, opts) => { // retrieve user info from couchdb request if (uri === '/-/user/org.couchdb.user:ruyadorno') { @@ -651,7 +668,7 @@ t.test('owner rm <user> only user', t => { }) t.teardown(() => { result = '' - readLocalPkgResponse = null + readPackageNameResponse = null npmFetch.json = noop pacote.packument = noop }) @@ -674,11 +691,19 @@ t.test('owner rm no user', t => { }) owner.exec(['rm'], err => { - t.equal( - err.message, - 'usage instructions', - 'should throw usage instructions if no user provided to rm' - ) + t.match(err, /usage instructions/, 'should throw usage instructions if no user provided to rm') + t.end() + }) +}) + +t.test('owner rm no pkg global', t => { + t.teardown(() => { + npm.config.set('global', false) + }) + npm.config.set('global', true) + + owner.exec(['rm', 'gar'], err => { + t.match(err, /usage instructions/, 'should throw usage instructions if user provided') t.end() }) }) @@ -690,11 +715,7 @@ t.test('owner rm <user> no cwd package', t => { }) owner.exec(['rm', 'foo'], err => { - t.equal( - err.message, - 'usage instructions', - 'should throw usage instructions if no user provided to rm' - ) + t.match(err, /usage instructions/, 'should throw usage instructions if no user provided to rm') t.end() }) }) @@ -716,15 +737,15 @@ t.test('completion', async t => { // npm owner rm completion is async t.test('completion npm owner rm', async t => { t.plan(2) - readLocalPkgResponse = '@npmcli/map-workspaces' + readPackageNameResponse = '@npmcli/map-workspaces' pacote.packument = async spec => { - t.equal(spec.name, 
readLocalPkgResponse, 'should use package spec') + t.equal(spec.name, readPackageNameResponse, 'should use package spec') return { maintainers: npmcliMaintainers, } } t.teardown(() => { - readLocalPkgResponse = null + readPackageNameResponse = null pacote.packument = noop }) @@ -741,17 +762,27 @@ t.test('completion', async t => { t.end() }) + t.test('completion npm owner rm global', async t => { + t.teardown(() => { + npm.config.set('global', false) + }) + npm.config.set('global', true) + const res = await owner.completion({ conf: { argv: { remain: ['npm', 'owner', 'rm'] } } }) + t.strictSame(res, [], 'should have no owners to autocomplete if global') + t.end() + }) + t.test('completion npm owner rm no owners found', async t => { t.plan(2) - readLocalPkgResponse = '@npmcli/map-workspaces' + readPackageNameResponse = '@npmcli/map-workspaces' pacote.packument = async spec => { - t.equal(spec.name, readLocalPkgResponse, 'should use package spec') + t.equal(spec.name, readPackageNameResponse, 'should use package spec') return { maintainers: [], } } t.teardown(() => { - readLocalPkgResponse = null + readPackageNameResponse = null pacote.packument = noop }) diff --git a/test/lib/pack.js b/test/lib/pack.js index 73a19baa3ef73..3d61abdaf74ca 100644 --- a/test/lib/pack.js +++ b/test/lib/pack.js @@ -1,5 +1,7 @@ const t = require('tap') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') +const pacote = require('pacote') +const path = require('path') const OUTPUT = [] const output = (...msg) => OUTPUT.push(msg) @@ -10,40 +12,85 @@ const libnpmpack = async (spec, opts) => { return '' } +const mockPacote = { + manifest: (spec) => { + if (spec.type === 'directory') + return pacote.manifest(spec) + const m = { + name: spec.name || 'test-package', + version: spec.version || '1.0.0-test', + } + m._id = `${m.name}@${m.version}` + return m + }, +} -t.afterEach(cb => { - OUTPUT.length = 0 - cb() -}) +t.afterEach(() => OUTPUT.length = 0) t.test('should pack current directory with no arguments', (t) => { - const Pack = requireInject('../../lib/pack.js', { - '../../lib/utils/output.js': output, + let tarballFileName + const Pack = t.mock('../../lib/pack.js', { libnpmpack, npmlog: { notice: () => {}, showProgress: () => {}, clearProgress: () => {}, }, - }) - const pack = new Pack({ - flatOptions: { - unicode: false, - json: false, - dryRun: false, + fs: { + writeFile: (file, data, cb) => { + tarballFileName = file + cb() + }, }, }) + const npm = mockNpm({ + output, + }) + const pack = new Pack(npm) - pack.exec([], er => { - if (er) - throw er + pack.exec([], err => { + t.error(err, { bail: true }) const filename = `npm-${require('../../package.json').version}.tgz` t.strictSame(OUTPUT, [[filename]]) + t.strictSame(tarballFileName, path.resolve(filename)) t.end() }) }) +t.test('follows pack-destination config', (t) => { + let tarballFileName + const Pack = t.mock('../../lib/pack.js', { + libnpmpack, + npmlog: { + notice: () => {}, + showProgress: () => {}, + clearProgress: () => {}, + }, + fs: { + writeFile: (file, data, cb) => { + tarballFileName = file + cb() + }, + }, + }) + const npm = mockNpm({ + config: { + 'pack-destination': '/tmp/test', + }, + output, + }) + const pack = new Pack(npm) + + pack.exec([], err => { + t.error(err, { bail: true }) + + const filename = `npm-${require('../../package.json').version}.tgz` + t.strictSame(OUTPUT, [[filename]]) + t.strictSame(tarballFileName, path.resolve('/tmp/test', filename)) + t.end() + }) +}) t.test('should 
pack given directory', (t) => { const testDir = t.testdir({ 'package.json': JSON.stringify({ @@ -52,26 +99,29 @@ t.test('should pack given directory', (t) => { }, null, 2), }) - const Pack = requireInject('../../lib/pack.js', { - '../../lib/utils/output.js': output, + const Pack = t.mock('../../lib/pack.js', { libnpmpack, npmlog: { notice: () => {}, showProgress: () => {}, clearProgress: () => {}, }, + fs: { + writeFile: (file, data, cb) => cb(), + }, }) - const pack = new Pack({ - flatOptions: { + const npm = mockNpm({ + config: { unicode: true, - json: true, - dryRun: true, + json: false, + 'dry-run': true, }, + output, }) + const pack = new Pack(npm) - pack.exec([testDir], er => { - if (er) - throw er + pack.exec([testDir], err => { + t.error(err, { bail: true }) const filename = 'my-cool-pkg-1.0.0.tgz' t.strictSame(OUTPUT, [[filename]]) @@ -87,26 +137,29 @@ t.test('should pack given directory for scoped package', (t) => { }, null, 2), }) - const Pack = requireInject('../../lib/pack.js', { - '../../lib/utils/output.js': output, + const Pack = t.mock('../../lib/pack.js', { libnpmpack, npmlog: { notice: () => {}, showProgress: () => {}, clearProgress: () => {}, }, + fs: { + writeFile: (file, data, cb) => cb(), + }, }) - const pack = new Pack({ - flatOptions: { + const npm = mockNpm({ + config: { unicode: true, - json: true, - dryRun: true, + json: false, + 'dry-run': true, }, + output, }) + const pack = new Pack(npm) - return pack.exec([testDir], er => { - if (er) - throw er + return pack.exec([testDir], err => { + t.error(err, { bail: true }) const filename = 'cool-my-pkg-1.0.0.tgz' t.strictSame(OUTPUT, [[filename]]) @@ -115,8 +168,7 @@ t.test('should pack given directory for scoped package', (t) => { }) t.test('should log pack contents', (t) => { - const Pack = requireInject('../../lib/pack.js', { - '../../lib/utils/output.js': output, + const Pack = t.mock('../../lib/pack.js', { '../../lib/utils/tar.js': { ...require('../../lib/utils/tar.js'), logTar: () => { @@ -129,21 +181,241 @@ t.test('should log pack contents', (t) => { showProgress: () => {}, clearProgress: () => {}, }, + fs: { + writeFile: (file, data, cb) => cb(), + }, }) - const pack = new Pack({ - flatOptions: { + const npm = mockNpm({ + config: { unicode: false, json: false, - dryRun: false, + 'dry-run': false, }, + output, }) + const pack = new Pack(npm) - pack.exec([], er => { - if (er) - throw er + pack.exec([], err => { + t.error(err, { bail: true }) const filename = `npm-${require('../../package.json').version}.tgz` t.strictSame(OUTPUT, [[filename]]) t.end() }) }) + +t.test('should log output as valid json', (t) => { + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'my-cool-pkg', + version: '1.0.0', + main: './index.js', + }, null, 2), + 'README.md': 'text', + 'index.js': 'void', + }) + + const Pack = t.mock('../../lib/pack.js', { + libnpmpack, + '../../lib/utils/tar.js': { + getContents: async () => ({ + id: '@ruyadorno/redact@1.0.0', + name: '@ruyadorno/redact', + version: '1.0.0', + size: 2450, + unpackedSize: 4911, + shasum: '044c7574639b923076069d6e801e2d1866430f17', + // mocks exactly how ssri Integrity works: + integrity: { + sha512: [ + { + source: 'sha512-JSdyskeR2qonBUaQ4vdlU/vQGSfgCxSq5O+vH+d2yVWRqzso4O3gUzd6QX/V7OWV//zU7kA5o63Zf433jUnOtQ==', + digest: 'JSdyskeR2qonBUaQ4vdlU/vQGSfgCxSq5O+vH+d2yVWRqzso4O3gUzd6QX/V7OWV//zU7kA5o63Zf433jUnOtQ==', + algorithm: 'sha512', + options: [], + }, + ], + toJSON () { + return 
'sha512-JSdyskeR2qonBUaQ4vdlU/vQGSfgCxSq5O+vH+d2yVWRqzso4O3gUzd6QX/V7OWV//zU7kA5o63Zf433jUnOtQ==' + }, + }, + filename: '@ruyadorno/redact-1.0.0.tgz', + files: [ + { path: 'LICENSE', size: 1113, mode: 420 }, + { path: 'README.md', size: 2639, mode: 420 }, + { path: 'index.js', size: 719, mode: 493 }, + { path: 'package.json', size: 440, mode: 420 }, + ], + entryCount: 4, + bundled: [], + }), + }, + npmlog: { + notice: () => {}, + showProgress: () => {}, + clearProgress: () => {}, + }, + fs: { + writeFile: (file, data, cb) => cb(), + }, + }) + const npm = mockNpm({ + config: { + unicode: true, + json: true, + 'dry-run': true, + }, + output, + }) + const pack = new Pack(npm) + + pack.exec([testDir], err => { + t.error(err, { bail: true }) + + t.match(JSON.parse(OUTPUT), [{ + id: '@ruyadorno/redact@1.0.0', + name: '@ruyadorno/redact', + version: '1.0.0', + size: 2450, + unpackedSize: 4911, + shasum: '044c7574639b923076069d6e801e2d1866430f17', + integrity: 'sha512-JSdyskeR2qonBUaQ4vdlU/vQGSfgCxSq5O+vH+d2yVWRqzso4O3gUzd6QX/V7OWV//zU7kA5o63Zf433jUnOtQ==', + filename: '@ruyadorno/redact-1.0.0.tgz', + files: [ + { path: 'LICENSE' }, + { path: 'README.md' }, + { path: 'index.js' }, + { path: 'package.json' }, + ], + entryCount: 4, + }], 'pack details output as valid json') + + t.end() + }) +}) + +t.test('invalid packument', (t) => { + const mockPacote = { + manifest: () => { + return {} + }, + } + const Pack = t.mock('../../lib/pack.js', { + libnpmpack, + pacote: mockPacote, + npmlog: { + notice: () => {}, + showProgress: () => {}, + clearProgress: () => {}, + }, + fs: { + writeFile: (file, data, cb) => cb(), + }, + }) + const npm = mockNpm({ + config: { + unicode: true, + json: false, + 'dry-run': true, + }, + output, + }) + const pack = new Pack(npm) + pack.exec([], err => { + t.match(err, { message: 'Invalid package, must have name and version' }) + + t.strictSame(OUTPUT, []) + t.end() + }) +}) + +t.test('workspaces', (t) => { + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b'], + }, null, 2), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.0.0', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.0.0', + }), + }, + }) + const Pack = t.mock('../../lib/pack.js', { + libnpmpack, + pacote: mockPacote, + npmlog: { + notice: () => {}, + showProgress: () => {}, + clearProgress: () => {}, + }, + fs: { + writeFile: (file, data, cb) => cb(), + }, + }) + const npm = mockNpm({ + localPrefix: testDir, + config: { + unicode: false, + json: false, + 'dry-run': false, + }, + output, + }) + const pack = new Pack(npm) + + t.test('all workspaces', (t) => { + pack.execWorkspaces([], [], err => { + t.error(err, { bail: true }) + + t.strictSame(OUTPUT, [ + ['workspace-a-1.0.0.tgz'], + ['workspace-b-1.0.0.tgz'], + ]) + t.end() + }) + }) + + t.test('all workspaces, `.` first arg', (t) => { + pack.execWorkspaces(['.'], [], err => { + t.error(err, { bail: true }) + + t.strictSame(OUTPUT, [ + ['workspace-a-1.0.0.tgz'], + ['workspace-b-1.0.0.tgz'], + ]) + t.end() + }) + }) + + t.test('one workspace', (t) => { + pack.execWorkspaces([], ['workspace-a'], err => { + t.error(err, { bail: true }) + + t.strictSame(OUTPUT, [ + ['workspace-a-1.0.0.tgz'], + ]) + t.end() + }) + }) + + t.test('specific package', (t) => { + pack.execWorkspaces(['abbrev'], [], err => { + t.error(err, { bail: true }) + + t.strictSame(OUTPUT, [ + 
['abbrev-1.0.0-test.tgz'], + ]) + t.end() + }) + }) + t.end() +}) diff --git a/test/lib/ping.js b/test/lib/ping.js index cf47530749b33..f0a10718c46d0 100644 --- a/test/lib/ping.js +++ b/test/lib/ping.js @@ -1,14 +1,14 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') +const { fake: mockNpm } = require('../fixtures/mock-npm') -test('pings', (t) => { +t.test('pings', (t) => { t.plan(8) - const flatOptions = { registry: 'https://registry.npmjs.org' } + const registry = 'https://registry.npmjs.org' let noticeCalls = 0 - const Ping = requireInject('../../lib/ping.js', { + const Ping = t.mock('../../lib/ping.js', { '../../lib/utils/ping.js': function (spec) { - t.equal(spec, flatOptions, 'passes flatOptions') + t.equal(spec.registry, registry, 'passes flatOptions') return {} }, npmlog: { @@ -16,7 +16,7 @@ test('pings', (t) => { ++noticeCalls if (noticeCalls === 1) { t.equal(type, 'PING', 'should log a PING') - t.equal(spec, flatOptions.registry, 'should log the registry url') + t.equal(spec, registry, 'should log the registry url') } else { t.equal(type, 'PONG', 'should log a PONG') t.match(spec, /\d+ms/, 'should log the elapsed milliseconds') @@ -24,24 +24,28 @@ test('pings', (t) => { }, }, }) - const ping = new Ping({ flatOptions }) + const npm = mockNpm({ + config: { registry }, + flatOptions: { registry }, + }) + const ping = new Ping(npm) ping.exec([], (err) => { t.equal(noticeCalls, 2, 'should have logged 2 lines') - t.ifError(err, 'npm ping') + t.error(err, 'npm ping') t.ok('should be able to ping') }) }) -test('pings and logs details', (t) => { +t.test('pings and logs details', (t) => { t.plan(10) - const flatOptions = { registry: 'https://registry.npmjs.org' } + const registry = 'https://registry.npmjs.org' const details = { extra: 'data' } let noticeCalls = 0 - const Ping = requireInject('../../lib/ping.js', { + const Ping = t.mock('../../lib/ping.js', { '../../lib/utils/ping.js': function (spec) { - t.equal(spec, flatOptions, 'passes flatOptions') + t.equal(spec.registry, registry, 'passes flatOptions') return details }, npmlog: { @@ -49,7 +53,7 @@ test('pings and logs details', (t) => { ++noticeCalls if (noticeCalls === 1) { t.equal(type, 'PING', 'should log a PING') - t.equal(spec, flatOptions.registry, 'should log the registry url') + t.equal(spec, registry, 'should log the registry url') } else if (noticeCalls === 2) { t.equal(type, 'PONG', 'should log a PONG') t.match(spec, /\d+ms/, 'should log the elapsed milliseconds') @@ -61,38 +65,36 @@ test('pings and logs details', (t) => { }, }, }) - const ping = new Ping({ flatOptions }) + const npm = mockNpm({ + config: { registry }, + flatOptions: { registry }, + }) + const ping = new Ping(npm) ping.exec([], (err) => { t.equal(noticeCalls, 3, 'should have logged 3 lines') - t.ifError(err, 'npm ping') + t.error(err, 'npm ping') t.ok('should be able to ping') }) }) -test('pings and returns json', (t) => { +t.test('pings and returns json', (t) => { t.plan(11) - const flatOptions = { registry: 'https://registry.npmjs.org', json: true } + const registry = 'https://registry.npmjs.org' const details = { extra: 'data' } let noticeCalls = 0 - const Ping = requireInject('../../lib/ping.js', { + const Ping = t.mock('../../lib/ping.js', { '../../lib/utils/ping.js': function (spec) { - t.equal(spec, flatOptions, 'passes flatOptions') + t.equal(spec.registry, registry, 'passes flatOptions') return details }, - '../../lib/utils/output.js': function (spec) { - const parsed = JSON.parse(spec) 
- t.equal(parsed.registry, flatOptions.registry, 'returns the correct registry url') - t.match(parsed.details, details, 'prints returned details') - t.type(parsed.time, 'number', 'returns time as a number') - }, npmlog: { notice: (type, spec) => { ++noticeCalls if (noticeCalls === 1) { t.equal(type, 'PING', 'should log a PING') - t.equal(spec, flatOptions.registry, 'should log the registry url') + t.equal(spec, registry, 'should log the registry url') } else { t.equal(type, 'PONG', 'should log a PONG') t.match(spec, /\d+ms/, 'should log the elapsed milliseconds') @@ -100,11 +102,21 @@ test('pings and returns json', (t) => { }, }, }) - const ping = new Ping({ flatOptions }) + const npm = mockNpm({ + config: { registry, json: true }, + flatOptions: { registry }, + output: function (spec) { + const parsed = JSON.parse(spec) + t.equal(parsed.registry, registry, 'returns the correct registry url') + t.match(parsed.details, details, 'prints returned details') + t.type(parsed.time, 'number', 'returns time as a number') + }, + }) + const ping = new Ping(npm) ping.exec([], (err) => { t.equal(noticeCalls, 2, 'should have logged 2 lines') - t.ifError(err, 'npm ping') + t.error(err, 'npm ping') t.ok('should be able to ping') }) }) diff --git a/test/lib/pkg.js b/test/lib/pkg.js new file mode 100644 index 0000000000000..688df6859054a --- /dev/null +++ b/test/lib/pkg.js @@ -0,0 +1,737 @@ +const { resolve } = require('path') +const { readFileSync } = require('fs') +const t = require('tap') +const { fake: mockNpm } = require('../fixtures/mock-npm') + +const redactCwd = (path) => { + const normalizePath = p => p + .replace(/\\+/g, '/') + .replace(/\r\n/g, '\n') + return normalizePath(path) + .replace(new RegExp(normalizePath(process.cwd()), 'g'), '{CWD}') +} + +t.cleanSnapshot = (str) => redactCwd(str) + +let OUTPUT = '' +const config = { + global: false, + force: false, + 'pkg-cast': 'string', +} +const npm = mockNpm({ + localPrefix: t.testdirName, + config, + output: (str) => { + OUTPUT += str + }, +}) + +const Pkg = require('../../lib/pkg.js') +const pkg = new Pkg(npm) + +const readPackageJson = (path) => { + path = path || npm.localPrefix + return JSON.parse(readFileSync(resolve(path, 'package.json'), 'utf8')) +} + +t.afterEach(() => { + config.global = false + config.json = false + npm.localPrefix = t.testdirName + OUTPUT = '' +}) + +t.test('no args', t => { + pkg.exec([], err => { + t.match( + err, + { code: 'EUSAGE' }, + 'should throw usage error' + ) + t.end() + }) +}) + +t.test('no global mode', t => { + config.global = true + pkg.exec(['get', 'foo'], err => { + t.match( + err, + { code: 'EPKGGLOBAL' }, + 'should throw no global mode error' + ) + t.end() + }) +}) + +t.test('get no args', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + }), + }) + + pkg.exec(['get'], err => { + if (err) + throw err + + t.strictSame( + JSON.parse(OUTPUT), + { + name: 'foo', + version: '1.1.1', + }, + 'should print package.json content' + ) + t.end() + }) +}) + +t.test('get single arg', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + }), + }) + + pkg.exec(['get', 'version'], err => { + if (err) + throw err + + t.strictSame( + JSON.parse(OUTPUT), + '1.1.1', + 'should print retrieved package.json field' + ) + t.end() + }) +}) + +t.test('get nested arg', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + scripts: { + test: 'node test.js', 
+ }, + }), + }) + + pkg.exec(['get', 'scripts.test'], err => { + if (err) + throw err + + t.strictSame( + JSON.parse(OUTPUT), + 'node test.js', + 'should print retrieved nested field' + ) + t.end() + }) +}) + +t.test('get array field', t => { + const files = [ + 'index.js', + 'cli.js', + ] + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + files, + }), + }) + + pkg.exec(['get', 'files'], err => { + if (err) + throw err + + t.strictSame( + JSON.parse(OUTPUT), + files, + 'should print retrieved array field' + ) + t.end() + }) +}) + +t.test('get array item', t => { + const files = [ + 'index.js', + 'cli.js', + ] + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + files, + }), + }) + + pkg.exec(['get', 'files[0]'], err => { + if (err) + throw err + + t.strictSame( + JSON.parse(OUTPUT), + 'index.js', + 'should print retrieved array field' + ) + t.end() + }) +}) + +t.test('get array nested items notation', t => { + const contributors = [ + { + name: 'Ruy', + url: 'http://example.com/ruy', + }, + { + name: 'Gar', + url: 'http://example.com/gar', + }, + ] + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + contributors, + }), + }) + + pkg.exec(['get', 'contributors.name'], err => { + if (err) + throw err + + t.strictSame( + JSON.parse(OUTPUT), + { + 'contributors[0].name': 'Ruy', + 'contributors[1].name': 'Gar', + }, + 'should print json result containing matching results' + ) + t.end() + }) +}) + +t.test('set no args', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ name: 'foo' }), + }) + pkg.exec(['set'], err => { + t.match( + err, + { code: 'EPKGSET' }, + 'should throw an error if no args' + ) + + t.end() + }) +}) + +t.test('set missing value', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ name: 'foo' }), + }) + pkg.exec(['set', 'key='], err => { + t.match( + err, + { code: 'EPKGSET' }, + 'should throw an error if missing value' + ) + + t.end() + }) +}) + +t.test('set missing key', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ name: 'foo' }), + }) + pkg.exec(['set', '=value'], err => { + t.match( + err, + { code: 'EPKGSET' }, + 'should throw an error if missing key' + ) + + t.end() + }) +}) + +t.test('set single field', t => { + const json = { + name: 'foo', + version: '1.1.1', + } + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify(json), + }) + + pkg.exec(['set', 'description=Awesome stuff'], err => { + if (err) + throw err + + t.strictSame( + readPackageJson(), + { + ...json, + description: 'Awesome stuff', + }, + 'should add single field to package.json' + ) + t.end() + }) +}) + +t.test('push to array syntax', t => { + const json = { + name: 'foo', + version: '1.1.1', + keywords: [ + 'foo', + ], + } + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify(json), + }) + + pkg.exec(['set', 'keywords[]=bar', 'keywords[]=baz'], err => { + if (err) + throw err + + t.strictSame( + readPackageJson(), + { + ...json, + keywords: [ + 'foo', + 'bar', + 'baz', + ], + }, + 'should append to arrays using empty bracket syntax' + ) + t.end() + }) +}) + +t.test('set multiple fields', t => { + const json = { + name: 'foo', + version: '1.1.1', + } + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify(json), + }) + + pkg.exec(['set', 'bin.foo=foo.js', 'scripts.test=node test.js'], err => { + if (err) + throw err + + t.strictSame( + 
readPackageJson(), + { + ...json, + bin: { + foo: 'foo.js', + }, + scripts: { + test: 'node test.js', + }, + }, + 'should add single field to package.json' + ) + t.end() + }) +}) + +t.test('set = separate value', t => { + const json = { + name: 'foo', + version: '1.1.1', + } + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify(json), + }) + + pkg.exec(['set', 'tap[test-env][0]=LC_ALL=sk'], err => { + if (err) + throw err + + t.strictSame( + readPackageJson(), + { + ...json, + tap: { + 'test-env': [ + 'LC_ALL=sk', + ], + }, + }, + 'should add single field to package.json' + ) + t.end() + }) +}) + +t.test('set --json', async t => { + config.json = true + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + }), + }) + + await new Promise((res, rej) => { + pkg.exec(['set', 'private=true'], err => { + if (err) + rej(err) + + t.strictSame( + readPackageJson(), + { + name: 'foo', + version: '1.1.1', + private: true, + }, + 'should add boolean field to package.json' + ) + res() + }) + }) + + await new Promise((res, rej) => { + pkg.exec(['set', 'tap.timeout=60'], err => { + if (err) + rej(err) + + t.strictSame( + readPackageJson(), + { + name: 'foo', + version: '1.1.1', + private: true, + tap: { + timeout: 60, + }, + }, + 'should add number field to package.json' + ) + res() + }) + }) + + await new Promise((res, rej) => { + pkg.exec(['set', 'foo={ "bar": { "baz": "BAZ" } }'], err => { + if (err) + rej(err) + + t.strictSame( + readPackageJson(), + { + name: 'foo', + version: '1.1.1', + private: true, + tap: { + timeout: 60, + }, + foo: { + bar: { + baz: 'BAZ', + }, + }, + }, + 'should add object field to package.json' + ) + res() + }) + }) + + await new Promise((res, rej) => { + pkg.exec(['set', 'workspaces=["packages/*"]'], err => { + if (err) + rej(err) + + t.strictSame( + readPackageJson(), + { + name: 'foo', + version: '1.1.1', + private: true, + workspaces: [ + 'packages/*', + ], + tap: { + timeout: 60, + }, + foo: { + bar: { + baz: 'BAZ', + }, + }, + }, + 'should add object field to package.json' + ) + res() + }) + }) + + await new Promise((res, rej) => { + pkg.exec(['set', 'description="awesome"'], err => { + if (err) + rej(err) + + t.strictSame( + readPackageJson(), + { + name: 'foo', + version: '1.1.1', + description: 'awesome', + private: true, + workspaces: [ + 'packages/*', + ], + tap: { + timeout: 60, + }, + foo: { + bar: { + baz: 'BAZ', + }, + }, + }, + 'should add object field to package.json' + ) + res() + }) + }) +}) + +t.test('delete no args', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ name: 'foo' }), + }) + pkg.exec(['delete'], err => { + t.match( + err, + { code: 'EPKGDELETE' }, + 'should throw an error if deleting no args' + ) + + t.end() + }) +}) + +t.test('delete invalid key', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ name: 'foo' }), + }) + pkg.exec(['delete', ''], err => { + t.match( + err, + { code: 'EPKGDELETE' }, + 'should throw an error if deleting invalid args' + ) + + t.end() + }) +}) + +t.test('delete single field', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + }), + }) + pkg.exec(['delete', 'version'], err => { + if (err) + throw err + + t.strictSame( + readPackageJson(), + { + name: 'foo', + }, + 'should delete single field from package.json' + ) + + t.end() + }) +}) + +t.test('delete multiple field', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 
'foo', + version: '1.0.0', + description: 'awesome', + }), + }) + pkg.exec(['delete', 'version', 'description'], err => { + if (err) + throw err + + t.strictSame( + readPackageJson(), + { + name: 'foo', + }, + 'should delete multiple fields from package.json' + ) + + t.end() + }) +}) + +t.test('delete nested field', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + info: { + foo: { + bar: [ + { + baz: 'deleteme', + }, + ], + }, + }, + }), + }) + pkg.exec(['delete', 'info.foo.bar[0].baz'], err => { + if (err) + throw err + + t.strictSame( + readPackageJson(), + { + name: 'foo', + version: '1.0.0', + info: { + foo: { + bar: [ + {}, + ], + }, + }, + }, + 'should delete nested fields from package.json' + ) + + t.end() + }) +}) + +t.test('workspaces', async t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'root', + version: '1.0.0', + workspaces: [ + 'packages/*', + ], + }), + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.2.3', + }), + }, + }, + }) + + await new Promise((res, rej) => { + pkg.execWorkspaces(['get', 'name', 'version'], [], err => { + if (err) + rej(err) + + t.strictSame( + JSON.parse(OUTPUT), + { + a: { + name: 'a', + version: '1.0.0', + }, + b: { + name: 'b', + version: '1.2.3', + }, + }, + 'should return expected result for configured workspaces' + ) + res() + }) + }) + + await new Promise((res, rej) => { + pkg.execWorkspaces(['set', 'funding=http://example.com'], [], err => { + if (err) + rej(err) + + t.strictSame( + readPackageJson(resolve(npm.localPrefix, 'packages/a')), + { + name: 'a', + version: '1.0.0', + funding: 'http://example.com', + }, + 'should add field to workspace a' + ) + + t.strictSame( + readPackageJson(resolve(npm.localPrefix, 'packages/b')), + { + name: 'b', + version: '1.2.3', + funding: 'http://example.com', + }, + 'should add field to workspace b' + ) + res() + }) + }) + + await new Promise((res, rej) => { + pkg.execWorkspaces(['delete', 'version'], [], err => { + if (err) + rej(err) + + t.strictSame( + readPackageJson(resolve(npm.localPrefix, 'packages/a')), + { + name: 'a', + funding: 'http://example.com', + }, + 'should delete version field from workspace a' + ) + + t.strictSame( + readPackageJson(resolve(npm.localPrefix, 'packages/b')), + { + name: 'b', + funding: 'http://example.com', + }, + 'should delete version field from workspace b' + ) + res() + }) + }) +}) diff --git a/test/lib/prefix.js b/test/lib/prefix.js index dfb50f174f5db..526631388e74f 100644 --- a/test/lib/prefix.js +++ b/test/lib/prefix.js @@ -1,19 +1,19 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') -test('prefix', (t) => { +t.test('prefix', (t) => { t.plan(3) const dir = '/prefix/dir' - const Prefix = requireInject('../../lib/prefix.js', { - '../../lib/utils/output.js': (output) => { + const Prefix = require('../../lib/prefix.js') + const prefix = new Prefix({ + prefix: dir, + output: (output) => { t.equal(output, dir, 'prints the correct directory') }, }) - const prefix = new Prefix({ prefix: dir }) prefix.exec([], (err) => { - t.ifError(err, 'npm prefix') + t.error(err, 'npm prefix') t.ok('should have printed directory') }) }) diff --git a/test/lib/profile.js b/test/lib/profile.js index 743ba2d6872e1..112aa5c3b75e1 100644 --- a/test/lib/profile.js +++ b/test/lib/profile.js @@ -1,14 +1,23 @@ const t = 
require('tap') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') let result = '' -const flatOptions = { +const config = { otp: '', json: false, parseable: false, registry: 'https://registry.npmjs.org/', } -const npm = { config: {}, flatOptions: { ...flatOptions }} +const flatOptions = { + registry: 'https://registry.npmjs.org/', +} +const npm = mockNpm({ + config, + flatOptions, + output: (...msg) => { + result = result ? `${result}\n${msg.join('\n')}` : msg.join('\n') + }, +}) const mocks = { ansistyles: { bright: a => a }, npmlog: { @@ -32,9 +41,6 @@ const mocks = { .join('\n') } }, - '../../lib/utils/output.js': (...msg) => { - result += msg.join('\n') - }, '../../lib/utils/pulse-till-done.js': { withPromise: async a => a, }, @@ -60,14 +66,15 @@ const userProfile = { github: 'https://github.com/npm', } -t.afterEach(cb => { +t.afterEach(() => { result = '' - npm.config = {} - npm.flatOptions = { ...flatOptions } - cb() + flatOptions.otp = '' + config.json = false + config.parseable = false + config.registry = 'https://registry.npmjs.org/' }) -const Profile = requireInject('../../lib/profile.js', mocks) +const Profile = t.mock('../../lib/profile.js', mocks) const profile = new Profile(npm) t.test('no args', t => { @@ -88,7 +95,7 @@ t.test('profile get no args', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, }) @@ -108,13 +115,13 @@ t.test('profile get no args', t => { }) t.test('--json', t => { - npm.flatOptions.json = true + config.json = true profile.exec(['get'], err => { if (err) throw err - t.deepEqual( + t.same( JSON.parse(result), userProfile, 'should output json profile result' @@ -124,7 +131,7 @@ t.test('profile get no args', t => { }) t.test('--parseable', t => { - npm.flatOptions.parseable = true + config.parseable = true profile.exec(['get'], err => { if (err) @@ -148,7 +155,7 @@ t.test('profile get no args', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, }) @@ -176,7 +183,7 @@ t.test('profile get no args', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, }) @@ -204,7 +211,7 @@ t.test('profile get no args', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, }) @@ -232,7 +239,7 @@ t.test('profile get <key>', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, }) @@ -253,13 +260,13 @@ t.test('profile get <key>', t => { }) t.test('--json', t => { - npm.flatOptions.json = true + config.json = true profile.exec(['get', 'name'], err => { if (err) throw err - t.deepEqual( + t.same( JSON.parse(result), userProfile, 'should output json profile result ignoring args filter' @@ -269,7 +276,7 @@ t.test('profile get <key>', t => { }) t.test('--parseable', t => { - npm.flatOptions.parseable = true + config.parseable = true profile.exec(['get', 'name'], err => { if (err) @@ -293,7 +300,7 @@ t.test('profile get multiple args', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, }) @@ -313,13 +320,13 
@@ t.test('profile get multiple args', t => { }) t.test('--json', t => { - npm.flatOptions.json = true + config.json = true profile.exec(['get', 'name', 'email', 'github'], err => { if (err) throw err - t.deepEqual( + t.same( JSON.parse(result), userProfile, 'should output json profile result and ignore args' @@ -329,7 +336,7 @@ t.test('profile get multiple args', t => { }) t.test('--parseable', t => { - npm.flatOptions.parseable = true + config.parseable = true profile.exec(['get', 'name', 'email', 'github'], err => { if (err) @@ -427,7 +434,7 @@ t.test('profile set <key> <value>', t => { t.test('default output', t => { t.plan(2) - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile(t), }) @@ -448,9 +455,9 @@ t.test('profile set <key> <value>', t => { t.test('--json', t => { t.plan(2) - npm.flatOptions.json = true + config.json = true - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile(t), }) @@ -460,7 +467,7 @@ t.test('profile set <key> <value>', t => { if (err) throw err - t.deepEqual( + t.same( JSON.parse(result), { fullname: 'Lorem Ipsum', @@ -473,9 +480,9 @@ t.test('profile set <key> <value>', t => { t.test('--parseable', t => { t.plan(2) - npm.flatOptions.parseable = true + config.parseable = true - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile(t), }) @@ -522,7 +529,7 @@ t.test('profile set <key> <value>', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, }) @@ -586,7 +593,7 @@ t.test('profile set <key> <value>', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, '../../lib/utils/read-user-info.js': readUserInfo, @@ -654,7 +661,7 @@ t.test('profile set <key> <value>', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, npmlog, 'npm-profile': npmProfile, @@ -702,7 +709,7 @@ t.test('enable-2fa', t => { }) t.test('no support for --json output', t => { - npm.flatOptions.json = true + config.json = true profile.exec(['enable-2fa', 'auth-only'], err => { t.match( @@ -716,7 +723,7 @@ t.test('enable-2fa', t => { }) t.test('no support for --parseable output', t => { - npm.flatOptions.parseable = true + config.parseable = true profile.exec(['enable-2fa', 'auth-only'], err => { t.match( @@ -745,7 +752,7 @@ t.test('enable-2fa', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, }) @@ -774,7 +781,7 @@ t.test('enable-2fa', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, }) @@ -795,7 +802,7 @@ t.test('enable-2fa', t => { t.test('no auth found', t => { npm.config.getCredentialsByURI = () => ({}) - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, }) const profile = new Profile(npm) @@ -814,18 +821,16 @@ t.test('enable-2fa', t => { t.plan(10) // mock legacy basic auth style - npm.config = { - getCredentialsByURI (reg) { - t.equal(reg, flatOptions.registry, 
'should use expected registry') - return { auth: Buffer.from('foo:bar').toString('base64') } - }, - setCredentialsByURI (registry, { token }) { - t.equal(registry, flatOptions.registry, 'should set expected registry') - t.equal(token, 'token', 'should set expected token') - }, - save (type) { - t.equal(type, 'user', 'should save to user config') - }, + npm.config.getCredentialsByURI = (reg) => { + t.equal(reg, flatOptions.registry, 'should use expected registry') + return { auth: Buffer.from('foo:bar').toString('base64') } + } + npm.config.setCredentialsByURI = (registry, { token }) => { + t.equal(registry, flatOptions.registry, 'should set expected registry') + t.equal(token, 'token', 'should set expected token') + } + npm.config.save = (type) => { + t.equal(type, 'user', 'should save to user config') } const npmProfile = { @@ -876,7 +881,7 @@ t.test('enable-2fa', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, '../../lib/utils/read-user-info.js': readUserInfo, @@ -898,12 +903,10 @@ t.test('enable-2fa', t => { t.test('from token and set otp, retries on pending and verifies with qrcode', t => { t.plan(4) - npm.flatOptions.otp = '1234' + flatOptions.otp = '1234' - npm.config = { - getCredentialsByURI () { - return { token: 'token' } - }, + npm.config.getCredentialsByURI = () => { + return { token: 'token' } } let setCount = 0 @@ -980,7 +983,7 @@ t.test('enable-2fa', t => { generate: (url, cb) => cb('qrcode'), } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, 'qrcode-terminal': qrcode, @@ -1000,12 +1003,10 @@ t.test('enable-2fa', t => { }) t.test('from token and set otp, retrieves invalid otp', t => { - npm.flatOptions.otp = '1234' + flatOptions.otp = '1234' - npm.config = { - getCredentialsByURI () { - return { token: 'token' } - }, + npm.config.getCredentialsByURI = () => { + return { token: 'token' } } const npmProfile = { @@ -1034,7 +1035,7 @@ t.test('enable-2fa', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, '../../lib/utils/read-user-info.js': readUserInfo, @@ -1052,12 +1053,11 @@ t.test('enable-2fa', t => { }) t.test('from token auth provides --otp config arg', t => { - npm.flatOptions.otp = '123456' + flatOptions.otp = '123456' - npm.config = { - getCredentialsByURI (reg) { - return { token: 'token' } - }, + npm.config.getCredentialsByURI = (reg) => { + return { token: 'token' } } const npmProfile = { @@ -1081,7 +1081,7 @@ t.test('enable-2fa', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, '../../lib/utils/read-user-info.js': readUserInfo, @@ -1102,10 +1102,8 @@ t.test('enable-2fa', t => { }) t.test('missing tfa from user profile', t => { - npm.config = { - getCredentialsByURI (reg) { - return { token: 'token' } - }, + npm.config.getCredentialsByURI = (reg) => { + return { token: 'token' } } const npmProfile = { @@ -1132,7 +1130,7 @@ t.test('enable-2fa', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, '../../lib/utils/read-user-info.js': readUserInfo, @@ -1153,10 +1151,8 @@ t.test('enable-2fa', t => { }) t.test('defaults to 
auth-and-writes permission if no mode specified', t => { - npm.config = { - getCredentialsByURI (reg) { - return { token: 'token' } - }, + npm.config.getCredentialsByURI = (reg) => { + return { token: 'token' } } const npmProfile = { @@ -1183,7 +1179,7 @@ t.test('enable-2fa', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, '../../lib/utils/read-user-info.js': readUserInfo, @@ -1217,7 +1213,7 @@ t.test('disable-2fa', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, }) @@ -1242,7 +1238,7 @@ t.test('disable-2fa', t => { return userProfile }, async set (newProfile, conf) { - t.deepEqual( + t.same( newProfile, { tfa: { @@ -1279,7 +1275,7 @@ t.test('disable-2fa', t => { }) t.test('default output', t => { - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile(t), '../../lib/utils/read-user-info.js': readUserInfo(t), @@ -1300,9 +1296,9 @@ t.test('disable-2fa', t => { }) t.test('--json', t => { - npm.flatOptions.json = true + config.json = true - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile(t), '../../lib/utils/read-user-info.js': readUserInfo(t), @@ -1313,7 +1309,7 @@ t.test('disable-2fa', t => { if (err) throw err - t.deepEqual( + t.same( JSON.parse(result), { tfa: false }, 'should output json already disabled msg' @@ -1323,9 +1319,9 @@ t.test('disable-2fa', t => { }) t.test('--parseable', t => { - npm.flatOptions.parseable = true + config.parseable = true - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile(t), '../../lib/utils/read-user-info.js': readUserInfo(t), @@ -1351,14 +1347,14 @@ t.test('disable-2fa', t => { t.test('--otp config already set', t => { t.plan(3) - npm.flatOptions.otp = '123456' + flatOptions.otp = '123456' const npmProfile = { async get () { return userProfile }, async set (newProfile, conf) { - t.deepEqual( + t.same( newProfile, { tfa: { @@ -1388,7 +1384,7 @@ t.test('disable-2fa', t => { }, } - const Profile = requireInject('../../lib/profile.js', { + const Profile = t.mock('../../lib/profile.js', { ...mocks, 'npm-profile': npmProfile, '../../lib/utils/read-user-info.js': readUserInfo, diff --git a/test/lib/prune.js b/test/lib/prune.js index 8cd148806e352..87bb1370f3a19 100644 --- a/test/lib/prune.js +++ b/test/lib/prune.js @@ -1,8 +1,7 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') -test('should prune using Arborist', (t) => { - const Prune = requireInject('../../lib/prune.js', { +t.test('should prune using Arborist', (t) => { + const Prune = t.mock('../../lib/prune.js', { '@npmcli/arborist': function (args) { t.ok(args, 'gets options object') t.ok(args.path, 'gets path option') diff --git a/test/lib/publish.js b/test/lib/publish.js index 0e857fafddfe2..4aa3e5592751e 100644 --- a/test/lib/publish.js +++ b/test/lib/publish.js @@ -1,5 +1,5 @@ const t = require('tap') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') const fs = require('fs') // The way we set loglevel is kind of convoluted, and there is no way to affect @@ -9,17 +9,13 @@ const fs = require('fs') 
const log = require('npmlog') log.level = 'silent' -// mock config -const {defaults} = require('../../lib/utils/config.js') +const {definitions} = require('../../lib/utils/config') +const defaults = Object.entries(definitions).reduce((defaults, [key, def]) => { + defaults[key] = def.default + return defaults +}, {}) -const config = { - list: [defaults], -} - -t.afterEach(cb => { - log.level = 'silent' - cb() -}) +t.afterEach(() => log.level = 'silent') t.test('should publish with libnpmpublish, passing through flatOptions and respecting publishConfig.registry', (t) => { t.plan(7) @@ -34,7 +30,7 @@ t.test('should publish with libnpmpublish, passing through flatOptions and respe }, null, 2), }) - const Publish = requireInject('../../lib/publish.js', { + const Publish = t.mock('../../lib/publish.js', { // verify that we do NOT remove publishConfig if it was there originally // and then removed during the script/pack process libnpmpack: async () => { @@ -47,25 +43,23 @@ t.test('should publish with libnpmpublish, passing through flatOptions and respe libnpmpublish: { publish: (manifest, tarData, opts) => { t.match(manifest, { name: 'my-cool-pkg', version: '1.0.0' }, 'gets manifest') - t.isa(tarData, Buffer, 'tarData is a buffer') + t.type(tarData, Buffer, 'tarData is a buffer') t.ok(opts, 'gets opts object') t.same(opts.customValue, true, 'flatOptions values are passed through') t.same(opts.registry, registry, 'publishConfig.registry is passed through') }, }, }) - const publish = new Publish({ + const npm = mockNpm({ flatOptions: { customValue: true, }, - config: { - ...config, - getCredentialsByURI: (uri) => { - t.same(uri, registry, 'gets credentials for expected registry') - return { token: 'some.registry.token' } - }, - }, }) + npm.config.getCredentialsByURI = (uri) => { + t.same(uri, registry, 'gets credentials for expected registry') + return { token: 'some.registry.token' } + } + const publish = new Publish(npm) publish.exec([testDir], (er) => { if (er) @@ -86,7 +80,7 @@ t.test('re-loads publishConfig.registry if added during script process', (t) => }, null, 2), }) - const Publish = requireInject('../../lib/publish.js', { + const Publish = t.mock('../../lib/publish.js', { libnpmpack: async () => { fs.writeFileSync(`${testDir}/package.json`, JSON.stringify({ name: 'my-cool-pkg', @@ -98,21 +92,18 @@ t.test('re-loads publishConfig.registry if added during script process', (t) => libnpmpublish: { publish: (manifest, tarData, opts) => { t.match(manifest, { name: 'my-cool-pkg', version: '1.0.0' }, 'gets manifest') - t.isa(tarData, Buffer, 'tarData is a buffer') + t.type(tarData, Buffer, 'tarData is a buffer') t.ok(opts, 'gets opts object') t.same(opts.registry, registry, 'publishConfig.registry is passed through') }, }, }) - const publish = new Publish({ - config: { - ...config, - getCredentialsByURI: (uri) => { - t.same(uri, registry, 'gets credentials for expected registry') - return { token: 'some.registry.token' } - }, - }, - }) + const npm = mockNpm() + npm.config.getCredentialsByURI = (uri) => { + t.same(uri, registry, 'gets credentials for expected registry') + return { token: 'some.registry.token' } + } + const publish = new Publish(npm) publish.exec([testDir], (er) => { if (er) @@ -133,10 +124,7 @@ t.test('if loglevel=info and json, should not output package contents', (t) => { }) log.level = 'info' - const Publish = requireInject('../../lib/publish.js', { - '../../lib/utils/output.js': () => { - t.pass('output is called') - }, + const Publish = t.mock('../../lib/publish.js', { 
'../../lib/utils/tar.js': { getContents: () => ({ id: 'someid', @@ -151,18 +139,17 @@ t.test('if loglevel=info and json, should not output package contents', (t) => { }, }, }) - const publish = new Publish({ - flatOptions: { - json: true, - }, - config: { - ...config, - getCredentialsByURI: (uri) => { - t.same(uri, defaults.registry, 'gets credentials for expected registry') - return { token: 'some.registry.token' } - }, + const npm = mockNpm({ + config: { json: true }, + output: () => { + t.pass('output is called') }, }) + npm.config.getCredentialsByURI = (uri) => { + t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') + return { token: 'some.registry.token' } + } + const publish = new Publish(npm) publish.exec([testDir], (er) => { if (er) @@ -183,10 +170,7 @@ t.test('if loglevel=silent and dry-run, should not output package contents or pu }) log.level = 'silent' - const Publish = requireInject('../../lib/publish.js', { - '../../lib/utils/output.js': () => { - throw new Error('should not output in dry run mode') - }, + const Publish = t.mock('../../lib/publish.js', { '../../lib/utils/tar.js': { getContents: () => ({ id: 'someid', @@ -201,17 +185,17 @@ t.test('if loglevel=silent and dry-run, should not output package contents or pu }, }, }) - const publish = new Publish({ - flatOptions: { - dryRun: true, - }, - config: { - ...config, - getCredentialsByURI: () => { - throw new Error('should not call getCredentialsByURI in dry run') - }, + const npm = mockNpm({ + config: { 'dry-run': true }, + output: () => { + throw new Error('should not output in dry run mode') }, }) + npm.config.getCredentialsByURI = () => { + throw new Error('should not call getCredentialsByURI in dry run') + } + + const publish = new Publish(npm) publish.exec([testDir], (er) => { if (er) @@ -232,7 +216,7 @@ t.test('if loglevel=info and dry-run, should not publish, should log package con }) log.level = 'info' - const Publish = requireInject('../../lib/publish.js', { + const Publish = t.mock('../../lib/publish.js', { '../../lib/utils/tar.js': { getContents: () => ({ id: 'someid', @@ -241,25 +225,22 @@ t.test('if loglevel=info and dry-run, should not publish, should log package con t.pass('logTar is called') }, }, - '../../lib/utils/output.js': () => { - t.pass('output fn is called') - }, libnpmpublish: { publish: () => { throw new Error('should not call libnpmpublish in dry run') }, }, }) - const publish = new Publish({ - flatOptions: { - dryRun: true, + const npm = mockNpm({ + config: { 'dry-run': true }, + output: () => { + t.pass('output fn is called') }, - config: { - ...config, - getCredentialsByURI: () => { - throw new Error('should not call getCredentialsByURI in dry run') - }}, }) + npm.config.getCredentialsByURI = () => { + throw new Error('should not call getCredentialsByURI in dry run') + } + const publish = new Publish(npm) publish.exec([testDir], (er) => { if (er) @@ -271,7 +252,7 @@ t.test('if loglevel=info and dry-run, should not publish, should log package con t.test('shows usage with wrong set of arguments', (t) => { t.plan(1) - const Publish = requireInject('../../lib/publish.js') + const Publish = t.mock('../../lib/publish.js') const publish = new Publish({}) publish.exec(['a', 'b', 'c'], (er) => { @@ -283,13 +264,11 @@ t.test('shows usage with wrong set of arguments', (t) => { t.test('throws when invalid tag', (t) => { t.plan(1) - const Publish = requireInject('../../lib/publish.js') - const publish = new Publish({ - flatOptions: { - defaultTag: '0.0.13', - }, - config, + 
const Publish = t.mock('../../lib/publish.js') + const npm = mockNpm({ + config: { tag: '0.0.13' }, }) + const publish = new Publish(npm) publish.exec([], (err) => { t.match(err, { @@ -319,7 +298,7 @@ t.test('can publish a tarball', t => { }, ['package']) const tarFile = fs.readFileSync(`${testDir}/tarball/package.tgz`) - const Publish = requireInject('../../lib/publish.js', { + const Publish = t.mock('../../lib/publish.js', { libnpmpublish: { publish: (manifest, tarData, opts) => { t.match(manifest, { @@ -330,15 +309,12 @@ t.test('can publish a tarball', t => { }, }, }) - const publish = new Publish({ - config: { - ...config, - getCredentialsByURI: (uri) => { - t.same(uri, defaults.registry, 'gets credentials for expected registry') - return { token: 'some.registry.token' } - }, - }, - }) + const npm = mockNpm() + npm.config.getCredentialsByURI = (uri) => { + t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') + return { token: 'some.registry.token' } + } + const publish = new Publish(npm) publish.exec([`${testDir}/tarball/package.tgz`], (er) => { if (er) @@ -350,16 +326,13 @@ t.test('can publish a tarball', t => { t.test('should check auth for default registry', t => { t.plan(2) - const Publish = requireInject('../../lib/publish.js') - const publish = new Publish({ - config: { - ...config, - getCredentialsByURI: (uri) => { - t.same(uri, defaults.registry, 'gets credentials for expected registry') - return {} - }, - }, - }) + const Publish = t.mock('../../lib/publish.js') + const npm = mockNpm() + npm.config.getCredentialsByURI = (uri) => { + t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') + return {} + } + const publish = new Publish(npm) publish.exec([], (err) => { t.match(err, { @@ -373,19 +346,15 @@ t.test('should check auth for default registry', t => { t.test('should check auth for configured registry', t => { t.plan(2) const registry = 'https://some.registry' - const Publish = requireInject('../../lib/publish.js') - const publish = new Publish({ - flatOptions: { - registry, - }, - config: { - ...config, - getCredentialsByURI: (uri) => { - t.same(uri, registry, 'gets credentials for expected registry') - return {} - }, - }, + const Publish = t.mock('../../lib/publish.js') + const npm = mockNpm({ + flatOptions: { registry }, }) + npm.config.getCredentialsByURI = (uri) => { + t.same(uri, registry, 'gets credentials for expected registry') + return {} + } + const publish = new Publish(npm) publish.exec([], (err) => { t.match(err, { @@ -406,19 +375,15 @@ t.test('should check auth for scope specific registry', t => { }, null, 2), }) - const Publish = requireInject('../../lib/publish.js') - const publish = new Publish({ - flatOptions: { - '@npm:registry': registry, - }, - config: { - ...config, - getCredentialsByURI: (uri) => { - t.same(uri, registry, 'gets credentials for expected registry') - return {} - }, - }, + const Publish = t.mock('../../lib/publish.js') + const npm = mockNpm({ + flatOptions: { '@npm:registry': registry }, }) + npm.config.getCredentialsByURI = (uri) => { + t.same(uri, registry, 'gets credentials for expected registry') + return {} + } + const publish = new Publish(npm) publish.exec([testDir], (err) => { t.match(err, { @@ -439,7 +404,7 @@ t.test('should use auth for scope specific registry', t => { }, null, 2), }) - const Publish = requireInject('../../lib/publish.js', { + const Publish = t.mock('../../lib/publish.js', { libnpmpublish: { publish: (manifest, tarData, opts) => { t.ok(opts, 'gets opts 
object') @@ -447,18 +412,15 @@ t.test('should use auth for scope specific registry', t => { }, }, }) - const publish = new Publish({ - flatOptions: { - '@npm:registry': registry, - }, - config: { - ...config, - getCredentialsByURI: (uri) => { - t.same(uri, registry, 'gets credentials for expected registry') - return { token: 'some.registry.token' } - }, - }, + const npm = mockNpm({ + flatOptions: { '@npm:registry': registry }, }) + npm.config.getCredentialsByURI = (uri) => { + t.same(uri, registry, 'gets credentials for expected registry') + return { token: 'some.registry.token' } + } + const publish = new Publish(npm) + publish.exec([testDir], (er) => { if (er) throw er @@ -480,7 +442,7 @@ t.test('read registry only from publishConfig', t => { }, null, 2), }) - const Publish = requireInject('../../lib/publish.js', { + const Publish = t.mock('../../lib/publish.js', { libnpmpublish: { publish: (manifest, tarData, opts) => { t.match(manifest, { name: 'my-cool-pkg', version: '1.0.0' }, 'gets manifest') @@ -488,9 +450,58 @@ t.test('read registry only from publishConfig', t => { }, }, }) + const npm = mockNpm() + npm.config.getCredentialsByURI = (uri) => { + t.same(uri, registry, 'gets credentials for expected registry') + return { token: 'some.registry.token' } + } + const publish = new Publish(npm) + + publish.exec([testDir], (er) => { + if (er) + throw er + t.pass('got to callback') + t.end() + }) +}) + +t.test('able to publish after if encountered multiple configs', t => { + t.plan(3) + + const registry = 'https://some.registry' + const tag = 'better-tag' + const publishConfig = { registry } + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'my-cool-pkg', + version: '1.0.0', + publishConfig, + }, null, 2), + }) + + const configList = [defaults] + configList.unshift(Object.assign(Object.create(configList[0]), { + registry: `https://other.registry`, + tag: 'some-tag', + })) + configList.unshift(Object.assign(Object.create(configList[0]), { tag })) + + const Publish = t.mock('../../lib/publish.js', { + libnpmpublish: { + publish: (manifest, tarData, opts) => { + t.same(opts.defaultTag, tag, 'gets option for expected tag') + }, + }, + }) const publish = new Publish({ + // what would be flattened by the configList created above + flatOptions: { + defaultTag: 'better-tag', + registry: 'https://other.registry', + }, config: { - ...config, + get: key => configList[0][key], + list: configList, getCredentialsByURI: (uri) => { t.same(uri, registry, 'gets credentials for expected registry') return { token: 'some.registry.token' } @@ -505,3 +516,349 @@ t.test('read registry only from publishConfig', t => { t.end() }) }) + +t.test('workspaces', (t) => { + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'my-cool-pkg', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], + }, null, 2), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.2.3-a', + repository: 'http://repo.workspace-a/', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.2.3-n', + repository: 'https://github.com/npm/workspace-b', + }), + }, + 'workspace-c': { + 'package.json': JSON.stringify({ + name: 'workspace-n', + version: '1.2.3-n', + }), + }, + }) + + const publishes = [] + const outputs = [] + t.beforeEach(() => { + npm.config.set('json', false) + outputs.length = 0 + publishes.length = 0 + }) + const Publish = t.mock('../../lib/publish.js', { + '../../lib/utils/tar.js': { 
+ getContents: (manifest) => ({ + id: manifest._id, + }), + logTar: () => {}, + }, + libnpmpublish: { + publish: (manifest, tarballData, opts) => { + publishes.push(manifest) + }, + }, + }) + const npm = mockNpm({ + output: (o) => { + outputs.push(o) + }, + }) + npm.localPrefix = testDir + npm.config.getCredentialsByURI = (uri) => { + return { token: 'some.registry.token' } + } + const publish = new Publish(npm) + + t.test('all workspaces', (t) => { + log.level = 'info' + publish.execWorkspaces([], [], (err) => { + t.notOk(err) + t.matchSnapshot(publishes, 'should publish all workspaces') + t.matchSnapshot(outputs, 'should output all publishes') + t.end() + }) + }) + + t.test('one workspace', t => { + log.level = 'info' + publish.execWorkspaces([], ['workspace-a'], (err) => { + t.notOk(err) + t.matchSnapshot(publishes, 'should publish given workspace') + t.matchSnapshot(outputs, 'should output one publish') + t.end() + }) + }) + + t.test('invalid workspace', t => { + publish.execWorkspaces([], ['workspace-x'], (err) => { + t.match(err, /No workspaces found/) + t.match(err, /workspace-x/) + t.end() + }) + }) + + t.test('json', t => { + log.level = 'info' + npm.config.set('json', true) + publish.execWorkspaces([], [], (err) => { + t.notOk(err) + t.matchSnapshot(publishes, 'should publish all workspaces') + t.matchSnapshot(outputs, 'should output all publishes as json') + t.end() + }) + }) + t.end() +}) + +t.test('private workspaces', (t) => { + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'workspaces-project', + version: '1.0.0', + workspaces: ['packages/*'], + }), + packages: { + a: { + 'package.json': JSON.stringify({ + name: '@npmcli/a', + version: '1.0.0', + private: true, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: '@npmcli/b', + version: '1.0.0', + }), + }, + }, + }) + + const publishes = [] + const outputs = [] + t.beforeEach(() => { + npm.config.set('json', false) + outputs.length = 0 + publishes.length = 0 + }) + const mocks = { + '../../lib/utils/tar.js': { + getContents: (manifest) => ({ + id: manifest._id, + }), + logTar: () => {}, + }, + libnpmpublish: { + publish: (manifest, tarballData, opts) => { + if (manifest.private) { + throw Object.assign( + new Error('private pkg'), + { code: 'EPRIVATE' } + ) + } + publishes.push(manifest) + }, + }, + } + const npm = mockNpm({ + output: (o) => { + outputs.push(o) + }, + }) + npm.localPrefix = testDir + npm.config.getCredentialsByURI = (uri) => { + return { token: 'some.registry.token' } + } + + t.test('with color', t => { + const Publish = t.mock('../../lib/publish.js', { + ...mocks, + npmlog: { + notice () {}, + verbose () {}, + warn (title, msg) { + t.equal(title, 'publish', 'should use publish warn title') + t.match( + msg, + 'Skipping workspace \u001b[32m@npmcli/a\u001b[39m, marked as \u001b[1mprivate\u001b[22m', + 'should display skip private workspace warn msg' + ) + }, + }, + }) + const publish = new Publish(npm) + + npm.color = true + publish.execWorkspaces([], [], (err) => { + t.notOk(err) + t.matchSnapshot(publishes, 'should publish all non-private workspaces') + t.matchSnapshot(outputs, 'should output all publishes') + npm.color = false + t.end() + }) + }) + + t.test('colorless', t => { + const Publish = t.mock('../../lib/publish.js', { + ...mocks, + npmlog: { + notice () {}, + verbose () {}, + warn (title, msg) { + t.equal(title, 'publish', 'should use publish warn title') + t.equal( + msg, + 'Skipping workspace @npmcli/a, marked as private', + 'should display skip private workspace 
warn msg' + ) + }, + }, + }) + const publish = new Publish(npm) + + publish.execWorkspaces([], [], (err) => { + t.notOk(err) + t.matchSnapshot(publishes, 'should publish all non-private workspaces') + t.matchSnapshot(outputs, 'should output all publishes') + t.end() + }) + }) + + t.test('unexpected error', t => { + const Publish = t.mock('../../lib/publish.js', { + ...mocks, + libnpmpublish: { + publish: (manifest, tarballData, opts) => { + if (manifest.private) + throw new Error('ERR') + + publishes.push(manifest) + }, + }, + npmlog: { + notice () {}, + verbose () {}, + }, + }) + const publish = new Publish(npm) + + publish.execWorkspaces([], [], (err) => { + t.match( + err, + /ERR/, + 'should throw unexpected error' + ) + t.end() + }) + }) + + t.end() +}) + +t.test('runs correct lifecycle scripts', t => { + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'my-cool-pkg', + version: '1.0.0', + scripts: { + prepublishOnly: 'echo test prepublishOnly', + prepublish: 'echo test prepublish', // should NOT run this one + publish: 'echo test publish', + postpublish: 'echo test postpublish', + }, + }, null, 2), + }) + + const scripts = [] + const Publish = t.mock('../../lib/publish.js', { + '@npmcli/run-script': (args) => { + scripts.push(args) + }, + '../../lib/utils/tar.js': { + getContents: () => ({ + id: 'someid', + }), + logTar: () => { + t.pass('logTar is called') + }, + }, + libnpmpublish: { + publish: () => { + t.pass('publish called') + }, + }, + }) + const npm = mockNpm({ + output: () => { + t.pass('output is called') + }, + }) + npm.config.getCredentialsByURI = (uri) => { + t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') + return { token: 'some.registry.token' } + } + const publish = new Publish(npm) + publish.exec([testDir], (er) => { + if (er) + throw er + t.same( + scripts.map(s => s.event), + ['prepublishOnly', 'publish', 'postpublish'], + 'runs only expected scripts, in order' + ) + t.end() + }) +}) + +t.test('does not run scripts on --ignore-scripts', t => { + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'my-cool-pkg', + version: '1.0.0', + }, null, 2), + }) + + const Publish = t.mock('../../lib/publish.js', { + '@npmcli/run-script': () => { + t.fail('should not call run-script') + }, + '../../lib/utils/tar.js': { + getContents: () => ({ + id: 'someid', + }), + logTar: () => { + t.pass('logTar is called') + }, + }, + libnpmpublish: { + publish: () => { + t.pass('publish called') + }, + }, + }) + const npm = mockNpm({ + config: { 'ignore-scripts': true }, + output: () => { + t.pass('output is called') + }, + }) + npm.config.getCredentialsByURI = (uri) => { + t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') + return { token: 'some.registry.token' } + } + const publish = new Publish(npm) + publish.exec([testDir], (er) => { + if (er) + throw er + t.pass('got to callback') + t.end() + }) +}) diff --git a/test/lib/rebuild.js b/test/lib/rebuild.js index ee081c087f07f..81768a21fb3b7 100644 --- a/test/lib/rebuild.js +++ b/test/lib/rebuild.js @@ -1,33 +1,29 @@ +const t = require('tap') const fs = require('fs') const { resolve } = require('path') -const t = require('tap') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') let result = '' -const npm = { +const config = { + global: false, +} +const npm = mockNpm({ globalDir: '', - flatOptions: { - global: false, - }, + config, prefix: '', -} -const mocks = { - 
'../../lib/utils/output.js': (...msg) => { + output: (...msg) => { result += msg.join('\n') }, - '../../lib/utils/usage.js': () => 'usage instructions', -} - -const Rebuild = requireInject('../../lib/rebuild.js', mocks) +}) +const Rebuild = require('../../lib/rebuild.js') const rebuild = new Rebuild(npm) -t.afterEach(cb => { +t.afterEach(() => { npm.prefix = '' - npm.flatOptions.global = false + config.global = false npm.globalDir = '' result = '' - cb() }) t.test('no args', t => { @@ -39,7 +35,7 @@ t.test('no args', t => { version: '1.0.0', bin: 'cwd', scripts: { - preinstall: `node -e 'require("fs").writeFileSync("cwd", "")'`, + preinstall: "node -e \"require('fs').writeFileSync('cwd', '')\"", }, }), }, @@ -49,7 +45,7 @@ t.test('no args', t => { version: '1.0.0', bin: 'cwd', scripts: { - preinstall: `node -e 'require("fs").writeFileSync("cwd", "")'`, + preinstall: "node -e \"require('fs').writeFileSync('cwd', '')\"", }, }), }, @@ -242,7 +238,7 @@ t.test('global prefix', t => { }, }) - npm.flatOptions.global = true + config.global = true npm.globalDir = resolve(globalPath, 'lib', 'node_modules') rebuild.exec([], err => { diff --git a/test/lib/repo.js b/test/lib/repo.js index 7abda55ca28d0..e1ac90b1e7577 100644 --- a/test/lib/repo.js +++ b/test/lib/repo.js @@ -1,126 +1,203 @@ const t = require('tap') +const { fake: mockNpm } = require('../fixtures/mock-npm.js') +const { join, sep } = require('path') -const requireInject = require('require-inject') -const pacote = { - manifest: async (spec, options) => { - return spec === 'norepo' ? { +const pkgDirs = t.testdir({ + 'package.json': JSON.stringify({ + name: 'thispkg', + version: '1.2.3', + repository: 'https://example.com/thispkg.git', + }), + norepo: { + 'package.json': JSON.stringify({ name: 'norepo', version: '1.2.3', - } - - : spec === 'repoobbj-nourl' ? { - name: 'repoobj-nourl', - repository: { no: 'url' }, - } - - : spec === 'hostedgit' ? { - repository: 'git://github.com/foo/hostedgit', - } - : spec === 'hostedgitat' ? { - repository: 'git@github.com:foo/hostedgitat', - } - : spec === 'hostedssh' ? { - repository: 'ssh://git@github.com/foo/hostedssh', - } - : spec === 'hostedgitssh' ? { - repository: 'git+ssh://git@github.com/foo/hostedgitssh', - } - : spec === 'hostedgithttp' ? { - repository: 'git+http://github.com/foo/hostedgithttp', - } - : spec === 'hostedgithttps' ? { - repository: 'git+https://github.com/foo/hostedgithttps', - } - - : spec === 'hostedgitobj' ? { - repository: { url: 'git://github.com/foo/hostedgitobj' }, - } - : spec === 'hostedgitatobj' ? { - repository: { url: 'git@github.com:foo/hostedgitatobj' }, - } - : spec === 'hostedsshobj' ? { - repository: { url: 'ssh://git@github.com/foo/hostedsshobj' }, - } - : spec === 'hostedgitsshobj' ? { - repository: { url: 'git+ssh://git@github.com/foo/hostedgitsshobj' }, - } - : spec === 'hostedgithttpobj' ? { - repository: { url: 'git+http://github.com/foo/hostedgithttpobj' }, - } - : spec === 'hostedgithttpsobj' ? { - repository: { url: 'git+https://github.com/foo/hostedgithttpsobj' }, - } - - : spec === 'unhostedgit' ? { - repository: 'git://gothib.com/foo/unhostedgit', - } - : spec === 'unhostedgitat' ? { - repository: 'git@gothib.com:foo/unhostedgitat', - } - : spec === 'unhostedssh' ? { - repository: 'ssh://git@gothib.com/foo/unhostedssh', - } - : spec === 'unhostedgitssh' ? { - repository: 'git+ssh://git@gothib.com/foo/unhostedgitssh', - } - : spec === 'unhostedgithttp' ? { - repository: 'git+http://gothib.com/foo/unhostedgithttp', - } - : spec === 'unhostedgithttps' ? 
{ - repository: 'git+https://gothib.com/foo/unhostedgithttps', - } - - : spec === 'unhostedgitobj' ? { - repository: { url: 'git://gothib.com/foo/unhostedgitobj' }, - } - : spec === 'unhostedgitatobj' ? { - repository: { url: 'git@gothib.com:foo/unhostedgitatobj' }, - } - : spec === 'unhostedsshobj' ? { - repository: { url: 'ssh://git@gothib.com/foo/unhostedsshobj' }, - } - : spec === 'unhostedgitsshobj' ? { - repository: { url: 'git+ssh://git@gothib.com/foo/unhostedgitsshobj' }, - } - : spec === 'unhostedgithttpobj' ? { - repository: { url: 'git+http://gothib.com/foo/unhostedgithttpobj' }, - } - : spec === 'unhostedgithttpsobj' ? { - repository: { url: 'git+https://gothib.com/foo/unhostedgithttpsobj' }, - } - - : spec === 'directory' ? { - repository: { - type: 'git', - url: 'git+https://github.com/foo/test-repo-with-directory.git', - directory: 'some/directory', - }, - } - - : spec === '.' ? { - name: 'thispkg', - version: '1.2.3', - repository: 'https://example.com/thispkg.git', - } - : null + }), }, -} + 'repoobbj-nourl': { + 'package.json': JSON.stringify({ + name: 'repoobj-nourl', + repository: { no: 'url' }, + }), + }, + hostedgit: { + 'package.json': JSON.stringify({ + repository: 'git://github.com/foo/hostedgit', + }), + }, + hostedgitat: { + 'package.json': JSON.stringify({ + repository: 'git@github.com:foo/hostedgitat', + }), + }, + hostedssh: { + 'package.json': JSON.stringify({ + repository: 'ssh://git@github.com/foo/hostedssh', + }), + }, + hostedgitssh: { + 'package.json': JSON.stringify({ + repository: 'git+ssh://git@github.com/foo/hostedgitssh', + }), + }, + hostedgithttp: { + 'package.json': JSON.stringify({ + repository: 'git+http://github.com/foo/hostedgithttp', + }), + }, + hostedgithttps: { + 'package.json': JSON.stringify({ + repository: 'git+https://github.com/foo/hostedgithttps', + }), + }, + hostedgitobj: { + 'package.json': JSON.stringify({ + repository: { url: 'git://github.com/foo/hostedgitobj' }, + }), + }, + hostedgitatobj: { + 'package.json': JSON.stringify({ + repository: { url: 'git@github.com:foo/hostedgitatobj' }, + }), + }, + hostedsshobj: { + 'package.json': JSON.stringify({ + repository: { url: 'ssh://git@github.com/foo/hostedsshobj' }, + }), + }, + hostedgitsshobj: { + 'package.json': JSON.stringify({ + repository: { url: 'git+ssh://git@github.com/foo/hostedgitsshobj' }, + }), + }, + hostedgithttpobj: { + 'package.json': JSON.stringify({ + repository: { url: 'git+http://github.com/foo/hostedgithttpobj' }, + }), + }, + hostedgithttpsobj: { + 'package.json': JSON.stringify({ + repository: { url: 'git+https://github.com/foo/hostedgithttpsobj' }, + }), + }, + unhostedgit: { + 'package.json': JSON.stringify({ + repository: 'git://gothib.com/foo/unhostedgit', + }), + }, + unhostedgitat: { + 'package.json': JSON.stringify({ + repository: 'git@gothib.com:foo/unhostedgitat', + }), + }, + unhostedssh: { + 'package.json': JSON.stringify({ + repository: 'ssh://git@gothib.com/foo/unhostedssh', + }), + }, + unhostedgitssh: { + 'package.json': JSON.stringify({ + repository: 'git+ssh://git@gothib.com/foo/unhostedgitssh', + }), + }, + unhostedgithttp: { + 'package.json': JSON.stringify({ + repository: 'git+http://gothib.com/foo/unhostedgithttp', + }), + }, + unhostedgithttps: { + 'package.json': JSON.stringify({ + repository: 'git+https://gothib.com/foo/unhostedgithttps', + }), + }, + unhostedgitobj: { + 'package.json': JSON.stringify({ + repository: { url: 'git://gothib.com/foo/unhostedgitobj' }, + }), + }, + unhostedgitatobj: { + 'package.json': JSON.stringify({ + 
repository: { url: 'git@gothib.com:foo/unhostedgitatobj' }, + }), + }, + unhostedsshobj: { + 'package.json': JSON.stringify({ + repository: { url: 'ssh://git@gothib.com/foo/unhostedsshobj' }, + }), + }, + unhostedgitsshobj: { + 'package.json': JSON.stringify({ + repository: { url: 'git+ssh://git@gothib.com/foo/unhostedgitsshobj' }, + }), + }, + unhostedgithttpobj: { + 'package.json': JSON.stringify({ + repository: { url: 'git+http://gothib.com/foo/unhostedgithttpobj' }, + }), + }, + unhostedgithttpsobj: { + 'package.json': JSON.stringify({ + repository: { url: 'git+https://gothib.com/foo/unhostedgithttpsobj' }, + }), + }, + directory: { + 'package.json': JSON.stringify({ + repository: { + type: 'git', + url: 'git+https://github.com/foo/test-repo-with-directory.git', + directory: 'some/directory', + }, + }), + }, + workspaces: { + 'package.json': JSON.stringify({ + name: 'workspaces-test', + version: '1.2.3-test', + workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], + }), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.2.3-a', + repository: 'http://repo.workspace-a/', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.2.3-n', + repository: 'https://github.com/npm/workspace-b', + }), + }, + 'workspace-c': JSON.stringify({ + 'package.json': { + name: 'workspace-n', + version: '1.2.3-n', + }, + }), + }, +}) // keep a tally of which urls got opened -const opened = {} -const openUrl = async (npm, url, errMsg, cb) => { +let opened = {} +const openUrl = async (npm, url, errMsg) => { opened[url] = opened[url] || 0 opened[url]++ - process.nextTick(cb) } -const Repo = requireInject('../../lib/repo.js', { - pacote, +const Repo = t.mock('../../lib/repo.js', { '../../lib/utils/open-url.js': openUrl, }) -const repo = new Repo({ flatOptions: {} }) +const flatOptions = {} +const npm = mockNpm({ flatOptions }) +const repo = new Repo(npm) + +t.afterEach(() => opened = {}) t.test('open repo urls', t => { + // XXX It is very odd that `where` is how pacote knows to look anywhere other + // than the cwd. 
I would think npm.localPrefix would factor in somehow + flatOptions.where = pkgDirs const expect = { hostedgit: 'https://github.com/foo/hostedgit', hostedgitat: 'https://github.com/foo/hostedgitat', @@ -151,11 +228,13 @@ t.test('open repo urls', t => { t.plan(keys.length) keys.forEach(pkg => { t.test(pkg, t => { - repo.exec([pkg], (er) => { - if (er) - throw er + repo.exec([['.', pkg].join(sep)], (err) => { + if (err) + throw err const url = expect[pkg] - t.equal(opened[url], 1, url, {opened}) + t.match({ + [url]: 1, + }, opened, `opened ${url}`, {opened}) t.end() }) }) @@ -163,6 +242,7 @@ t.test('open repo urls', t => { }) t.test('fail if cannot figure out repo url', t => { + flatOptions.where = pkgDirs const cases = [ 'norepo', 'repoobbj-nourl', @@ -174,8 +254,8 @@ t.test('fail if cannot figure out repo url', t => { cases.forEach(pkg => { t.test(pkg, t => { - repo.exec([pkg], er => { - t.match(er, { pkgid: pkg }) + repo.exec([['.', pkg].join(sep)], (err) => { + t.match(err, { pkgid: pkg }) t.end() }) }) @@ -183,10 +263,47 @@ t.test('fail if cannot figure out repo url', t => { }) t.test('open default package if none specified', t => { + flatOptions.where = pkgDirs repo.exec([], (er) => { if (er) throw er - t.equal(opened['https://example.com/thispkg'], 2, 'opened expected url', {opened}) + t.equal(opened['https://example.com/thispkg'], 1, 'opened expected url', {opened}) t.end() }) }) + +t.test('workspaces', t => { + flatOptions.where = undefined + npm.localPrefix = join(pkgDirs, 'workspaces') + + t.test('all workspaces', (t) => { + repo.execWorkspaces([], [], (err) => { + t.notOk(err) + t.match({ + 'https://repo.workspace-a/': 1, // Gets translated to https! + 'https://github.com/npm/workspace-b': 1, + }, opened, 'opened two valid repo urls') + t.end() + }) + }) + + t.test('one workspace', (t) => { + repo.execWorkspaces([], ['workspace-a'], (err) => { + t.notOk(err) + t.match({ + 'https://repo.workspace-a/': 1, + }, opened, 'opened one requested repo urls') + t.end() + }) + }) + + t.test('invalid workspace', (t) => { + repo.execWorkspaces([], ['workspace-x'], (err) => { + t.match(err, /No workspaces found/) + t.match(err, /workspace-x/) + t.match({}, opened, 'opened no repo urls') + t.end() + }) + }) + t.end() +}) diff --git a/test/lib/restart.js b/test/lib/restart.js index f29592d9bfcec..9719476c41807 100644 --- a/test/lib/restart.js +++ b/test/lib/restart.js @@ -10,7 +10,6 @@ const npm = { } const Restart = require('../../lib/restart.js') const restart = new Restart(npm) -t.equal(restart.usage, 'npm restart [-- <args>]') restart.exec(['foo'], () => { t.match(runArgs, ['restart', 'foo']) t.end() diff --git a/test/lib/root.js b/test/lib/root.js index e8ccc1106d772..5460f3d4985c2 100644 --- a/test/lib/root.js +++ b/test/lib/root.js @@ -1,19 +1,19 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') -test('root', (t) => { +t.test('root', (t) => { t.plan(3) const dir = '/root/dir' - const Root = requireInject('../../lib/root.js', { - '../../lib/utils/output.js': (output) => { + const Root = require('../../lib/root.js') + const root = new Root({ + dir, + output: (output) => { t.equal(output, dir, 'prints the correct directory') }, }) - const root = new Root({ dir }) root.exec([], (err) => { - t.ifError(err, 'npm root') + t.error(err, 'npm root') t.ok('should have printed directory') }) }) diff --git a/test/lib/run-script.js b/test/lib/run-script.js index 43592d3243466..a3f04ea6790fa 100644 --- a/test/lib/run-script.js +++ 
b/test/lib/run-script.js @@ -1,38 +1,60 @@ const t = require('tap') -const requireInject = require('require-inject') +const { resolve } = require('path') +const { fake: mockNpm } = require('../fixtures/mock-npm') + +const normalizePath = p => p + .replace(/\\+/g, '/') + .replace(/\r\n/g, '\n') + +const cleanOutput = (str) => normalizePath(str) + .replace(normalizePath(process.cwd()), '{CWD}') const RUN_SCRIPTS = [] -const npm = { +const flatOptions = { + scriptShell: undefined, +} +const config = { + json: false, + parseable: false, + 'if-present': false, +} + +const npm = mockNpm({ localPrefix: __dirname, - flatOptions: { - scriptShell: undefined, - json: false, - parseable: false, - }, - config: { - settings: { - 'if-present': false, + flatOptions, + config, + commands: { + help: { + description: 'test help description', }, - get: k => npm.config.settings[k], - set: (k, v) => { - npm.config.settings[k] = v + test: { + description: 'test test description', }, }, -} + output: (...msg) => output.push(msg), +}) const output = [] -t.afterEach(cb => { +const npmlog = { + disableProgress: () => null, + level: 'warn', + error: () => null, +} + +t.afterEach(() => { + npm.color = false + npmlog.level = 'warn' + npmlog.error = () => null output.length = 0 RUN_SCRIPTS.length = 0 - npm.flatOptions.json = false - npm.flatOptions.parseable = false - cb() + config['if-present'] = false + config.json = false + config.parseable = false }) -const npmlog = { level: 'warn' } const getRS = windows => { - const RunScript = requireInject('../../lib/run-script.js', { + const RunScript = t.mock('../../lib/run-script.js', { '@npmcli/run-script': Object.assign(async opts => { RUN_SCRIPTS.push(opts) }, { @@ -40,7 +62,6 @@ const getRS = windows => { }), npmlog, '../../lib/utils/is-windows-shell.js': windows, - '../../lib/utils/output.js': (...msg) => output.push(msg), }) return new RunScript(npm) } @@ -261,26 +282,41 @@ t.test('try to run missing script', t => { npm.localPrefix = t.testdir({ 'package.json': JSON.stringify({ scripts: { hello: 'world' }, + bin: { goodnight: 'moon' }, }), }) t.test('no suggestions', t => { runScript.exec(['notevenclose'], er => { t.match(er, { - message: 'missing script: notevenclose', + message: 'Missing script: "notevenclose"', }) t.end() }) }) - t.test('suggestions', t => { + t.test('script suggestions', t => { runScript.exec(['helo'], er => { t.match(er, { - message: 'missing script: helo\n\nDid you mean this?\n hello', + message: 'Missing script: "helo"', + }) + t.match(er, { + message: 'npm run hello', + }) + t.end() + }) + }) + t.test('bin suggestions', t => { + runScript.exec(['goodneght'], er => { + t.match(er, { + message: 'Missing script: "goodneght"', + }) + t.match(er, { + message: 'npm exec goodnight', }) t.end() }) }) t.test('with --if-present', t => { - npm.config.set('if-present', true) + config['if-present'] = true runScript.exec(['goodbye'], er => { if (er) throw er @@ -331,7 +367,7 @@ t.test('run pre/post hooks', t => { }) t.test('skip pre/post hooks when using ignoreScripts', t => { - npm.flatOptions.ignoreScripts = true + config['ignore-scripts'] = true npm.localPrefix = t.testdir({ 'package.json': JSON.stringify({ @@ -348,7 +384,7 @@ t.test('skip pre/post hooks when using ignoreScripts', t => { if (er) throw er - t.deepEqual(RUN_SCRIPTS, [ + t.same(RUN_SCRIPTS, [ { path: npm.localPrefix, args: [], @@ -368,7 +404,7 @@ t.test('skip pre/post hooks when using ignoreScripts', t => { }, ]) t.end() - delete npm.flatOptions.ignoreScripts + delete config['ignore-scripts'] 
}) }) @@ -443,13 +479,14 @@ t.test('list scripts', t => { if (er) throw er t.strictSame(output, [ - ['Lifecycle scripts included in x:'], + ['Lifecycle scripts included in x@1.2.3:'], [' test\n exit 2'], [' start\n node server.js'], [' stop\n node kill-server.js'], ['\navailable via `npm run-script`:'], [' preenv\n echo before the env'], [' postenv\n echo after the env'], + [''], ], 'basic report') t.end() }) @@ -466,7 +503,7 @@ t.test('list scripts', t => { }) t.test('warn json', t => { npmlog.level = 'warn' - npm.flatOptions.json = true + config.json = true runScript.exec([], er => { if (er) throw er @@ -476,7 +513,7 @@ t.test('list scripts', t => { }) t.test('parseable', t => { - npm.flatOptions.parseable = true + config.parseable = true runScript.exec([], er => { if (er) throw er @@ -522,8 +559,9 @@ t.test('list scripts, only commands', t => { if (er) throw er t.strictSame(output, [ - ['Lifecycle scripts included in x:'], + ['Lifecycle scripts included in x@1.2.3:'], [' preversion\n echo doing the version dance'], + [''], ]) t.end() }) @@ -542,9 +580,443 @@ t.test('list scripts, only non-commands', t => { if (er) throw er t.strictSame(output, [ - ['Scripts available in x via `npm run-script`:'], + ['Scripts available in x@1.2.3 via `npm run-script`:'], [' glorp\n echo doing the glerp glop'], + [''], ]) t.end() }) }) + +t.test('workspaces', t => { + npm.localPrefix = t.testdir({ + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + scripts: { glorp: 'echo a doing the glerp glop' }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '2.0.0', + scripts: { glorp: 'echo b doing the glerp glop' }, + }), + }, + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + scripts: { + test: 'exit 0', + posttest: 'echo posttest', + lorem: 'echo c lorem', + }, + }), + }, + d: { + 'package.json': JSON.stringify({ + name: 'd', + version: '1.0.0', + scripts: { + test: 'exit 0', + posttest: 'echo posttest', + }, + }), + }, + e: { + 'package.json': JSON.stringify({ + name: 'e', + scripts: { test: 'exit 0', start: 'echo start something' }, + }), + }, + noscripts: { + 'package.json': JSON.stringify({ + name: 'noscripts', + version: '1.0.0', + }), + }, + }, + 'package.json': JSON.stringify({ + name: 'x', + version: '1.2.3', + workspaces: ['packages/*'], + }), + }) + + t.test('list all scripts', t => { + runScript.execWorkspaces([], [], er => { + if (er) + throw er + t.strictSame(output, [ + ['Scripts available in a@1.0.0 via `npm run-script`:'], + [' glorp\n echo a doing the glerp glop'], + [''], + ['Scripts available in b@2.0.0 via `npm run-script`:'], + [' glorp\n echo b doing the glerp glop'], + [''], + ['Lifecycle scripts included in c@1.0.0:'], + [' test\n exit 0'], + [' posttest\n echo posttest'], + ['\navailable via `npm run-script`:'], + [' lorem\n echo c lorem'], + [''], + ['Lifecycle scripts included in d@1.0.0:'], + [' test\n exit 0'], + [' posttest\n echo posttest'], + [''], + ['Lifecycle scripts included in e:'], + [' test\n exit 0'], + [' start\n echo start something'], + [''], + ]) + t.end() + }) + }) + + t.test('list regular scripts, filtered by name', t => { + runScript.execWorkspaces([], ['a', 'b'], er => { + if (er) + throw er + t.strictSame(output, [ + ['Scripts available in a@1.0.0 via `npm run-script`:'], + [' glorp\n echo a doing the glerp glop'], + [''], + ['Scripts available in b@2.0.0 via `npm run-script`:'], + [' glorp\n echo b doing the glerp glop'], + [''], + ]) + t.end() + }) + }) + + t.test('list 
regular scripts, filtered by path', t => { + runScript.execWorkspaces([], ['./packages/a'], er => { + if (er) + throw er + t.strictSame(output, [ + ['Scripts available in a@1.0.0 via `npm run-script`:'], + [' glorp\n echo a doing the glerp glop'], + [''], + ]) + t.end() + }) + }) + + t.test('list regular scripts, filtered by parent folder', t => { + runScript.execWorkspaces([], ['./packages'], er => { + if (er) + throw er + t.strictSame(output, [ + ['Scripts available in a@1.0.0 via `npm run-script`:'], + [' glorp\n echo a doing the glerp glop'], + [''], + ['Scripts available in b@2.0.0 via `npm run-script`:'], + [' glorp\n echo b doing the glerp glop'], + [''], + ['Lifecycle scripts included in c@1.0.0:'], + [' test\n exit 0'], + [' posttest\n echo posttest'], + ['\navailable via `npm run-script`:'], + [' lorem\n echo c lorem'], + [''], + ['Lifecycle scripts included in d@1.0.0:'], + [' test\n exit 0'], + [' posttest\n echo posttest'], + [''], + ['Lifecycle scripts included in e:'], + [' test\n exit 0'], + [' start\n echo start something'], + [''], + ]) + t.end() + }) + }) + + t.test('list all scripts with colors', t => { + npm.color = true + runScript.execWorkspaces([], [], er => { + if (er) + throw er + t.strictSame(output, [ + [ + '\u001b[1mScripts\u001b[22m available in \x1B[32ma@1.0.0\x1B[39m via `\x1B[34mnpm run-script\x1B[39m`:', + ], + [' glorp\n \x1B[2mecho a doing the glerp glop\x1B[22m'], + [''], + [ + '\u001b[1mScripts\u001b[22m available in \x1B[32mb@2.0.0\x1B[39m via `\x1B[34mnpm run-script\x1B[39m`:', + ], + [' glorp\n \x1B[2mecho b doing the glerp glop\x1B[22m'], + [''], + [ + '\x1B[0m\x1B[1mLifecycle scripts\x1B[22m\x1B[0m included in \x1B[32mc@1.0.0\x1B[39m:', + ], + [' test\n \x1B[2mexit 0\x1B[22m'], + [' posttest\n \x1B[2mecho posttest\x1B[22m'], + ['\navailable via `\x1B[34mnpm run-script\x1B[39m`:'], + [' lorem\n \x1B[2mecho c lorem\x1B[22m'], + [''], + [ + '\x1B[0m\x1B[1mLifecycle scripts\x1B[22m\x1B[0m included in \x1B[32md@1.0.0\x1B[39m:', + ], + [' test\n \x1B[2mexit 0\x1B[22m'], + [' posttest\n \x1B[2mecho posttest\x1B[22m'], + [''], + [ + '\x1B[0m\x1B[1mLifecycle scripts\x1B[22m\x1B[0m included in \x1B[32me\x1B[39m:', + ], + [' test\n \x1B[2mexit 0\x1B[22m'], + [' start\n \x1B[2mecho start something\x1B[22m'], + [''], + ]) + t.end() + }) + }) + + t.test('list all scripts --json', t => { + config.json = true + runScript.execWorkspaces([], [], er => { + if (er) + throw er + t.strictSame(output, [ + [ + '{\n' + + ' "a": {\n' + + ' "glorp": "echo a doing the glerp glop"\n' + + ' },\n' + + ' "b": {\n' + + ' "glorp": "echo b doing the glerp glop"\n' + + ' },\n' + + ' "c": {\n' + + ' "test": "exit 0",\n' + + ' "posttest": "echo posttest",\n' + + ' "lorem": "echo c lorem"\n' + + ' },\n' + + ' "d": {\n' + + ' "test": "exit 0",\n' + + ' "posttest": "echo posttest"\n' + + ' },\n' + + ' "e": {\n' + + ' "test": "exit 0",\n' + + ' "start": "echo start something"\n' + + ' },\n' + + ' "noscripts": {}\n' + + '}', + ], + ]) + t.end() + }) + }) + + t.test('list all scripts --parseable', t => { + config.parseable = true + runScript.execWorkspaces([], [], er => { + if (er) + throw er + t.strictSame(output, [ + ['a:glorp:echo a doing the glerp glop'], + ['b:glorp:echo b doing the glerp glop'], + ['c:test:exit 0'], + ['c:posttest:echo posttest'], + ['c:lorem:echo c lorem'], + ['d:test:exit 0'], + ['d:posttest:echo posttest'], + ['e:test:exit 0'], + ['e:start:echo start something'], + ]) + t.end() + }) + }) + + t.test('list no scripts --loglevel=silent', t => { + npmlog.level = 
'silent' + runScript.execWorkspaces([], [], er => { + if (er) + throw er + t.strictSame(output, []) + t.end() + }) + }) + + t.test('run scripts across all workspaces', t => { + runScript.execWorkspaces(['test'], [], er => { + if (er) + throw er + + t.match(RUN_SCRIPTS, [ + { + path: resolve(npm.localPrefix, 'packages/c'), + pkg: { name: 'c', version: '1.0.0' }, + event: 'test', + }, + { + path: resolve(npm.localPrefix, 'packages/c'), + pkg: { name: 'c', version: '1.0.0' }, + event: 'posttest', + }, + { + path: resolve(npm.localPrefix, 'packages/d'), + pkg: { name: 'd', version: '1.0.0' }, + event: 'test', + }, + { + path: resolve(npm.localPrefix, 'packages/d'), + pkg: { name: 'd', version: '1.0.0' }, + event: 'posttest', + }, + { + path: resolve(npm.localPrefix, 'packages/e'), + pkg: { name: 'e' }, + event: 'test', + }, + ]) + t.end() + }) + }) + + t.test('missing scripts in all workspaces', t => { + const LOG = [] + npmlog.error = (err) => { + LOG.push(String(err)) + } + runScript.execWorkspaces(['missing-script'], [], er => { + t.match( + er, + /Missing script: missing-script/, + 'should throw missing script error' + ) + + process.exitCode = 0 // clean exit code + + t.match(RUN_SCRIPTS, []) + t.strictSame(LOG.map(cleanOutput), [ + 'Lifecycle script `missing-script` failed with error:', + 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: a@1.0.0', + ' at location: {CWD}/test/lib/tap-testdir-run-script-workspaces/packages/a', + 'Lifecycle script `missing-script` failed with error:', + 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: b@2.0.0', + ' at location: {CWD}/test/lib/tap-testdir-run-script-workspaces/packages/b', + 'Lifecycle script `missing-script` failed with error:', + 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: c@1.0.0', + ' at location: {CWD}/test/lib/tap-testdir-run-script-workspaces/packages/c', + 'Lifecycle script `missing-script` failed with error:', + 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: d@1.0.0', + ' at location: {CWD}/test/lib/tap-testdir-run-script-workspaces/packages/d', + 'Lifecycle script `missing-script` failed with error:', + 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: e', + ' at location: {CWD}/test/lib/tap-testdir-run-script-workspaces/packages/e', + 'Lifecycle script `missing-script` failed with error:', + 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: noscripts@1.0.0', + ' at location: {CWD}/test/lib/tap-testdir-run-script-workspaces/packages/noscripts', + ], 'should log error msgs for each workspace script') + + t.end() + }) + }) + + t.test('missing scripts in some workspaces', t => { + const LOG = [] + npmlog.error = (err) => { + LOG.push(String(err)) + } + runScript.execWorkspaces(['test'], ['a', 'b', 'c', 'd'], er => { + if (er) + throw er + + t.match(RUN_SCRIPTS, []) + t.strictSame(LOG.map(cleanOutput), [ + 'Lifecycle script `test` failed with error:', + 'Error: Missing script: "test"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: a@1.0.0', + ' at location: {CWD}/test/lib/tap-testdir-run-script-workspaces/packages/a', + 'Lifecycle script `test` failed with error:', + 'Error: Missing script: "test"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: b@2.0.0', + ' at location: 
{CWD}/test/lib/tap-testdir-run-script-workspaces/packages/b', + ], 'should log error msgs for each workspace script') + t.end() + }) + }) + + t.test('no workspaces when filtering by user args', t => { + runScript.execWorkspaces([], ['foo', 'bar'], er => { + t.equal( + er.message, + 'No workspaces found:\n --workspace=foo --workspace=bar', + 'should throw error msg' + ) + t.end() + }) + }) + + t.test('no workspaces', t => { + const _prevPrefix = npm.localPrefix + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + }), + }) + + runScript.execWorkspaces([], [], er => { + t.match(er, /No workspaces found!/, 'should throw error msg') + npm.localPrefix = _prevPrefix + t.end() + }) + }) + + t.test('single failed workspace run', t => { + const RunScript = t.mock('../../lib/run-script.js', { + '@npmcli/run-script': () => { + throw new Error('err') + }, + npmlog, + '../../lib/utils/is-windows-shell.js': false, + }) + const runScript = new RunScript(npm) + + runScript.execWorkspaces(['test'], ['c'], er => { + t.ok('should complete running all targets') + process.exitCode = 0 // clean up exit code + t.end() + }) + }) + + t.test('failed workspace run with succeeded runs', t => { + const RunScript = t.mock('../../lib/run-script.js', { + '@npmcli/run-script': async opts => { + if (opts.pkg.name === 'a') + throw new Error('ERR') + + RUN_SCRIPTS.push(opts) + }, + npmlog, + '../../lib/utils/is-windows-shell.js': false, + }) + const runScript = new RunScript(npm) + + runScript.execWorkspaces(['glorp'], ['a', 'b'], er => { + t.match(RUN_SCRIPTS, [ + { + path: resolve(npm.localPrefix, 'packages/b'), + pkg: { name: 'b', version: '2.0.0' }, + event: 'glorp', + }, + ]) + + process.exitCode = 0 // clean up exit code + t.end() + }) + }) + + t.end() +}) diff --git a/test/lib/search.js b/test/lib/search.js index 59c59f3b96e27..510a470f48088 100644 --- a/test/lib/search.js +++ b/test/lib/search.js @@ -1,6 +1,6 @@ -const Minipass = require('minipass') const t = require('tap') -const requireInject = require('require-inject') +const Minipass = require('minipass') +const { fake: mockNpm } = require('../fixtures/mock-npm') const libnpmsearchResultFixture = require('../fixtures/libnpmsearch-stream-result.js') @@ -12,7 +12,17 @@ const flatOptions = { opts: '', }, } -const npm = { flatOptions: { ...flatOptions } } +const config = { + json: false, + parseable: false, +} +const npm = mockNpm({ + config, + flatOptions: { ...flatOptions }, + output: (...msg) => { + result += msg.join('\n') + }, +}) const npmlog = { silly () {}, clearProgress () {}, @@ -23,20 +33,17 @@ const libnpmsearch = { const mocks = { npmlog, libnpmsearch, - '../../lib/utils/output.js': (...msg) => { - result += msg.join('\n') - }, '../../lib/utils/usage.js': () => 'usage instructions', - // '../../lib/search/format-package-stream.js': a => a, } -t.afterEach(cb => { +t.afterEach(() => { result = '' - npm.flatOptions = flatOptions - cb() + config.json = false + config.parseable = false + npm.flatOptions = { ...flatOptions } }) -const Search = requireInject('../../lib/search.js', mocks) +const Search = t.mock('../../lib/search.js', mocks) const search = new Search(npm) t.test('no args', t => { @@ -59,7 +66,7 @@ t.test('search <name>', t => { }, } - const Search = requireInject('../../lib/search.js', { + const Search = t.mock('../../lib/search.js', { ...mocks, libnpmsearch, }) @@ -80,6 +87,49 @@ t.test('search <name>', t => { src.end() }) +t.test('search <name> --json', (t) => { + const src = new Minipass() + 
src.objectMode = true + + npm.flatOptions.json = true + config.json = true + const libnpmsearch = { + stream () { + return src + }, + } + + const Search = t.mock('../../lib/search.js', { + ...mocks, + libnpmsearch, + }) + const search = new Search(npm) + + search.exec(['libnpm'], (err) => { + if (err) + throw err + + const parsedResult = JSON.parse(result) + parsedResult.forEach((entry) => { + entry.date = new Date(entry.date) + }) + + t.same( + parsedResult, + libnpmsearchResultFixture, + 'should have expected search results as json' + ) + + config.json = false + t.end() + }) + + for (const i of libnpmsearchResultFixture) + src.write(i) + + src.end() +}) + t.test('search <name> --searchexclude --searchopts', t => { npm.flatOptions.search = { ...flatOptions.search, @@ -94,7 +144,7 @@ t.test('search <name> --searchexclude --searchopts', t => { }, } - const Search = requireInject('../../lib/search.js', { + const Search = t.mock('../../lib/search.js', { ...mocks, libnpmsearch, }) @@ -148,7 +198,7 @@ t.test('empty search results', t => { }, } - const Search = requireInject('../../lib/search.js', { + const Search = t.mock('../../lib/search.js', { ...mocks, libnpmsearch, }) @@ -175,7 +225,7 @@ t.test('search api response error', t => { }, } - const Search = requireInject('../../lib/search.js', { + const Search = t.mock('../../lib/search.js', { ...mocks, libnpmsearch, }) diff --git a/test/lib/set-script.js b/test/lib/set-script.js index 7a057c503652f..37ba9a1cc71a2 100644 --- a/test/lib/set-script.js +++ b/test/lib/set-script.js @@ -1,153 +1,179 @@ -const test = require('tap') -const requireInject = require('require-inject') +const t = require('tap') +const fs = require('fs') const parseJSON = require('json-parse-even-better-errors') +const { fake: mockNpm } = require('../fixtures/mock-npm') +const { resolve } = require('path') -test.test('fails on invalid arguments', (t) => { - const SetScript = requireInject('../../lib/set-script.js', { - npmlog: {}, +const flatOptions = {} +const npm = mockNpm(flatOptions) + +const ERROR_OUTPUT = [] +const WARN_OUTPUT = [] +const SetScript = t.mock('../../lib/set-script.js', { + npmlog: { + error: (...args) => { + ERROR_OUTPUT.push(args) + }, + warn: (...args) => { + WARN_OUTPUT.push(args) + }, + }, +}) +const setScript = new SetScript(npm) + +t.test('completion', t => { + t.test('already have a script name', async t => { + npm.localPrefix = t.testdir({}) + const res = await setScript.completion({conf: {argv: {remain: ['npm', 'run', 'x']}}}) + t.equal(res, undefined) + t.end() + }) + + t.test('no package.json', async t => { + npm.localPrefix = t.testdir({}) + const res = await setScript.completion({conf: {argv: {remain: ['npm', 'run']}}}) + t.strictSame(res, []) + t.end() }) - const setScript = new SetScript({}) + + t.test('has package.json, no scripts', async t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({}), + }) + const res = await setScript.completion({conf: {argv: {remain: ['npm', 'run']}}}) + t.strictSame(res, []) + t.end() + }) + + t.test('has package.json, with scripts', async t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + scripts: { hello: 'echo hello', world: 'echo world' }, + }), + }) + const res = await setScript.completion({conf: {argv: {remain: ['npm', 'run']}}}) + t.strictSame(res, ['hello', 'world']) + t.end() + }) + + t.end() +}) + +t.test('fails on invalid arguments', (t) => { t.plan(3) setScript.exec(['arg1'], (fail) => t.match(fail, /Expected 2 arguments: got 1/)) 
setScript.exec(['arg1', 'arg2', 'arg3'], (fail) => t.match(fail, /Expected 2 arguments: got 3/)) setScript.exec(['arg1', 'arg2', 'arg3', 'arg4'], (fail) => t.match(fail, /Expected 2 arguments: got 4/)) }) -test.test('fails if run in postinstall script', (t) => { - const originalVar = process.env.npm_lifecycle_event - process.env.npm_lifecycle_event = 'postinstall' - const SetScript = requireInject('../../lib/set-script.js', { - npmlog: {}, + +t.test('fails if run in postinstall script', (t) => { + const lifecycleEvent = process.env.npm_lifecycle_event + t.teardown(() => { + process.env.npm_lifecycle_event = lifecycleEvent }) + + process.env.npm_lifecycle_event = 'postinstall' t.plan(1) - const setScript = new SetScript({}) setScript.exec(['arg1', 'arg2'], (fail) => t.equal(fail.toString(), 'Error: Scripts can’t set from the postinstall script')) - process.env.npm_lifecycle_event = originalVar }) -test.test('fails when package.json not found', (t) => { - const SetScript = requireInject('../../lib/set-script.js') - const setScript = new SetScript({}) + +t.test('fails when package.json not found', (t) => { t.plan(1) setScript.exec(['arg1', 'arg2'], (fail) => t.match(fail, /package.json not found/)) }) -test.test('fails on invalid JSON', (t) => { - const SetScript = requireInject('../../lib/set-script.js', { - fs: { - readFile: () => {}, // read-package-json-fast explodes w/o this - readFileSync: (name, charcode) => { - return 'iamnotjson' - }, - }, + +t.test('fails on invalid JSON', (t) => { + npm.localPrefix = t.testdir({ + 'package.json': 'iamnotjson', }) - const setScript = new SetScript({}) + t.plan(1) setScript.exec(['arg1', 'arg2'], (fail) => t.match(fail, /Invalid package.json: JSONParseError/)) }) -test.test('creates scripts object', (t) => { - var mockFile = '' - const SetScript = requireInject('../../lib/set-script.js', { - fs: { - readFileSync: (name, charcode) => { - return '{}' - }, - writeFileSync: (location, inner) => { - mockFile = inner - }, - }, - 'read-package-json-fast': async function (filename) { - return { - [Symbol.for('indent')]: ' ', - [Symbol.for('newline')]: '\n', - } - }, + +t.test('creates scripts object', (t) => { + npm.localPrefix = t.testdir({ + 'package.json': '{}', }) - const setScript = new SetScript({}) + t.plan(2) setScript.exec(['arg1', 'arg2'], (error) => { t.equal(error, undefined) - t.assert(parseJSON(mockFile), {scripts: {arg1: 'arg2'}}) + const contents = fs.readFileSync(resolve(npm.localPrefix, 'package.json')) + t.ok(parseJSON(contents), {scripts: {arg1: 'arg2'}}) }) }) -test.test('warns before overwriting', (t) => { - var warningListened = '' - const SetScript = requireInject('../../lib/set-script.js', { - fs: { - readFileSync: (name, charcode) => { - return JSON.stringify({ - scripts: { - arg1: 'blah', - }, - }) - }, - writeFileSync: (name, content) => {}, - }, - 'read-package-json-fast': async function (filename) { - return { - [Symbol.for('indent')]: ' ', - [Symbol.for('newline')]: '\n', - } - }, - npmlog: { - warn: (prefix, message) => { - warningListened = message + +t.test('warns when overwriting', (t) => { + WARN_OUTPUT.length = 0 + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + scripts: { + arg1: 'blah', }, - }, + }), }) - const setScript = new SetScript({}) + t.plan(2) setScript.exec(['arg1', 'arg2'], (error) => { t.equal(error, undefined, 'no error') - t.equal(warningListened, 'Script "arg1" was overwritten') + t.hasStrict(WARN_OUTPUT[0], ['set-script', 'Script "arg1" was overwritten'], 'warning was logged') }) }) 
-test.test('provided indentation and eol is used', (t) => { - var mockFile = '' - const SetScript = requireInject('../../lib/set-script.js', { - fs: { - readFileSync: (name, charcode) => { - return '{}' - }, - writeFileSync: (name, content) => { - mockFile = content - }, + +t.test('workspaces', (t) => { + ERROR_OUTPUT.length = 0 + WARN_OUTPUT.length = 0 + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], + }), + 'workspace-a': { + 'package.json': '{}', }, - 'read-package-json-fast': async function (filename) { - return { - [Symbol.for('indent')]: ' '.repeat(6), - [Symbol.for('newline')]: '\r\n', - } + 'workspace-b': { + 'package.json': '"notajsonobject"', }, - }) - const setScript = new SetScript({}) - t.plan(3) - setScript.exec(['arg1', 'arg2'], (error) => { - t.equal(error, undefined) - t.equal(mockFile.split('\r\n').length > 1, true) - t.equal(mockFile.split('\r\n').every((value) => !value.startsWith(' ') || value.startsWith(' '.repeat(6))), true) - }) -}) -test.test('goes to default when undefined indent and eol provided', (t) => { - var mockFile = '' - const SetScript = requireInject('../../lib/set-script.js', { - fs: { - readFileSync: (name, charcode) => { - return '{}' - }, - writeFileSync: (name, content) => { - mockFile = content - }, - }, - 'read-package-json-fast': async function (filename) { - return { - [Symbol.for('indent')]: undefined, - [Symbol.for('newline')]: undefined, - } + 'workspace-c': { + 'package.json': JSON.stringify({ + scripts: { + arg1: 'test', + }, + }, null, ' '.repeat(6)).replace(/\n/g, '\r\n'), }, }) - const setScript = new SetScript({}) - t.plan(3) - setScript.exec(['arg1', 'arg2'], (error) => { - t.equal(error, undefined) - t.equal(mockFile.split('\n').length > 1, true) - t.equal(mockFile.split('\n').every((value) => !value.startsWith(' ') || value.startsWith(' ')), true) + + setScript.execWorkspaces(['arg1', 'arg2'], [], (error) => { + t.equal(error, undefined, 'did not callback with an error') + t.equal(process.exitCode, 1, 'did set the exitCode to 1') + // force the exitCode back to 0 to make tap happy + process.exitCode = 0 + + // workspace-a had the script added + const contentsA = fs.readFileSync(resolve(npm.localPrefix, 'workspace-a', 'package.json')) + const dataA = parseJSON(contentsA) + t.hasStrict(dataA, { scripts: { arg1: 'arg2' } }, 'defined the script') + + // workspace-b logged an error + t.strictSame(ERROR_OUTPUT, [ + ['set-script', `Can't update invalid package.json data`], + [' in workspace: workspace-b'], + [` at location: ${resolve(npm.localPrefix, 'workspace-b')}`], + ], 'logged workspace-b error') + + // workspace-c overwrite a script and logged a warning + const contentsC = fs.readFileSync(resolve(npm.localPrefix, 'workspace-c', 'package.json')) + const dataC = parseJSON(contentsC) + t.hasStrict(dataC, { scripts: { arg1: 'arg2' } }, 'defined the script') + t.equal(dataC[Symbol.for('indent')], ' '.repeat(6), 'kept the correct indent') + t.equal(dataC[Symbol.for('newline')], '\r\n', 'kept the correct newline') + t.match(WARN_OUTPUT, [ + ['set-script', 'Script "arg1" was overwritten'], + [' in workspace: workspace-c'], + [` at location: ${resolve(npm.localPrefix, 'workspace-c')}`], + ], 'logged workspace-c warning') + t.end() }) }) diff --git a/test/lib/set.js b/test/lib/set.js index 3b38fdc276ce2..f51065a4b293d 100644 --- a/test/lib/set.js +++ b/test/lib/set.js @@ -1,5 +1,4 @@ const t = require('tap') -const 
requireInject = require('require-inject') let configArgs = null const npm = { @@ -11,7 +10,7 @@ const npm = { }, } -const Set = requireInject('../../lib/set.js') +const Set = t.mock('../../lib/set.js') const set = new Set(npm) t.test('npm set - no args', t => { diff --git a/test/lib/shrinkwrap.js b/test/lib/shrinkwrap.js index dc4bc3b220ca2..ab3b8d0ffe447 100644 --- a/test/lib/shrinkwrap.js +++ b/test/lib/shrinkwrap.js @@ -1,16 +1,20 @@ const t = require('tap') const fs = require('fs') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') -const npm = { +const config = { + global: false, +} +const flatOptions = { + depth: 0, +} +const npm = mockNpm({ + config, + flatOptions, lockfileVersion: 2, globalDir: '', - flatOptions: { - depth: 0, - global: false, - }, prefix: '', -} +}) const tree = { meta: { hiddenLockfile: null, @@ -32,13 +36,13 @@ const mocks = { } }, '../../lib/utils/usage.js': () => 'usage instructions', + '../../lib/utils/config/definitions.js': {}, } -t.afterEach(cb => { +t.afterEach(() => { npm.prefix = '' - npm.flatOptions.global = false + config.global = false npm.globalDir = '' - cb() }) t.test('no args', t => { @@ -48,9 +52,9 @@ t.test('no args', t => { class Arborist { constructor (args) { - t.deepEqual( + t.same( args, - { ...npm.flatOptions, path: npm.prefix }, + { ...flatOptions, path: npm.prefix }, 'should call arborist constructor with expected args' ) } @@ -79,7 +83,7 @@ t.test('no args', t => { }, } - const Shrinkwrap = requireInject('../../lib/shrinkwrap.js', { + const Shrinkwrap = t.mock('../../lib/shrinkwrap.js', { ...mocks, npmlog, '@npmcli/arborist': Arborist, @@ -99,9 +103,9 @@ t.test('no virtual tree', t => { class Arborist { constructor (args) { - t.deepEqual( + t.same( args, - { ...npm.flatOptions, path: npm.prefix }, + { ...flatOptions, path: npm.prefix }, 'should call arborist constructor with expected args' ) } @@ -134,7 +138,7 @@ t.test('no virtual tree', t => { }, } - const Shrinkwrap = requireInject('../../lib/shrinkwrap.js', { + const Shrinkwrap = t.mock('../../lib/shrinkwrap.js', { ...mocks, npmlog, '@npmcli/arborist': Arborist, @@ -154,9 +158,9 @@ t.test('existing package-json file', t => { class Arborist { constructor (args) { - t.deepEqual( + t.same( args, - { ...npm.flatOptions, path: npm.prefix }, + { ...flatOptions, path: npm.prefix }, 'should call arborist constructor with expected args' ) } @@ -195,7 +199,7 @@ t.test('existing package-json file', t => { }, } - const Shrinkwrap = requireInject('../../lib/shrinkwrap.js', { + const Shrinkwrap = t.mock('../../lib/shrinkwrap.js', { ...mocks, fs, npmlog, @@ -216,9 +220,9 @@ t.test('update shrinkwrap file version', t => { class Arborist { constructor (args) { - t.deepEqual( + t.same( args, - { ...npm.flatOptions, path: npm.prefix }, + { ...flatOptions, path: npm.prefix }, 'should call arborist constructor with expected args' ) } @@ -250,7 +254,7 @@ t.test('update shrinkwrap file version', t => { }, } - const Shrinkwrap = requireInject('../../lib/shrinkwrap.js', { + const Shrinkwrap = t.mock('../../lib/shrinkwrap.js', { ...mocks, npmlog, '@npmcli/arborist': Arborist, @@ -270,9 +274,9 @@ t.test('update to date shrinkwrap file', t => { class Arborist { constructor (args) { - t.deepEqual( + t.same( args, - { ...npm.flatOptions, path: npm.prefix }, + { ...flatOptions, path: npm.prefix }, 'should call arborist constructor with expected args' ) } @@ -304,7 +308,7 @@ t.test('update to date shrinkwrap file', t => { }, } - const Shrinkwrap = 
requireInject('../../lib/shrinkwrap.js', { + const Shrinkwrap = t.mock('../../lib/shrinkwrap.js', { ...mocks, npmlog, '@npmcli/arborist': Arborist, @@ -318,9 +322,9 @@ t.test('update to date shrinkwrap file', t => { }) t.test('shrinkwrap --global', t => { - const Shrinkwrap = requireInject('../../lib/shrinkwrap.js', mocks) + const Shrinkwrap = t.mock('../../lib/shrinkwrap.js', mocks) - npm.flatOptions.global = true + config.global = true const shrinkwrap = new Shrinkwrap(npm) shrinkwrap.exec([], err => { @@ -336,7 +340,7 @@ t.test('shrinkwrap --global', t => { t.test('works without fs.promises', async t => { t.doesNotThrow(() => { - const Shrinkwrap = requireInject('../../lib/shrinkwrap.js', { + const Shrinkwrap = t.mock('../../lib/shrinkwrap.js', { ...mocks, fs: { ...fs, promises: null }, }) diff --git a/test/lib/star.js b/test/lib/star.js index 64efd9ef8c7ed..8820d6e9cfb0b 100644 --- a/test/lib/star.js +++ b/test/lib/star.js @@ -1,31 +1,36 @@ -const requireInject = require('require-inject') const t = require('tap') +const { fake: mockNpm } = require('../fixtures/mock-npm') let result = '' const noop = () => null -const npm = { config: { get () {} }, flatOptions: { unicode: false } } +const config = { + unicode: false, + 'star.unstar': false, +} +const npm = mockNpm({ + config, + output: (...msg) => { + result += msg.join('\n') + }, +}) const npmFetch = { json: noop } const npmlog = { error: noop, info: noop, verbose: noop } const mocks = { npmlog, 'npm-registry-fetch': npmFetch, - '../../lib/utils/output.js': (...msg) => { - result += msg.join('\n') - }, '../../lib/utils/get-identity.js': async () => 'foo', '../../lib/utils/usage.js': () => 'usage instructions', } -const Star = requireInject('../../lib/star.js', mocks) +const Star = t.mock('../../lib/star.js', mocks) const star = new Star(npm) -t.afterEach(cb => { - npm.config = { get () {} } - npm.flatOptions.unicode = false +t.afterEach(() => { + config.unicode = false + config['star.unstar'] = false npmlog.info = noop result = '' - cb() }) t.test('no args', t => { @@ -70,7 +75,7 @@ t.test('star a package', t => { t.test('unstar a package', t => { t.plan(4) const pkgName = '@npmcli/arborist' - npm.config.get = key => key === 'star.unstar' + config['star.unstar'] = true npmFetch.json = async (uri, opts) => ({ _id: pkgName, _rev: 'hash', @@ -97,7 +102,7 @@ t.test('unstar a package', t => { t.test('unicode', async t => { t.test('star a package', t => { - npm.flatOptions.unicode = true + config.unicode = true npmFetch.json = async (uri, opts) => ({}) star.exec(['pkg'], err => { if (err) @@ -112,8 +117,8 @@ t.test('unicode', async t => { }) t.test('unstar a package', t => { - npm.flatOptions.unicode = true - npm.config.get = key => key === 'star.unstar' + config.unicode = true + config['star.unstar'] = true npmFetch.json = async (uri, opts) => ({}) star.exec(['pkg'], err => { if (err) @@ -129,7 +134,7 @@ t.test('unicode', async t => { }) t.test('logged out user', t => { - const Star = requireInject('../../lib/star.js', { + const Star = t.mock('../../lib/star.js', { ...mocks, '../../lib/utils/get-identity.js': async () => undefined, }) diff --git a/test/lib/stars.js b/test/lib/stars.js index 383b5adf42677..bf345aeb4cf42 100644 --- a/test/lib/stars.js +++ b/test/lib/stars.js @@ -1,30 +1,31 @@ -const requireInject = require('require-inject') const t = require('tap') let result = '' const noop = () => null -const npm = { config: { get () {} }, flatOptions: {} } +const npm = { + config: { get () {} }, + flatOptions: {}, + output: (...msg) => { + 
result = [result, ...msg].join('\n') + }, +} const npmFetch = { json: noop } const npmlog = { warn: noop } const mocks = { npmlog, 'npm-registry-fetch': npmFetch, - '../../lib/utils/output.js': (...msg) => { - result = [result, ...msg].join('\n') - }, '../../lib/utils/get-identity.js': async () => 'foo', '../../lib/utils/usage.js': () => 'usage instructions', } -const Stars = requireInject('../../lib/stars.js', mocks) +const Stars = t.mock('../../lib/stars.js', mocks) const stars = new Stars(npm) -t.afterEach(cb => { +t.afterEach(() => { npm.config = { get () {} } npmlog.warn = noop result = '' - cb() }) t.test('no args', t => { diff --git a/test/lib/start.js b/test/lib/start.js index 9a3328309b84a..4e77f9691b815 100644 --- a/test/lib/start.js +++ b/test/lib/start.js @@ -10,7 +10,6 @@ const npm = { } const Start = require('../../lib/start.js') const start = new Start(npm) -t.equal(start.usage, 'npm start [-- <args>]') start.exec(['foo'], () => { t.match(runArgs, ['start', 'foo']) t.end() diff --git a/test/lib/stop.js b/test/lib/stop.js index e6cb193b62973..92ca84bd8741a 100644 --- a/test/lib/stop.js +++ b/test/lib/stop.js @@ -10,7 +10,6 @@ const npm = { } const Stop = require('../../lib/stop.js') const stop = new Stop(npm) -t.equal(stop.usage, 'npm stop [-- <args>]') stop.exec(['foo'], () => { t.match(runArgs, ['stop', 'foo']) t.end() diff --git a/test/lib/team.js b/test/lib/team.js index a264597258d5a..68ac28fff36ff 100644 --- a/test/lib/team.js +++ b/test/lib/team.js @@ -1,5 +1,4 @@ const t = require('tap') -const requireInject = require('require-inject') let result = '' const libnpmteam = { @@ -10,24 +9,25 @@ const libnpmteam = { async lsUsers () {}, async rm () {}, } -const npm = { flatOptions: {} } +const npm = { + flatOptions: {}, + output: (...msg) => { + result += msg.join('\n') + }, +} const mocks = { libnpmteam, 'cli-columns': a => a.join(' '), - '../../lib/utils/output.js': (...msg) => { - result += msg.join('\n') - }, '../../lib/utils/otplease.js': async (opts, fn) => fn(opts), '../../lib/utils/usage.js': () => 'usage instructions', } -t.afterEach(cb => { +t.afterEach(() => { result = '' npm.flatOptions = {} - cb() }) -const Team = requireInject('../../lib/team.js', mocks) +const Team = t.mock('../../lib/team.js', mocks) const team = new Team(npm) t.test('no args', t => { @@ -74,7 +74,7 @@ t.test('team add <scope:team> <user>', t => { if (err) throw err - t.deepEqual( + t.same( JSON.parse(result), { added: true, @@ -94,7 +94,7 @@ t.test('team add <scope:team> <user>', t => { if (err) throw err - t.deepEqual(result, '', 'should not output success if silent') + t.same(result, '', 'should not output success if silent') t.end() }) }) @@ -135,7 +135,7 @@ t.test('team create <scope:team>', t => { if (err) throw err - t.deepEqual( + t.same( JSON.parse(result), { created: true, @@ -154,7 +154,7 @@ t.test('team create <scope:team>', t => { if (err) throw err - t.deepEqual(result, '', 'should not output create success if silent') + t.same(result, '', 'should not output create success if silent') t.end() }) }) @@ -192,7 +192,7 @@ t.test('team destroy <scope:team>', t => { if (err) throw err - t.deepEqual( + t.same( JSON.parse(result), { deleted: true, @@ -211,7 +211,7 @@ t.test('team destroy <scope:team>', t => { if (err) throw err - t.deepEqual(result, '', 'should not output destroy if silent') + t.same(result, '', 'should not output destroy if silent') t.end() }) }) @@ -230,7 +230,7 @@ t.test('team ls <scope>', t => { }, } - const Team = requireInject('../../lib/team.js', { + const Team 
= t.mock('../../lib/team.js', { ...mocks, libnpmteam, }) @@ -265,7 +265,7 @@ t.test('team ls <scope>', t => { if (err) throw err - t.deepEqual( + t.same( JSON.parse(result), [ 'npmcli:designers', @@ -285,7 +285,7 @@ t.test('team ls <scope>', t => { if (err) throw err - t.deepEqual(result, '', 'should not list teams if silent') + t.same(result, '', 'should not list teams if silent') t.end() }) }) @@ -297,7 +297,7 @@ t.test('team ls <scope>', t => { }, } - const Team = requireInject('../../lib/team.js', { + const Team = t.mock('../../lib/team.js', { ...mocks, libnpmteam, }) @@ -319,7 +319,7 @@ t.test('team ls <scope>', t => { }, } - const Team = requireInject('../../lib/team.js', { + const Team = t.mock('../../lib/team.js', { ...mocks, libnpmteam, }) @@ -343,7 +343,7 @@ t.test('team ls <scope:team>', t => { return ['nlf', 'ruyadorno', 'darcyclarke', 'isaacs'] }, } - const Team = requireInject('../../lib/team.js', { + const Team = t.mock('../../lib/team.js', { ...mocks, libnpmteam, }) @@ -378,7 +378,7 @@ t.test('team ls <scope:team>', t => { if (err) throw err - t.deepEqual( + t.same( JSON.parse(result), [ 'darcyclarke', @@ -399,7 +399,7 @@ t.test('team ls <scope:team>', t => { if (err) throw err - t.deepEqual(result, '', 'should not output users if silent') + t.same(result, '', 'should not output users if silent') t.end() }) }) @@ -411,7 +411,7 @@ t.test('team ls <scope:team>', t => { }, } - const Team = requireInject('../../lib/team.js', { + const Team = t.mock('../../lib/team.js', { ...mocks, libnpmteam, }) @@ -433,7 +433,7 @@ t.test('team ls <scope:team>', t => { }, } - const Team = requireInject('../../lib/team.js', { + const Team = t.mock('../../lib/team.js', { ...mocks, libnpmteam, }) @@ -481,7 +481,7 @@ t.test('team rm <scope:team> <user>', t => { if (err) throw err - t.deepEqual( + t.same( JSON.parse(result), { removed: true, @@ -501,7 +501,7 @@ t.test('team rm <scope:team> <user>', t => { if (err) throw err - t.deepEqual(result, '', 'should not output rm result if silent') + t.same(result, '', 'should not output rm result if silent') t.end() }) }) diff --git a/test/lib/test.js b/test/lib/test.js index f6f3d7afb858d..c151b1e825343 100644 --- a/test/lib/test.js +++ b/test/lib/test.js @@ -1,5 +1,4 @@ const t = require('tap') -const requireInject = require('require-inject') let RUN_ARGS = null const npm = { commands: { @@ -9,7 +8,7 @@ const npm = { }, }, } -const Test = requireInject('../../lib/test.js') +const Test = require('../../lib/test.js') const test = new Test(npm) t.test('run a test', t => { diff --git a/test/lib/token.js b/test/lib/token.js index 412d2746befd4..94218824d8f74 100644 --- a/test/lib/token.js +++ b/test/lib/token.js @@ -1,5 +1,4 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') const mocks = { profile: {}, @@ -7,9 +6,11 @@ const mocks = { log: {}, readUserInfo: {}, } +const npm = { + output: (...args) => mocks.output(...args), +} -const Token = requireInject('../../lib/token.js', { - '../../lib/utils/output.js': (...args) => mocks.output(...args), +const Token = t.mock('../../lib/token.js', { '../../lib/utils/otplease.js': (opts, fn) => { return Promise.resolve().then(() => fn(opts)) }, @@ -17,11 +18,14 @@ const Token = requireInject('../../lib/token.js', { 'npm-profile': mocks.profile, npmlog: mocks.log, }) -const token = new Token({}) + +const token = new Token(npm) const tokenWithMocks = (mockRequests) => { for (const mod in mockRequests) { - if (mod !== 'npm') { + if (mod === 'npm') + 
mockRequests.npm = { ...npm, ...mockRequests.npm } + else { if (typeof mockRequests[mod] === 'function') mocks[mod] = mockRequests[mod] else { @@ -44,11 +48,11 @@ const tokenWithMocks = (mockRequests) => { } } - const token = new Token(mockRequests.npm || {}) + const token = new Token(mockRequests.npm || npm) return [token, reset] } -test('completion', (t) => { +t.test('completion', (t) => { t.plan(5) const testComp = (argv, expect) => { @@ -66,7 +70,7 @@ test('completion', (t) => { ) }) -test('token foobar', (t) => { +t.test('token foobar', (t) => { t.plan(2) const [, reset] = tokenWithMocks({ @@ -79,14 +83,14 @@ test('token foobar', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['foobar'], (err) => { t.match(err.message, 'foobar is not a recognized subcommand') }) }) -test('token list', (t) => { +t.test('token list', (t) => { t.plan(15) const now = new Date().toISOString() @@ -147,14 +151,14 @@ test('token list', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec([], (err) => { - t.ifError(err, 'npm token list') + t.error(err, 'npm token list') }) }) -test('token list json output', (t) => { +t.test('token list json output', (t) => { t.plan(8) const now = new Date().toISOString() @@ -201,14 +205,14 @@ test('token list json output', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['list'], (err) => { - t.ifError(err, 'npm token list') + t.error(err, 'npm token list') }) }) -test('token list parseable output', (t) => { +t.test('token list parseable output', (t) => { t.plan(12) const now = new Date().toISOString() @@ -269,14 +273,14 @@ test('token list parseable output', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['list'], (err) => { - t.ifError(err, 'npm token list') + t.error(err, 'npm token list') }) }) -test('token revoke', (t) => { +t.test('token revoke', (t) => { t.plan(10) const [token, reset] = tokenWithMocks({ @@ -322,14 +326,14 @@ test('token revoke', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['rm', 'abcd'], (err) => { - t.ifError(err, 'npm token rm') + t.error(err, 'npm token rm') }) }) -test('token revoke multiple tokens', (t) => { +t.test('token revoke multiple tokens', (t) => { t.plan(10) const [token, reset] = tokenWithMocks({ @@ -374,14 +378,14 @@ test('token revoke multiple tokens', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['revoke', 'abcd', 'efgh'], (err) => { - t.ifError(err, 'npm token rm') + t.error(err, 'npm token rm') }) }) -test('token revoke json output', (t) => { +t.test('token revoke json output', (t) => { t.plan(10) const [token, reset] = tokenWithMocks({ @@ -426,14 +430,14 @@ test('token revoke json output', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['delete', 'abcd'], (err) => { - t.ifError(err, 'npm token rm') + t.error(err, 'npm token rm') }) }) -test('token revoke parseable output', (t) => { +t.test('token revoke parseable output', (t) => { t.plan(9) const [token, reset] = tokenWithMocks({ @@ -476,14 +480,14 @@ test('token revoke parseable output', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['remove', 'abcd'], (err) => { - t.ifError(err, 'npm token rm') + t.error(err, 'npm token rm') }) }) -test('token revoke by token', (t) => { +t.test('token revoke by token', (t) => { t.plan(9) const [token, reset] = tokenWithMocks({ @@ -526,14 +530,14 @@ test('token revoke by token', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['rm', 'efgh5678'], (err) => { - t.ifError(err, 'npm 
token rm') + t.error(err, 'npm token rm') }) }) -test('token revoke requires an id', (t) => { +t.test('token revoke requires an id', (t) => { t.plan(2) const [token, reset] = tokenWithMocks({ @@ -546,14 +550,14 @@ test('token revoke requires an id', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['rm'], (err) => { t.match(err.message, '`<tokenKey>` argument is required') }) }) -test('token revoke ambiguous id errors', (t) => { +t.test('token revoke ambiguous id errors', (t) => { t.plan(7) const [token, reset] = tokenWithMocks({ @@ -591,14 +595,14 @@ test('token revoke ambiguous id errors', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['rm', 'abcd'], (err) => { t.match(err.message, 'Token ID "abcd" was ambiguous') }) }) -test('token revoke unknown id errors', (t) => { +t.test('token revoke unknown id errors', (t) => { t.plan(7) const [token, reset] = tokenWithMocks({ @@ -635,14 +639,14 @@ test('token revoke unknown id errors', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['rm', 'efgh'], (err) => { t.match(err.message, 'Unknown token id or value "efgh".') }) }) -test('token create', (t) => { +t.test('token create', (t) => { t.plan(15) const now = new Date().toISOString() @@ -699,14 +703,14 @@ test('token create', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['create'], (err) => { - t.ifError(err, 'npm token create') + t.error(err, 'npm token create') }) }) -test('token create json output', (t) => { +t.test('token create json output', (t) => { t.plan(10) const now = new Date().toISOString() @@ -758,14 +762,14 @@ test('token create json output', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['create'], (err) => { - t.ifError(err, 'npm token create') + t.error(err, 'npm token create') }) }) -test('token create parseable output', (t) => { +t.test('token create parseable output', (t) => { t.plan(12) const now = new Date().toISOString() @@ -824,14 +828,14 @@ test('token create parseable output', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['create'], (err) => { - t.ifError(err, 'npm token create') + t.error(err, 'npm token create') }) }) -test('token create ipv6 cidr', (t) => { +t.test('token create ipv6 cidr', (t) => { t.plan(4) const password = 'thisisnotreallyapassword' @@ -858,7 +862,7 @@ test('token create ipv6 cidr', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['create'], (err) => { t.equal(err.message, 'CIDR whitelist can only contain IPv4 addresses, ::1/128 is IPv6', 'returns correct error') @@ -866,7 +870,7 @@ test('token create ipv6 cidr', (t) => { }) }) -test('token create invalid cidr', (t) => { +t.test('token create invalid cidr', (t) => { t.plan(4) const password = 'thisisnotreallyapassword' @@ -893,7 +897,7 @@ test('token create invalid cidr', (t) => { }, }) - t.tearDown(reset) + t.teardown(reset) token.exec(['create'], (err) => { t.equal(err.message, 'CIDR whitelist contains invalid CIDR entry: apple/cider', 'returns correct error') diff --git a/test/lib/uninstall.js b/test/lib/uninstall.js index c62b59950b894..272adb8683602 100644 --- a/test/lib/uninstall.js +++ b/test/lib/uninstall.js @@ -1,30 +1,29 @@ +const t = require('tap') const fs = require('fs') const { resolve } = require('path') -const t = require('tap') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') -const npm = { +const npm = mockNpm({ globalDir: '', - flatOptions: { + config: { global: false, prefix: '', }, 
localPrefix: '', -} +}) const mocks = { '../../lib/utils/reify-finish.js': () => Promise.resolve(), - '../../lib/utils/usage.js': () => 'usage instructions', } -const Uninstall = requireInject('../../lib/uninstall.js', mocks) +const Uninstall = t.mock('../../lib/uninstall.js', mocks) const uninstall = new Uninstall(npm) -t.afterEach(cb => { +t.afterEach(() => { npm.globalDir = '' npm.prefix = '' + npm.localPrefix = '' npm.flatOptions.global = false npm.flatOptions.prefix = '' - cb() }) t.test('remove single installed lib', t => { @@ -85,13 +84,13 @@ t.test('remove single installed lib', t => { const b = resolve(path, 'node_modules/b') t.ok(() => fs.statSync(b)) - npm.flatOptions.prefix = path + npm.localPrefix = path uninstall.exec(['b'], err => { if (err) throw err - t.throws(() => fs.statSync(b), 'should have removed package from nm') + t.throws(() => fs.statSync(b), 'should have removed package from npm') t.end() }) }) @@ -148,7 +147,7 @@ t.test('remove multiple installed libs', t => { t.ok(() => fs.statSync(a)) t.ok(() => fs.statSync(b)) - npm.flatOptions.prefix = path + npm.localPrefix = path uninstall.exec(['b'], err => { if (err) @@ -195,8 +194,7 @@ t.test('no args global', t => { npm.localPrefix = resolve(path, 'projects', 'a') npm.globalDir = resolve(path, 'lib', 'node_modules') - npm.flatOptions.global = true - npm.flatOptions.prefix = path + npm.config.set('global', true) const a = resolve(path, 'lib/node_modules/a') t.ok(() => fs.statSync(a)) @@ -221,8 +219,7 @@ t.test('no args global but no package.json', t => { uninstall.exec([], err => { t.match( err, - 'usage instructions', - 'should throw usage instructions' + 'npm uninstall' ) t.end() @@ -232,7 +229,7 @@ t.test('no args global but no package.json', t => { t.test('unknown error reading from localPrefix package.json', t => { const path = t.testdir({}) - const Uninstall = requireInject('../../lib/uninstall.js', { + const Uninstall = t.mock('../../lib/uninstall.js', { ...mocks, 'read-package-json-fast': () => Promise.reject(new Error('ERR')), }) diff --git a/test/lib/unpublish.js b/test/lib/unpublish.js index 80a879cb6e6df..9199b8aed9442 100644 --- a/test/lib/unpublish.js +++ b/test/lib/unpublish.js @@ -1,45 +1,50 @@ const t = require('tap') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') let result = '' const noop = () => null -const npm = { - localPrefix: '', - flatOptions: { - force: false, - silent: false, - loglevel: 'silly', - }, +const config = { + force: false, + loglevel: 'silly', } + +const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'pkg', + version: '1.0.0', + }, null, 2), +}) + +const npm = mockNpm({ + localPrefix: testDir, + log: { silly () {}, verbose () {} }, + config, + output: (...msg) => { + result += msg.join('\n') + }, +}) + const mocks = { - npmlog: { silly () {}, verbose () {} }, libnpmaccess: { lsPackages: noop }, libnpmpublish: { unpublish: noop }, - 'npm-package-arg': noop, 'npm-registry-fetch': { json: noop }, - 'read-package-json': cb => cb(), - '../../lib/utils/output.js': (...msg) => { - result += msg.join('\n') - }, '../../lib/utils/otplease.js': async (opts, fn) => fn(opts), - '../../lib/utils/usage.js': () => 'usage instructions', '../../lib/utils/get-identity.js': async () => 'foo', } -t.afterEach(cb => { +t.afterEach(() => { + npm.log = { silly () {}, verbose () {} } + npm.localPrefix = testDir result = '' - npm.flatOptions.force = false - npm.flatOptions.loglevel = 'silly' - npm.flatOptions.silent = false - 
cb() + config['dry-run'] = false + config.force = false + config.loglevel = 'silly' }) t.test('no args --force', t => { - t.plan(9) + config.force = true - npm.flatOptions.force = true - - const npmlog = { + npm.log = { silly (title) { t.equal(title, 'unpublish', 'should silly log args') }, @@ -53,23 +58,12 @@ t.test('no args --force', t => { }, } - const npa = { - resolve (name, version) { - t.equal(name, 'pkg', 'should npa.resolve package name') - t.equal(version, '1.0.0', 'should npa.resolve package version') - return 'pkg@1.0.0' - }, - } - const libnpmpublish = { unpublish (spec, opts) { - t.equal(spec, 'pkg@1.0.0', 'should unpublish expected spec') - t.deepEqual( + t.equal(spec.raw, 'pkg@1.0.0', 'should unpublish expected spec') + t.same( opts, { - force: true, - silent: false, - loglevel: 'silly', publishConfig: undefined, }, 'should unpublish with expected opts' @@ -77,16 +71,11 @@ t.test('no args --force', t => { }, } - const Unpublish = requireInject('../../lib/unpublish.js', { + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, - npmlog, libnpmpublish, - 'npm-package-arg': npa, - 'read-package-json': (path, cb) => cb(null, { - name: 'pkg', - version: '1.0.0', - }), }) + const unpublish = new Unpublish(npm) unpublish.exec([], err => { @@ -98,25 +87,24 @@ t.test('no args --force', t => { '- pkg@1.0.0', 'should output removed pkg@version on success' ) + t.end() }) }) t.test('no args --force missing package.json', t => { - npm.flatOptions.force = true + config.force = true - const Unpublish = requireInject('../../lib/unpublish.js', { + const testDir = t.testdir({}) + npm.localPrefix = testDir + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, - 'read-package-json': (path, cb) => cb(Object.assign( - new Error('ENOENT'), - { code: 'ENOENT' } - )), }) const unpublish = new Unpublish(npm) unpublish.exec([], err => { t.match( err, - /usage instructions/, + /Usage: npm unpublish/, 'should throw usage instructions on missing package.json' ) t.end() @@ -124,9 +112,9 @@ t.test('no args --force missing package.json', t => { }) t.test('no args --force unknown error reading package.json', t => { - npm.flatOptions.force = true + config.force = true - const Unpublish = requireInject('../../lib/unpublish.js', { + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, 'read-package-json': (path, cb) => cb(new Error('ERR')), }) @@ -143,7 +131,7 @@ t.test('no args --force unknown error reading package.json', t => { }) t.test('no args', t => { - const Unpublish = requireInject('../../lib/unpublish.js', { + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, }) const unpublish = new Unpublish(npm) @@ -159,7 +147,7 @@ t.test('no args', t => { }) t.test('too many args', t => { - const Unpublish = requireInject('../../lib/unpublish.js', { + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, }) const unpublish = new Unpublish(npm) @@ -167,7 +155,7 @@ t.test('too many args', t => { unpublish.exec(['a', 'b'], err => { t.match( err, - /usage instructions/, + /Usage: npm unpublish/, 'should throw usage instructions if too many args' ) t.end() @@ -175,46 +163,30 @@ t.test('too many args', t => { }) t.test('unpublish <pkg>@version', t => { - t.plan(7) - - const pa = { - name: 'pkg', - rawSpec: '1.0.0', - type: 'version', - } - - const npmlog = { + npm.log = { silly (title, key, value) { t.equal(title, 'unpublish', 'should silly log args') if (key === 'spec') - t.equal(value, pa, 'should log parsed npa object') + t.match(value, { name: 'pkg', rawSpec: 
'1.0.0' }) else t.equal(value, 'pkg@1.0.0', 'should log originally passed arg') }, } - const npa = () => pa - const libnpmpublish = { unpublish (spec, opts) { - t.equal(spec, pa, 'should unpublish expected parsed spec') - t.deepEqual( + t.equal(spec.raw, 'pkg@1.0.0', 'should unpublish expected parsed spec') + t.same( opts, - { - force: false, - silent: false, - loglevel: 'silly', - }, + {}, 'should unpublish with expected opts' ) }, } - const Unpublish = requireInject('../../lib/unpublish.js', { + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, - npmlog, libnpmpublish, - 'npm-package-arg': npa, }) const unpublish = new Unpublish(npm) @@ -227,25 +199,22 @@ t.test('unpublish <pkg>@version', t => { '- pkg@1.0.0', 'should output removed pkg@version on success' ) + t.end() }) }) t.test('no version found in package.json', t => { - npm.flatOptions.force = true + config.force = true - const npa = () => ({ - name: 'pkg', - type: 'version', + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'pkg', + }, null, 2), }) + npm.localPrefix = testDir - npa.resolve = () => '' - - const Unpublish = requireInject('../../lib/unpublish.js', { + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, - 'npm-package-arg': npa, - 'read-package-json': (path, cb) => cb(null, { - name: 'pkg', - }), }) const unpublish = new Unpublish(npm) @@ -263,15 +232,10 @@ t.test('no version found in package.json', t => { }) t.test('unpublish <pkg> --force no version set', t => { - npm.flatOptions.force = true + config.force = true - const Unpublish = requireInject('../../lib/unpublish.js', { + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, - 'npm-package-arg': () => ({ - name: 'pkg', - rawSpec: '', - type: 'tag', - }), }) const unpublish = new Unpublish(npm) @@ -289,30 +253,129 @@ t.test('unpublish <pkg> --force no version set', t => { }) t.test('silent', t => { - npm.flatOptions.loglevel = 'silent' + config.loglevel = 'silent' - const npa = () => ({ - name: 'pkg', - rawSpec: '1.0.0', - type: 'version', + const Unpublish = t.mock('../../lib/unpublish.js', { + ...mocks, }) + const unpublish = new Unpublish(npm) - npa.resolve = () => '' + unpublish.exec(['pkg@1.0.0'], err => { + if (err) + throw err - const Unpublish = requireInject('../../lib/unpublish.js', { + t.equal( + result, + '', + 'should have no output' + ) + t.end() + }) +}) + +t.test('workspaces', t => { + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'my-cool-pkg', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], + }, null, 2), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.2.3-a', + repository: 'http://repo.workspace-a/', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.2.3-n', + repository: 'https://github.com/npm/workspace-b', + }), + }, + 'workspace-c': { + 'package.json': JSON.stringify({ + name: 'workspace-n', + version: '1.2.3-n', + }), + }, + }) + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, - 'npm-package-arg': npa, }) const unpublish = new Unpublish(npm) + t.test('no force', (t) => { + npm.localPrefix = testDir + unpublish.execWorkspaces([], [], (err) => { + t.match(err, /--force/, 'should require force') + t.end() + }) + }) + + t.test('all workspaces --force', (t) => { + npm.localPrefix = testDir + config.force = true + unpublish.execWorkspaces([], [], (err) => { + t.notOk(err) + t.matchSnapshot(result, 'should output all 
workspaces') + t.end() + }) + }) + + t.test('one workspace --force', (t) => { + npm.localPrefix = testDir + config.force = true + unpublish.execWorkspaces([], ['workspace-a'], (err) => { + t.notOk(err) + t.matchSnapshot(result, 'should output one workspaces') + t.end() + }) + }) + t.end() +}) + +t.test('dryRun with spec', (t) => { + config['dry-run'] = true + const Unpublish = t.mock('../../lib/unpublish.js', { + ...mocks, + libnpmpublish: { unpublish: () => { + throw new Error('should not be called') + } }, + }) + const unpublish = new Unpublish(npm) unpublish.exec(['pkg@1.0.0'], err => { if (err) throw err t.equal( result, - '', - 'should have no output' + '- pkg@1.0.0', + 'should output removed pkg@version on success' + ) + t.end() + }) +}) + +t.test('dryRun with local package', (t) => { + config['dry-run'] = true + config.force = true + const Unpublish = t.mock('../../lib/unpublish.js', { + ...mocks, + libnpmpublish: { unpublish: () => { + throw new Error('should not be called') + } }, + }) + const unpublish = new Unpublish(npm) + unpublish.exec([], err => { + if (err) + throw err + + t.equal( + result, + '- pkg@1.0.0', + 'should output removed pkg@1.0.0 on success' ) t.end() }) @@ -328,7 +391,7 @@ t.test('completion', async t => { } t.test('completing with multiple versions from the registry', async t => { - const Unpublish = requireInject('../../lib/unpublish.js', { + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, libnpmaccess: { async lsPackages () { @@ -338,7 +401,6 @@ t.test('completion', async t => { } }, }, - 'npm-package-arg': require('npm-package-arg'), 'npm-registry-fetch': { async json () { return { @@ -366,7 +428,7 @@ t.test('completion', async t => { }) t.test('no versions retrieved', async t => { - const Unpublish = requireInject('../../lib/unpublish.js', { + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, libnpmaccess: { async lsPackages () { @@ -376,7 +438,6 @@ t.test('completion', async t => { } }, }, - 'npm-package-arg': require('npm-package-arg'), 'npm-registry-fetch': { async json () { return { @@ -399,7 +460,7 @@ t.test('completion', async t => { }) t.test('packages starting with same letters', async t => { - const Unpublish = requireInject('../../lib/unpublish.js', { + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, libnpmaccess: { async lsPackages () { @@ -410,7 +471,6 @@ t.test('completion', async t => { } }, }, - 'npm-package-arg': require('npm-package-arg'), }) const unpublish = new Unpublish(npm) @@ -427,7 +487,7 @@ t.test('completion', async t => { }) t.test('no packages retrieved', async t => { - const Unpublish = requireInject('../../lib/unpublish.js', { + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, libnpmaccess: { async lsPackages () { @@ -447,7 +507,7 @@ t.test('completion', async t => { }) t.test('no pkg name to complete', async t => { - const Unpublish = requireInject('../../lib/unpublish.js', { + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, libnpmaccess: { async lsPackages () { @@ -470,7 +530,7 @@ t.test('completion', async t => { }) t.test('no pkg names retrieved from user account', async t => { - const Unpublish = requireInject('../../lib/unpublish.js', { + const Unpublish = t.mock('../../lib/unpublish.js', { ...mocks, libnpmaccess: { async lsPackages () { @@ -490,7 +550,7 @@ t.test('completion', async t => { }) t.test('logged out user', async t => { - const Unpublish = requireInject('../../lib/unpublish.js', { + const Unpublish = t.mock('../../lib/unpublish.js', { 
...mocks, '../../lib/utils/get-identity.js': () => Promise.reject(new Error('ERR')), }) @@ -505,7 +565,7 @@ t.test('completion', async t => { }) t.test('too many args', async t => { - const Unpublish = requireInject('../../lib/unpublish.js', mocks) + const Unpublish = t.mock('../../lib/unpublish.js', mocks) const unpublish = new Unpublish(npm) await testComp(t, { diff --git a/test/lib/unstar.js b/test/lib/unstar.js index 3f3487176d995..8b853230a6c8b 100644 --- a/test/lib/unstar.js +++ b/test/lib/unstar.js @@ -1,4 +1,3 @@ -const requireInject = require('require-inject') const t = require('tap') t.test('unstar', t => { @@ -10,11 +9,11 @@ t.test('unstar', t => { } exec (args, cb) { - t.deepEqual(args, ['pkg'], 'should forward packages') + t.same(args, ['pkg'], 'should forward packages') cb() } } - const Unstar = requireInject('../../lib/unstar.js', { + const Unstar = t.mock('../../lib/unstar.js', { '../../lib/star.js': Star, }) diff --git a/test/lib/update.js b/test/lib/update.js index 15195573f5a24..487b12e5fa297 100644 --- a/test/lib/update.js +++ b/test/lib/update.js @@ -1,16 +1,18 @@ -const { resolve } = require('path') const t = require('tap') -const requireInject = require('require-inject') +const { resolve } = require('path') +const { fake: mockNpm } = require('../fixtures/mock-npm') +const config = { + depth: 0, + global: false, +} const noop = () => null -const npm = { +const npm = mockNpm({ globalDir: '', - flatOptions: { - depth: 0, - global: false, - }, + log: noop, + config, prefix: '', -} +}) const mocks = { npmlog: { warn () {} }, '@npmcli/arborist': class { @@ -20,11 +22,10 @@ const mocks = { '../../lib/utils/usage.js': () => 'usage instructions', } -t.afterEach(cb => { +t.afterEach(() => { npm.prefix = '' - npm.flatOptions.global = false + config.global = false npm.globalDir = '' - cb() }) t.test('no args', t => { @@ -34,9 +35,14 @@ t.test('no args', t => { class Arborist { constructor (args) { - t.deepEqual( + t.same( args, - { ...npm.flatOptions, path: npm.prefix }, + { + ...npm.flatOptions, + path: npm.prefix, + log: noop, + workspaces: null, + }, 'should call arborist contructor with expected args' ) } @@ -46,10 +52,10 @@ t.test('no args', t => { } } - const Update = requireInject('../../lib/update.js', { + const Update = t.mock('../../lib/update.js', { ...mocks, '../../lib/utils/reify-finish.js': (npm, arb) => { - t.isLike(arb, Arborist, 'should reify-finish with arborist instance') + t.match(arb, Arborist, 'should reify-finish with arborist instance') }, '@npmcli/arborist': Arborist, }) @@ -68,22 +74,27 @@ t.test('with args', t => { class Arborist { constructor (args) { - t.deepEqual( + t.same( args, - { ...npm.flatOptions, path: npm.prefix }, + { + ...npm.flatOptions, + path: npm.prefix, + log: noop, + workspaces: null, + }, 'should call arborist contructor with expected args' ) } reify ({ update }) { - t.deepEqual(update, ['ipt'], 'should update listed deps') + t.same(update, ['ipt'], 'should update listed deps') } } - const Update = requireInject('../../lib/update.js', { + const Update = t.mock('../../lib/update.js', { ...mocks, '../../lib/utils/reify-finish.js': (npm, arb) => { - t.isLike(arb, Arborist, 'should reify-finish with arborist instance') + t.match(arb, Arborist, 'should reify-finish with arborist instance') }, '@npmcli/arborist': Arborist, }) @@ -99,9 +110,9 @@ t.test('update --depth=<number>', t => { t.plan(2) npm.prefix = '/project/a' - npm.flatOptions.depth = 1 + config.depth = 1 - const Update = requireInject('../../lib/update.js', { + const Update = 
t.mock('../../lib/update.js', { ...mocks, npmlog: { warn: (title, msg) => { @@ -131,14 +142,14 @@ t.test('update --global', t => { npm.prefix = '/project/a' npm.globalDir = resolve(process.cwd(), 'global/lib/node_modules') - npm.flatOptions.global = true + config.global = true class Arborist { constructor (args) { const { path, ...opts } = args - t.deepEqual( + t.same( opts, - npm.flatOptions, + { ...npm.flatOptions, log: noop, workspaces: undefined }, 'should call arborist contructor with expected options' ) @@ -152,7 +163,7 @@ t.test('update --global', t => { reify () {} } - const Update = requireInject('../../lib/update.js', { + const Update = t.mock('../../lib/update.js', { ...mocks, '@npmcli/arborist': Arborist, }) diff --git a/test/lib/utils/audit-error.js b/test/lib/utils/audit-error.js index ea7c84373e9f3..c683053cbf787 100644 --- a/test/lib/utils/audit-error.js +++ b/test/lib/utils/audit-error.js @@ -1,25 +1,22 @@ const t = require('tap') -const requireInject = require('require-inject') const LOGS = [] +const OUTPUT = [] +const output = (...msg) => OUTPUT.push(msg) +const auditError = require('../../../lib/utils/audit-error.js') + const npm = { command: null, flatOptions: {}, log: { warn: (...msg) => LOGS.push(msg), }, + output, } -const OUTPUT = [] -const output = (...msg) => OUTPUT.push(msg) -const auditError = requireInject('../../../lib/utils/audit-error.js', { - '../../../lib/utils/output.js': output, -}) - -t.afterEach(cb => { +t.afterEach(() => { npm.flatOptions = {} OUTPUT.length = 0 LOGS.length = 0 - cb() }) t.test('no error, not audit command', t => { diff --git a/test/lib/utils/cleanup-log-files.js b/test/lib/utils/cleanup-log-files.js index 7af0633fe715d..e97cf36b55dec 100644 --- a/test/lib/utils/cleanup-log-files.js +++ b/test/lib/utils/cleanup-log-files.js @@ -1,10 +1,9 @@ const t = require('tap') -const requireInject = require('require-inject') const glob = require('glob') const rimraf = require('rimraf') const mocks = { glob, rimraf } -const cleanup = requireInject('../../../lib/utils/cleanup-log-files.js', { +const cleanup = t.mock('../../../lib/utils/cleanup-log-files.js', { glob: (...args) => mocks.glob(...args), rimraf: (...args) => mocks.rimraf(...args), }) @@ -72,7 +71,7 @@ t.test('rimraf fail', t => { const warnings = [] const warn = (...warning) => warnings.push(basename(warning[2])) return cleanup(cache, 3, warn).then(() => { - t.strictSame(warnings.sort((a, b) => a.localeCompare(b)), [ + t.strictSame(warnings.sort((a, b) => a.localeCompare(b, 'en')), [ '1-debug.log', '2-debug.log', ]) diff --git a/test/lib/utils/completion/installed-deep.js b/test/lib/utils/completion/installed-deep.js index 0e80a5a1983da..21e77a568bd8a 100644 --- a/test/lib/utils/completion/installed-deep.js +++ b/test/lib/utils/completion/installed-deep.js @@ -1,6 +1,5 @@ const { resolve } = require('path') -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') let prefix let globalDir = 'MISSING_GLOBAL_DIR' @@ -12,7 +11,7 @@ const _flatOptions = { }, } const p = '../../../../lib/utils/completion/installed-deep.js' -const installedDeep = requireInject(p) +const installedDeep = require(p) const npm = { flatOptions: _flatOptions, get prefix () { @@ -64,6 +63,15 @@ const fixture = { 'package.json': JSON.stringify({ name: 'c', version: '1.0.0', + dependencies: { + ch: '1.0.0', + }, + }), + }, + ch: { + 'package.json': JSON.stringify({ + name: 'ch', + version: '1.0.0', }), }, d: { @@ -144,7 +152,7 @@ const globalFixture = { }, } -test('get 
list of package names', async t => { +t.test('get list of package names', async t => { const fix = t.testdir({ local: fixture, global: globalFixture, @@ -154,22 +162,22 @@ test('get list of package names', async t => { globalDir = resolve(fix, 'global/node_modules') const res = await installedDeep(npm, null) - t.deepEqual( + t.same( res, [ ['bar', '-g'], ['foo', '-g'], ['a-bar', '-g'], 'a', 'b', 'c', - 'd', 'e', 'f', - 'g', 'bb', + 'ch', 'd', 'e', + 'f', 'g', 'bb', ], 'should return list of package names and global flag' ) t.end() }) -test('get list of package names as global', async t => { +t.test('get list of package names as global', async t => { const fix = t.testdir({ local: fixture, global: globalFixture, @@ -181,7 +189,7 @@ test('get list of package names as global', async t => { _flatOptions.global = true const res = await installedDeep(npm, null) - t.deepEqual( + t.same( res, [ 'bar', @@ -194,7 +202,7 @@ test('get list of package names as global', async t => { t.end() }) -test('limit depth', async t => { +t.test('limit depth', async t => { const fix = t.testdir({ local: fixture, global: globalFixture, @@ -206,15 +214,15 @@ test('limit depth', async t => { _flatOptions.depth = 0 const res = await installedDeep(npm, null) - t.deepEqual( + t.same( res, [ ['bar', '-g'], ['foo', '-g'], 'a', 'b', - 'c', 'd', - 'e', 'f', - 'g', + 'c', 'ch', + 'd', 'e', + 'f', 'g', ], 'should print only packages up to the specified depth' ) @@ -222,7 +230,7 @@ test('limit depth', async t => { t.end() }) -test('limit depth as global', async t => { +t.test('limit depth as global', async t => { const fix = t.testdir({ local: fixture, global: globalFixture, @@ -235,7 +243,7 @@ test('limit depth as global', async t => { _flatOptions.depth = 0 const res = await installedDeep(npm, null) - t.deepEqual( + t.same( res, [ 'bar', diff --git a/test/lib/utils/completion/installed-shallow.js b/test/lib/utils/completion/installed-shallow.js index 1067a50acd4bb..1445cbf2ffb71 100644 --- a/test/lib/utils/completion/installed-shallow.js +++ b/test/lib/utils/completion/installed-shallow.js @@ -1,11 +1,10 @@ -const requireInject = require('require-inject') const flatOptions = { global: false } const npm = { flatOptions } const t = require('tap') const { resolve } = require('path') const p = '../../../../lib/utils/completion/installed-shallow.js' -const installed = requireInject(p) +const installed = require(p) t.test('global not set, include globals with -g', async t => { const dir = t.testdir({ diff --git a/test/lib/utils/config.js b/test/lib/utils/config.js deleted file mode 100644 index 4d4b1a1d1a70d..0000000000000 --- a/test/lib/utils/config.js +++ /dev/null @@ -1,143 +0,0 @@ -const t = require('tap') -const requireInject = require('require-inject') - -// have to fake the node version, or else it'll only pass on this one -Object.defineProperty(process, 'version', { - value: 'v14.8.0', -}) - -t.formatSnapshot = obj => { - if (typeof obj !== 'object' || !obj || !obj.types) - return obj - - return { - ...obj, - defaults: { - ...obj.defaults, - cache: '{CACHE DIR} ' + path.basename(obj.defaults.cache), - }, - types: formatTypes(obj.types), - } -} - -const path = require('path') -const url = require('url') -const semver = require('semver') - -const formatTypes = (types) => Object.entries(types).map(([key, value]) => { - return [key, formatTypeValue(value)] -}).reduce((set, [key, value]) => { - set[key] = value - return set -}, {}) - -const formatTypeValue = (value) => { - if (Array.isArray(value)) - return 
value.map(formatTypeValue) - else if (value === url) - return '{URL MODULE}' - else if (value === path) - return '{PATH MODULE}' - else if (value === semver) - return '{SEMVER MODULE}' - else if (typeof value === 'function') - return `{${value.name} TYPE}` - else - return value -} - -process.env.ComSpec = 'cmd.exe' -process.env.SHELL = '/usr/local/bin/bash' -process.env.LANG = 'UTF-8' -delete process.env.LC_ALL -delete process.env.LC_CTYPE -process.env.EDITOR = 'vim' -process.env.VISUAL = 'mate' - -const networkInterfacesThrow = () => { - throw new Error('no network interfaces for some reason') -} -const networkInterfaces = () => ({ - eth420: [{ address: '127.0.0.1' }], - eth69: [{ address: 'no place like home' }], -}) -const tmpdir = () => '/tmp' -const os = { networkInterfaces, tmpdir } -const pkg = { version: '7.0.0' } - -t.test('working network interfaces, not windows', t => { - const config = requireInject('../../../lib/utils/config.js', { - os, - '@npmcli/ci-detect': () => false, - '../../../lib/utils/is-windows.js': false, - '../../../package.json': pkg, - }) - t.matchSnapshot(config) - t.end() -}) - -t.test('no working network interfaces, on windows', t => { - const config = requireInject('../../../lib/utils/config.js', { - os: { tmpdir, networkInterfaces: networkInterfacesThrow }, - '@npmcli/ci-detect': () => false, - '../../../lib/utils/is-windows.js': true, - '../../../package.json': pkg, - }) - t.matchSnapshot(config) - t.end() -}) - -t.test('no comspec on windows', t => { - delete process.env.ComSpec - const config = requireInject('../../../lib/utils/config.js', { - os: { tmpdir, networkInterfaces: networkInterfacesThrow }, - '@npmcli/ci-detect': () => false, - '../../../lib/utils/is-windows.js': true, - }) - t.equal(config.defaults.shell, 'cmd') - t.end() -}) - -t.test('no shell on posix', t => { - delete process.env.SHELL - const config = requireInject('../../../lib/utils/config.js', { - os, - '@npmcli/ci-detect': () => false, - '../../../lib/utils/is-windows.js': false, - }) - t.equal(config.defaults.shell, 'sh') - t.end() -}) - -t.test('no EDITOR env, use VISUAL', t => { - delete process.env.EDITOR - const config = requireInject('../../../lib/utils/config.js', { - os, - '@npmcli/ci-detect': () => false, - '../../../lib/utils/is-windows.js': false, - }) - t.equal(config.defaults.editor, 'mate') - t.end() -}) - -t.test('no VISUAL, use system default, not windows', t => { - delete process.env.VISUAL - const config = requireInject('../../../lib/utils/config.js', { - os, - '@npmcli/ci-detect': () => false, - '../../../lib/utils/is-windows.js': false, - }) - t.equal(config.defaults.editor, 'vi') - t.end() -}) - -t.test('no VISUAL, use system default, not windows', t => { - delete process.env.VISUAL - const config = requireInject('../../../lib/utils/config.js', { - os, - '@npmcli/ci-detect': () => false, - '../../../lib/utils/is-windows.js': true, - }) - t.equal(config.defaults.editor, 'notepad.exe') - t.end() -}) diff --git a/test/lib/utils/config/definition.js b/test/lib/utils/config/definition.js new file mode 100644 index 0000000000000..a17a1a09a2240 --- /dev/null +++ b/test/lib/utils/config/definition.js @@ -0,0 +1,218 @@ +const t = require('tap') +const Definition = require('../../../../lib/utils/config/definition.js') +const { + typeDefs: { + semver: { type: semver }, + Umask: { type: Umask }, + url: { type: url }, + path: { type: path }, + }, +} = require('@npmcli/config') + +t.test('basic definition', async t => { + const def = new Definition('key', { + default: 'some 
default value', + type: [Number, String], + description: 'just a test thingie', + }) + t.same(def, { + constructor: Definition, + key: 'key', + default: 'some default value', + defaultDescription: '"some default value"', + type: [Number, String], + hint: '<key>', + usage: '--key <key>', + typeDescription: 'Number or String', + description: 'just a test thingie', + envExport: true, + }) + t.matchSnapshot(def.describe(), 'human-readable description') + + const deprecated = new Definition('deprecated', { + deprecated: 'do not use this', + default: 1234, + description: ' it should not be used\n ever\n\n not even once.\n\n', + type: Number, + defaultDescription: 'A number bigger than 1', + typeDescription: 'An expression of a numeric quantity using numerals', + }) + t.matchSnapshot(deprecated.describe(), 'description of deprecated thing') + + const nullOrUmask = new Definition('key', { + default: null, + type: [null, Umask], + description: 'asdf', + }) + t.equal(nullOrUmask.typeDescription, 'null or Octal numeric string in range 0000..0777 (0..511)') + const nullDateOrBool = new Definition('key', { + default: 7, + type: [null, Date, Boolean], + description: 'asdf', + }) + t.equal(nullDateOrBool.typeDescription, 'null, Date, or Boolean') + const manyPaths = new Definition('key', { + default: ['asdf'], + type: [path, Array], + description: 'asdf', + }) + t.equal(manyPaths.typeDescription, 'Path (can be set multiple times)') + const pathOrUrl = new Definition('key', { + default: ['https://example.com'], + type: [path, url], + description: 'asdf', + }) + t.equal(pathOrUrl.typeDescription, 'Path or URL') + const multi12 = new Definition('key', { + default: [], + type: [1, 2, Array], + description: 'asdf', + }) + t.equal(multi12.typeDescription, '1 or 2 (can be set multiple times)') + const multi123 = new Definition('key', { + default: [], + type: [1, 2, 3, Array], + description: 'asdf', + }) + t.equal(multi123.typeDescription, '1, 2, or 3 (can be set multiple times)') + const multi123Semver = new Definition('key', { + default: [], + type: [1, 2, 3, Array, semver], + description: 'asdf', + }) + t.equal(multi123Semver.typeDescription, '1, 2, 3, or SemVer string (can be set multiple times)') + const hasUsage = new Definition('key', { + default: 'test default', + type: String, + description: 'test description', + usage: 'test usage', + }) + t.equal(hasUsage.usage, 'test usage') + const hasShort = new Definition('key', { + default: 'test default', + short: 't', + type: String, + description: 'test description', + }) + t.equal(hasShort.usage, '-t|--key <key>') + const multiHasShort = new Definition('key', { + default: 'test default', + short: 't', + type: [null, String], + description: 'test description', + }) + t.equal(multiHasShort.usage, '-t|--key <key>') + const hardCodedTypes = new Definition('key', { + default: 'test default', + type: ['string1', 'string2'], + description: 'test description', + }) + t.equal(hardCodedTypes.usage, '--key <string1|string2>') + const hardCodedOptionalTypes = new Definition('key', { + default: 'test default', + type: [null, 'string1', 'string2'], + description: 'test description', + }) + t.equal(hardCodedOptionalTypes.usage, '--key <string1|string2>') + const hasHint = new Definition('key', { + default: 'test default', + type: String, + description: 'test description', + hint: '<testparam>', + }) + t.equal(hasHint.usage, '--key <testparam>') + const optionalBool = new Definition('key', { + default: null, + type: [null, Boolean], + description: 'asdf', + }) + 
t.equal(optionalBool.usage, '--key') + + const noExported = new Definition('methane', { + envExport: false, + type: String, + typeDescription: 'Greenhouse Gas', + default: 'CH4', + description: ` + This is bad for the environment, for our children, do not put it there. + `, + }) + t.equal(noExported.envExport, false, 'envExport flag is false') + t.equal(noExported.describe(), `#### \`methane\` + +* Default: "CH4" +* Type: Greenhouse Gas + +This is bad for the environment, for our children, do not put it there. + +This value is not exported to the environment for child processes.`) +}) + +t.test('missing fields', async t => { + t.throws(() => new Definition('lacks-default', { + description: 'no default', + type: String, + }), { message: 'config lacks default: lacks-default' }) + t.throws(() => new Definition('lacks-type', { + description: 'no type', + default: 1234, + }), { message: 'config lacks type: lacks-type' }) + t.throws(() => new Definition(null, { + description: 'falsey key', + default: 1234, + type: Number, + }), { message: 'config lacks key: null' }) + t.throws(() => new Definition('extra-field', { + type: String, + default: 'extra', + extra: 'more than is wanted', + description: 'this is not ok', + }), { message: 'config defines unknown field extra: extra-field' }) +}) + +t.test('long description', async t => { + const { stdout: { columns } } = process + t.teardown(() => process.stdout.columns = columns) + + const long = new Definition('walden', { + description: ` + WHEN I WROTE the following pages, or rather the bulk of them, I lived + alone, in the woods, a mile from any neighbor, in a house which I had + built myself, on the shore of Walden Pond, in Concord, Massachusetts, and + earned my living by the labor of my hands only. I lived there two years + and two months. At present I am a sojourner in civilized life again. + + I should not obtrude my affairs so much on the notice of my readers if + very particular inquiries had not been made by my townsmen concerning my + mode of life, which some would call impertinent, though they do not + appear to me at all impertinent, but, considering the circumstances, very + natural and pertinent. 
+ + \`\`\` + this.is('a', { + code: 'sample', + }) + + with (multiple) { + blocks() + } + \`\`\` + `, + default: true, + type: Boolean, + }) + process.stdout.columns = 40 + t.matchSnapshot(long.describe(), 'cols=40') + + process.stdout.columns = 9000 + t.matchSnapshot(long.describe(), 'cols=9000') + + process.stdout.columns = 0 + t.matchSnapshot(long.describe(), 'cols=0') + + process.stdout.columns = -1 + t.matchSnapshot(long.describe(), 'cols=-1') + + process.stdout.columns = NaN + t.matchSnapshot(long.describe(), 'cols=NaN') +}) diff --git a/test/lib/utils/config/definitions.js b/test/lib/utils/config/definitions.js new file mode 100644 index 0000000000000..63d9bbd195ab2 --- /dev/null +++ b/test/lib/utils/config/definitions.js @@ -0,0 +1,830 @@ +const t = require('tap') + +const { resolve } = require('path') + +// have to fake the node version, or else it'll only pass on this one +Object.defineProperty(process, 'version', { + value: 'v14.8.0', +}) + +// also fake the npm version, so that it doesn't get reset every time +const pkg = require('../../../../package.json') + +// this is a pain to keep typing +const defpath = '../../../../lib/utils/config/definitions.js' + +// set this in the test when we need it +delete process.env.NODE_ENV +const definitions = require(defpath) + +// Tie the definitions to a snapshot so that if they change we are forced to +// update snapshots, which rebuilds the docs +for (const key of Object.keys(definitions)) + t.matchSnapshot(definitions[key].describe(), `config description for ${key}`) + +const isWin = '../../../../lib/utils/is-windows.js' + +// snapshot these just so we note when they change +t.matchSnapshot(Object.keys(definitions), 'all config keys') +t.matchSnapshot(Object.keys(definitions).filter(d => d.flatten), + 'all config keys that are shared to flatOptions') + +t.equal(definitions['npm-version'].default, pkg.version, 'npm-version default') +t.equal(definitions['node-version'].default, process.version, 'node-version default') + +t.test('basic flattening function camelCases from css-case', t => { + const flat = {} + const obj = { 'prefer-online': true } + definitions['prefer-online'].flatten('prefer-online', obj, flat) + t.strictSame(flat, { preferOnline: true }) + t.end() +}) + +t.test('editor', t => { + t.test('has EDITOR and VISUAL, use EDITOR', t => { + process.env.EDITOR = 'vim' + process.env.VISUAL = 'mate' + const defs = t.mock(defpath) + t.equal(defs.editor.default, 'vim') + t.end() + }) + t.test('has VISUAL but no EDITOR, use VISUAL', t => { + delete process.env.EDITOR + process.env.VISUAL = 'mate' + const defs = t.mock(defpath) + t.equal(defs.editor.default, 'mate') + t.end() + }) + t.test('has neither EDITOR nor VISUAL, system specific', t => { + delete process.env.EDITOR + delete process.env.VISUAL + const defsWin = t.mock(defpath, { + [isWin]: true, + }) + t.equal(defsWin.editor.default, 'notepad.exe') + const defsNix = t.mock(defpath, { + [isWin]: false, + }) + t.equal(defsNix.editor.default, 'vi') + t.end() + }) + t.end() +}) + +t.test('shell', t => { + t.test('windows, env.ComSpec then cmd.exe', t => { + process.env.ComSpec = 'command.com' + const defsComSpec = t.mock(defpath, { + [isWin]: true, + }) + t.equal(defsComSpec.shell.default, 'command.com') + delete process.env.ComSpec + const defsNoComSpec = t.mock(defpath, { + [isWin]: true, + }) + t.equal(defsNoComSpec.shell.default, 'cmd') + t.end() + }) + + t.test('nix, SHELL then sh', t => { + process.env.SHELL = '/usr/local/bin/bash' + const defsShell = t.mock(defpath, { + 
[isWin]: false, + }) + t.equal(defsShell.shell.default, '/usr/local/bin/bash') + delete process.env.SHELL + const defsNoShell = t.mock(defpath, { + [isWin]: false, + }) + t.equal(defsNoShell.shell.default, 'sh') + t.end() + }) + + t.end() +}) + +t.test('local-address allowed types', t => { + t.test('get list from os.networkInterfaces', t => { + const os = { + tmpdir: () => '/tmp', + networkInterfaces: () => ({ + eth420: [{ address: '127.0.0.1' }], + eth69: [{ address: 'no place like home' }], + }), + } + const defs = t.mock(defpath, { os }) + t.same(defs['local-address'].type, [ + null, + '127.0.0.1', + 'no place like home', + ]) + t.end() + }) + t.test('handle os.networkInterfaces throwing', t => { + const os = { + tmpdir: () => '/tmp', + networkInterfaces: () => { + throw new Error('no network interfaces for some reason') + }, + } + const defs = t.mock(defpath, { os }) + t.same(defs['local-address'].type, [null]) + t.end() + }) + t.end() +}) + +t.test('unicode allowed?', t => { + const { LC_ALL, LC_CTYPE, LANG } = process.env + t.teardown(() => Object.assign(process.env, { LC_ALL, LC_CTYPE, LANG })) + + process.env.LC_ALL = 'utf8' + process.env.LC_CTYPE = 'UTF-8' + process.env.LANG = 'Unicode utf-8' + + const lcAll = t.mock(defpath) + t.equal(lcAll.unicode.default, true) + process.env.LC_ALL = 'no unicode for youUUUU!' + const noLcAll = t.mock(defpath) + t.equal(noLcAll.unicode.default, false) + + delete process.env.LC_ALL + const lcCtype = t.mock(defpath) + t.equal(lcCtype.unicode.default, true) + process.env.LC_CTYPE = 'something other than unicode version 8' + const noLcCtype = t.mock(defpath) + t.equal(noLcCtype.unicode.default, false) + + delete process.env.LC_CTYPE + const lang = t.mock(defpath) + t.equal(lang.unicode.default, true) + process.env.LANG = 'ISO-8859-1' + const noLang = t.mock(defpath) + t.equal(noLang.unicode.default, false) + t.end() +}) + +t.test('cache', t => { + process.env.LOCALAPPDATA = 'app/data/local' + const defsWinLocalAppData = t.mock(defpath, { + [isWin]: true, + }) + t.equal(defsWinLocalAppData.cache.default, 'app/data/local/npm-cache') + + delete process.env.LOCALAPPDATA + const defsWinNoLocalAppData = t.mock(defpath, { + [isWin]: true, + }) + t.equal(defsWinNoLocalAppData.cache.default, '~/npm-cache') + + const defsNix = t.mock(defpath, { + [isWin]: false, + }) + t.equal(defsNix.cache.default, '~/.npm') + + const flat = {} + defsNix.cache.flatten('cache', { cache: '/some/cache/value' }, flat) + const {join} = require('path') + t.equal(flat.cache, join('/some/cache/value', '_cacache')) + t.equal(flat.npxCache, join('/some/cache/value', '_npx')) + + t.end() +}) + +t.test('flatteners that populate flat.omit array', t => { + t.test('also', t => { + const flat = {} + const obj = {} + + // ignored if setting is not dev or development + obj.also = 'ignored' + definitions.also.flatten('also', obj, flat) + t.strictSame(obj, {also: 'ignored', omit: [], include: []}, 'nothing done') + t.strictSame(flat, {omit: []}, 'nothing done') + + obj.also = 'development' + definitions.also.flatten('also', obj, flat) + t.strictSame(obj, { + also: 'development', + omit: [], + include: ['dev'], + }, 'marked dev as included') + t.strictSame(flat, { omit: [] }, 'nothing omitted, so nothing changed') + + obj.omit = ['dev', 'optional'] + obj.include = [] + definitions.also.flatten('also', obj, flat) + t.strictSame(obj, { + also: 'development', + omit: ['optional'], + include: ['dev'], + }, 'marked dev as included') + t.strictSame(flat, { omit: ['optional'] }, 'removed dev from 
omit') + t.end() + }) + + t.test('include', t => { + const flat = {} + const obj = { include: ['dev'] } + definitions.include.flatten('include', obj, flat) + t.strictSame(flat, {omit: []}, 'not omitting anything') + obj.omit = ['optional', 'dev'] + definitions.include.flatten('include', obj, flat) + t.strictSame(flat, {omit: ['optional']}, 'only omitting optional') + t.end() + }) + + t.test('omit', t => { + const flat = {} + const obj = { include: ['dev'], omit: ['dev', 'optional'] } + definitions.omit.flatten('omit', obj, flat) + t.strictSame(flat, { omit: ['optional'] }, 'do not omit what is included') + + process.env.NODE_ENV = 'production' + const defProdEnv = t.mock(defpath) + t.strictSame(defProdEnv.omit.default, ['dev'], 'omit dev in production') + t.end() + }) + + t.test('only', t => { + const flat = {} + const obj = { only: 'asdf' } + definitions.only.flatten('only', obj, flat) + t.strictSame(flat, { omit: [] }, 'ignored if value is not production') + + obj.only = 'prod' + definitions.only.flatten('only', obj, flat) + t.strictSame(flat, {omit: ['dev']}, 'omit dev when --only=prod') + + obj.include = ['dev'] + flat.omit = [] + definitions.only.flatten('only', obj, flat) + t.strictSame(flat, {omit: []}, 'do not omit when included') + + t.end() + }) + + t.test('optional', t => { + const flat = {} + const obj = { optional: null } + + definitions.optional.flatten('optional', obj, flat) + t.strictSame(obj, { + optional: null, + omit: [], + include: [], + }, 'do nothing by default') + t.strictSame(flat, { omit: [] }, 'do nothing by default') + + obj.optional = true + definitions.optional.flatten('optional', obj, flat) + t.strictSame(obj, { + omit: [], + optional: true, + include: ['optional'], + }, 'include optional when set') + t.strictSame(flat, {omit: []}, 'nothing to omit in flatOptions') + + delete obj.include + obj.optional = false + definitions.optional.flatten('optional', obj, flat) + t.strictSame(obj, { + omit: ['optional'], + optional: false, + include: [], + }, 'omit optional when set false') + t.strictSame(flat, {omit: ['optional']}, 'omit optional when set false') + + t.end() + }) + + t.test('production', t => { + const flat = {} + const obj = {production: true} + definitions.production.flatten('production', obj, flat) + t.strictSame(obj, { + production: true, + omit: ['dev'], + include: [], + }, '--production sets --omit=dev') + t.strictSame(flat, {omit: ['dev']}, '--production sets --omit=dev') + + delete obj.omit + obj.production = false + delete flat.omit + definitions.production.flatten('production', obj, flat) + t.strictSame(obj, { + production: false, + include: ['dev'], + omit: [], + }, '--no-production explicitly includes dev') + t.strictSame(flat, { omit: [] }, '--no-production has no effect') + + obj.production = true + obj.include = ['dev'] + definitions.production.flatten('production', obj, flat) + t.strictSame(obj, { + production: true, + include: ['dev'], + omit: [], + }, 'omit and include dev') + t.strictSame(flat, {omit: []}, 'do not omit dev when included') + + t.end() + }) + + t.test('dev', t => { + const flat = {} + const obj = {dev: true} + definitions.dev.flatten('dev', obj, flat) + t.strictSame(obj, { + dev: true, + omit: [], + include: ['dev'], + }) + t.end() + }) + + t.end() +}) + +t.test('cache-max', t => { + const flat = {} + const obj = { 'cache-max': 10342 } + definitions['cache-max'].flatten('cache-max', obj, flat) + t.strictSame(flat, {}, 'no effect if not <= 0') + obj['cache-max'] = 0 + definitions['cache-max'].flatten('cache-max', obj, flat) 
+ t.strictSame(flat, {preferOnline: true}, 'preferOnline if <= 0') + t.end() +}) + +t.test('cache-min', t => { + const flat = {} + const obj = { 'cache-min': 123 } + definitions['cache-min'].flatten('cache-min', obj, flat) + t.strictSame(flat, {}, 'no effect if not >= 9999') + obj['cache-min'] = 9999 + definitions['cache-min'].flatten('cache-min', obj, flat) + t.strictSame(flat, {preferOffline: true}, 'preferOffline if >=9999') + t.end() +}) + +t.test('color', t => { + const { isTTY } = process.stdout + t.teardown(() => process.stdout.isTTY = isTTY) + + const flat = {} + const obj = { color: 'always' } + + definitions.color.flatten('color', obj, flat) + t.strictSame(flat, {color: true}, 'true when --color=always') + + obj.color = false + definitions.color.flatten('color', obj, flat) + t.strictSame(flat, {color: false}, 'true when --no-color') + + process.stdout.isTTY = false + obj.color = true + definitions.color.flatten('color', obj, flat) + t.strictSame(flat, {color: false}, 'no color when stdout not tty') + process.stdout.isTTY = true + definitions.color.flatten('color', obj, flat) + t.strictSame(flat, {color: true}, '--color turns on color when stdout is tty') + + delete process.env.NO_COLOR + const defsAllowColor = t.mock(defpath) + t.equal(defsAllowColor.color.default, true, 'default true when no NO_COLOR env') + + process.env.NO_COLOR = '0' + const defsNoColor0 = t.mock(defpath) + t.equal(defsNoColor0.color.default, true, 'default true when no NO_COLOR=0') + + process.env.NO_COLOR = '1' + const defsNoColor1 = t.mock(defpath) + t.equal(defsNoColor1.color.default, false, 'default false when no NO_COLOR=1') + + t.end() +}) + +t.test('retry options', t => { + const obj = {} + // <config>: flat.retry[<option>] + const mapping = { + 'fetch-retries': 'retries', + 'fetch-retry-factor': 'factor', + 'fetch-retry-maxtimeout': 'maxTimeout', + 'fetch-retry-mintimeout': 'minTimeout', + } + for (const [config, option] of Object.entries(mapping)) { + const msg = `${config} -> retry.${option}` + const flat = {} + obj[config] = 99 + definitions[config].flatten(config, obj, flat) + t.strictSame(flat, {retry: {[option]: 99}}, msg) + delete obj[config] + } + t.end() +}) + +t.test('search options', t => { + const obj = {} + // <config>: flat.search[<option>] + const mapping = { + description: 'description', + searchexclude: 'exclude', + searchlimit: 'limit', + searchstaleness: 'staleness', + } + + for (const [config, option] of Object.entries(mapping)) { + const msg = `${config} -> search.${option}` + const flat = {} + obj[config] = 99 + definitions[config].flatten(config, obj, flat) + t.strictSame(flat, { search: { limit: 20, [option]: 99 }}, msg) + delete obj[config] + } + + const flat = {} + obj.searchopts = 'a=b&b=c' + definitions.searchopts.flatten('searchopts', obj, flat) + t.strictSame(flat, { + search: { + limit: 20, + opts: Object.assign(Object.create(null), { + a: 'b', + b: 'c', + }), + }, + }, 'searchopts -> querystring.parse() -> search.opts') + delete obj.searchopts + + t.end() +}) + +t.test('noProxy - array', t => { + const obj = { noproxy: ['1.2.3.4,2.3.4.5', '3.4.5.6'] } + const flat = {} + definitions.noproxy.flatten('noproxy', obj, flat) + t.strictSame(flat, { noProxy: '1.2.3.4,2.3.4.5,3.4.5.6' }) + t.end() +}) + +t.test('noProxy - string', t => { + const obj = { noproxy: '1.2.3.4,2.3.4.5,3.4.5.6' } + const flat = {} + definitions.noproxy.flatten('noproxy', obj, flat) + t.strictSame(flat, { noProxy: '1.2.3.4,2.3.4.5,3.4.5.6' }) + t.end() +}) + +t.test('maxSockets', t => { + const obj = 
{ maxsockets: 123 } + const flat = {} + definitions.maxsockets.flatten('maxsockets', obj, flat) + t.strictSame(flat, { maxSockets: 123 }) + t.end() +}) + +t.test('projectScope', t => { + const obj = { scope: 'asdf' } + const flat = {} + definitions.scope.flatten('scope', obj, flat) + t.strictSame(flat, { projectScope: '@asdf' }, 'prepend @ if needed') + + obj.scope = '@asdf' + definitions.scope.flatten('scope', obj, flat) + t.strictSame(flat, { projectScope: '@asdf' }, 'leave untouched if has @') + + t.end() +}) + +t.test('strictSSL', t => { + const obj = { 'strict-ssl': false } + const flat = {} + definitions['strict-ssl'].flatten('strict-ssl', obj, flat) + t.strictSame(flat, { strictSSL: false }) + obj['strict-ssl'] = true + definitions['strict-ssl'].flatten('strict-ssl', obj, flat) + t.strictSame(flat, { strictSSL: true }) + t.end() +}) + +t.test('shrinkwrap/package-lock', t => { + const obj = { shrinkwrap: false } + const flat = {} + definitions.shrinkwrap.flatten('shrinkwrap', obj, flat) + t.strictSame(flat, {packageLock: false}) + obj.shrinkwrap = true + definitions.shrinkwrap.flatten('shrinkwrap', obj, flat) + t.strictSame(flat, {packageLock: true}) + + delete obj.shrinkwrap + obj['package-lock'] = false + definitions['package-lock'].flatten('package-lock', obj, flat) + t.strictSame(flat, {packageLock: false}) + obj['package-lock'] = true + definitions['package-lock'].flatten('package-lock', obj, flat) + t.strictSame(flat, {packageLock: true}) + + t.end() +}) + +t.test('scriptShell', t => { + const obj = { 'script-shell': null } + const flat = {} + definitions['script-shell'].flatten('script-shell', obj, flat) + t.ok(Object.prototype.hasOwnProperty.call(flat, 'scriptShell'), + 'should set it to undefined explicitly') + t.strictSame(flat, { scriptShell: undefined }, 'no other fields') + + obj['script-shell'] = 'asdf' + definitions['script-shell'].flatten('script-shell', obj, flat) + t.strictSame(flat, { scriptShell: 'asdf' }, 'sets if not falsey') + + t.end() +}) + +t.test('defaultTag', t => { + const obj = { tag: 'next' } + const flat = {} + definitions.tag.flatten('tag', obj, flat) + t.strictSame(flat, {defaultTag: 'next'}) + t.end() +}) + +t.test('timeout', t => { + const obj = { 'fetch-timeout': 123 } + const flat = {} + definitions['fetch-timeout'].flatten('fetch-timeout', obj, flat) + t.strictSame(flat, {timeout: 123}) + t.end() +}) + +t.test('saveType', t => { + t.test('save-prod', t => { + const obj = { 'save-prod': false } + const flat = {} + definitions['save-prod'].flatten('save-prod', obj, flat) + t.strictSame(flat, {}, 'no effect if false and missing') + flat.saveType = 'prod' + definitions['save-prod'].flatten('save-prod', obj, flat) + t.strictSame(flat, {}, 'remove if false and set to prod') + flat.saveType = 'dev' + definitions['save-prod'].flatten('save-prod', obj, flat) + t.strictSame(flat, {saveType: 'dev'}, 'ignore if false and not already prod') + obj['save-prod'] = true + definitions['save-prod'].flatten('save-prod', obj, flat) + t.strictSame(flat, {saveType: 'prod'}, 'set to prod if true') + t.end() + }) + + t.test('save-dev', t => { + const obj = { 'save-dev': false } + const flat = {} + definitions['save-dev'].flatten('save-dev', obj, flat) + t.strictSame(flat, {}, 'no effect if false and missing') + flat.saveType = 'dev' + definitions['save-dev'].flatten('save-dev', obj, flat) + t.strictSame(flat, {}, 'remove if false and set to dev') + flat.saveType = 'prod' + obj['save-dev'] = false + definitions['save-dev'].flatten('save-dev', obj, flat) + 
t.strictSame(flat, {saveType: 'prod'}, 'ignore if false and not already dev') + obj['save-dev'] = true + definitions['save-dev'].flatten('save-dev', obj, flat) + t.strictSame(flat, {saveType: 'dev'}, 'set to dev if true') + t.end() + }) + + t.test('save-bundle', t => { + const obj = { 'save-bundle': true } + const flat = {} + definitions['save-bundle'].flatten('save-bundle', obj, flat) + t.strictSame(flat, {saveBundle: true}, 'set the saveBundle flag') + + obj['save-bundle'] = false + definitions['save-bundle'].flatten('save-bundle', obj, flat) + t.strictSame(flat, {saveBundle: false}, 'unset the saveBundle flag') + + obj['save-bundle'] = true + obj['save-peer'] = true + definitions['save-bundle'].flatten('save-bundle', obj, flat) + t.strictSame(flat, {saveBundle: false}, 'false if save-peer is set') + + t.end() + }) + + t.test('save-peer', t => { + const obj = { 'save-peer': false} + const flat = {} + definitions['save-peer'].flatten('save-peer', obj, flat) + t.strictSame(flat, {}, 'no effect if false and not yet set') + + obj['save-peer'] = true + definitions['save-peer'].flatten('save-peer', obj, flat) + t.strictSame(flat, {saveType: 'peer'}, 'set saveType to peer if unset') + + flat.saveType = 'optional' + definitions['save-peer'].flatten('save-peer', obj, flat) + t.strictSame(flat, {saveType: 'peerOptional'}, 'set to peerOptional if optional already') + + definitions['save-peer'].flatten('save-peer', obj, flat) + t.strictSame(flat, {saveType: 'peerOptional'}, 'no effect if already peerOptional') + + obj['save-peer'] = false + definitions['save-peer'].flatten('save-peer', obj, flat) + t.strictSame(flat, {saveType: 'optional'}, 'switch peerOptional to optional if false') + + obj['save-peer'] = false + flat.saveType = 'peer' + definitions['save-peer'].flatten('save-peer', obj, flat) + t.strictSame(flat, {}, 'remove saveType if peer and setting false') + + t.end() + }) + + t.test('save-optional', t => { + const obj = { 'save-optional': false} + const flat = {} + definitions['save-optional'].flatten('save-optional', obj, flat) + t.strictSame(flat, {}, 'no effect if false and not yet set') + + obj['save-optional'] = true + definitions['save-optional'].flatten('save-optional', obj, flat) + t.strictSame(flat, {saveType: 'optional'}, 'set saveType to optional if unset') + + flat.saveType = 'peer' + definitions['save-optional'].flatten('save-optional', obj, flat) + t.strictSame(flat, {saveType: 'peerOptional'}, 'set to peerOptional if peer already') + + definitions['save-optional'].flatten('save-optional', obj, flat) + t.strictSame(flat, {saveType: 'peerOptional'}, 'no effect if already peerOptional') + + obj['save-optional'] = false + definitions['save-optional'].flatten('save-optional', obj, flat) + t.strictSame(flat, {saveType: 'peer'}, 'switch peerOptional to peer if false') + + flat.saveType = 'optional' + definitions['save-optional'].flatten('save-optional', obj, flat) + t.strictSame(flat, {}, 'remove saveType if optional and setting false') + + t.end() + }) + + t.end() +}) + +t.test('cafile -> flat.ca', t => { + const path = t.testdir({ + cafile: ` +-----BEGIN CERTIFICATE----- +XXXX +XXXX +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +YYYY\r +YYYY\r +-----END CERTIFICATE----- +`, + }) + const cafile = resolve(path, 'cafile') + + const obj = {} + const flat = {} + definitions.cafile.flatten('cafile', obj, flat) + t.strictSame(flat, {}, 'no effect if no cafile set') + obj.cafile = resolve(path, 'no/cafile/here') + definitions.cafile.flatten('cafile', obj, flat) + 
t.strictSame(flat, {}, 'no effect if cafile not found') + obj.cafile = cafile + definitions.cafile.flatten('cafile', obj, flat) + t.strictSame(flat, { + ca: [ + '-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----', + '-----BEGIN CERTIFICATE-----\nYYYY\nYYYY\n-----END CERTIFICATE-----', + ], + }) + t.test('error other than ENOENT gets thrown', t => { + const poo = new Error('poo') + const defnReadFileThrows = t.mock(defpath, { + fs: { + ...require('fs'), + readFileSync: () => { + throw poo + }, + }, + }) + t.throws(() => defnReadFileThrows.cafile.flatten('cafile', obj, {}), poo) + t.end() + }) + + t.end() +}) + +t.test('detect CI', t => { + const defnNoCI = t.mock(defpath, { + '@npmcli/ci-detect': () => false, + }) + const defnCIFoo = t.mock(defpath, { + '@npmcli/ci-detect': () => 'foo', + }) + t.equal(defnNoCI['ci-name'].default, null, 'null when not in CI') + t.equal(defnCIFoo['ci-name'].default, 'foo', 'name of CI when in CI') + t.end() +}) + +t.test('user-agent', t => { + const obj = { + 'user-agent': definitions['user-agent'].default, + 'npm-version': '1.2.3', + 'node-version': '9.8.7', + } + const flat = {} + const expectNoCI = `npm/1.2.3 node/9.8.7 ` + + `${process.platform} ${process.arch} workspaces/false` + definitions['user-agent'].flatten('user-agent', obj, flat) + t.equal(flat.userAgent, expectNoCI) + t.equal(process.env.npm_config_user_agent, flat.userAgent, 'npm_user_config environment is set') + t.equal(obj['user-agent'], flat.userAgent, 'config user-agent template is translated') + + obj['ci-name'] = 'foo' + obj['user-agent'] = definitions['user-agent'].default + const expectCI = `${expectNoCI} ci/foo` + definitions['user-agent'].flatten('user-agent', obj, flat) + t.equal(flat.userAgent, expectCI) + t.equal(process.env.npm_config_user_agent, flat.userAgent, 'npm_user_config environment is set') + t.equal(obj['user-agent'], flat.userAgent, 'config user-agent template is translated') + + delete obj['ci-name'] + obj.workspaces = true + obj['user-agent'] = definitions['user-agent'].default + const expectWorkspaces = expectNoCI.replace('workspaces/false', 'workspaces/true') + definitions['user-agent'].flatten('user-agent', obj, flat) + t.equal(flat.userAgent, expectWorkspaces) + t.equal(process.env.npm_config_user_agent, flat.userAgent, 'npm_user_config environment is set') + t.equal(obj['user-agent'], flat.userAgent, 'config user-agent template is translated') + + delete obj.workspaces + obj.workspace = ['foo'] + obj['user-agent'] = definitions['user-agent'].default + definitions['user-agent'].flatten('user-agent', obj, flat) + t.equal(flat.userAgent, expectWorkspaces) + t.equal(process.env.npm_config_user_agent, flat.userAgent, 'npm_user_config environment is set') + t.equal(obj['user-agent'], flat.userAgent, 'config user-agent template is translated') + t.end() +}) + +t.test('save-prefix', t => { + const obj = { + 'save-exact': true, + 'save-prefix': '~1.2.3', + } + const flat = {} + definitions['save-prefix'] + .flatten('save-prefix', { ...obj, 'save-exact': true }, flat) + t.strictSame(flat, { savePrefix: '' }) + definitions['save-prefix'] + .flatten('save-prefix', { ...obj, 'save-exact': false }, flat) + t.strictSame(flat, { savePrefix: '~1.2.3' }) + t.end() +}) + +t.test('save-exact', t => { + const obj = { + 'save-exact': true, + 'save-prefix': '~1.2.3', + } + const flat = {} + definitions['save-exact'] + .flatten('save-exact', { ...obj, 'save-exact': true }, flat) + t.strictSame(flat, { savePrefix: '' }) + definitions['save-exact'] + 
.flatten('save-exact', { ...obj, 'save-exact': false }, flat) + t.strictSame(flat, { savePrefix: '~1.2.3' }) + t.end() +}) + +t.test('location', t => { + const obj = { + global: true, + location: 'user', + } + const flat = {} + definitions.location.flatten('location', obj, flat) + // global = true sets location in both places to global + t.strictSame(flat, { location: 'global' }) + t.strictSame(obj, { global: true, location: 'global' }) + + obj.global = false + obj.location = 'user' + delete flat.global + delete flat.location + + definitions.location.flatten('location', obj, flat) + // global = false leaves location unaltered + t.strictSame(flat, { location: 'user' }) + t.strictSame(obj, { global: false, location: 'user' }) + t.end() +}) diff --git a/test/lib/utils/config/describe-all.js b/test/lib/utils/config/describe-all.js new file mode 100644 index 0000000000000..814d92ac95970 --- /dev/null +++ b/test/lib/utils/config/describe-all.js @@ -0,0 +1,6 @@ +const t = require('tap') +const describeAll = require('../../../../lib/utils/config/describe-all.js') +// this basically ends up being a duplicate of the helpdoc dumped into +// a snapshot, but it verifies that we get the same help output on every +// platform where we run CI. +t.matchSnapshot(describeAll()) diff --git a/test/lib/utils/config/flatten.js b/test/lib/utils/config/flatten.js new file mode 100644 index 0000000000000..7e13563920888 --- /dev/null +++ b/test/lib/utils/config/flatten.js @@ -0,0 +1,38 @@ +const t = require('tap') +const flatten = require('../../../../lib/utils/config/flatten.js') + +require.main.filename = '/path/to/npm' +delete process.env.NODE +process.execPath = '/path/to/node' + +const obj = { + 'save-exact': true, + 'save-prefix': 'ignored', + 'save-dev': true, + '@foobar:registry': 'https://foo.bar.com/', + '//foo.bar.com:_authToken': 'foobarbazquuxasdf', + userconfig: '/path/to/.npmrc', +} + +const flat = flatten(obj) +t.strictSame(flat, { + saveType: 'dev', + savePrefix: '', + '@foobar:registry': 'https://foo.bar.com/', + '//foo.bar.com:_authToken': 'foobarbazquuxasdf', + npmBin: '/path/to/npm', + nodeBin: '/path/to/node', + hashAlgorithm: 'sha1', +}) + +// now flatten something else on top of it. 
+process.env.NODE = '/usr/local/bin/node.exe' +flatten({ 'save-dev': false }, flat) +t.strictSame(flat, { + savePrefix: '', + '@foobar:registry': 'https://foo.bar.com/', + '//foo.bar.com:_authToken': 'foobarbazquuxasdf', + npmBin: '/path/to/npm', + nodeBin: '/usr/local/bin/node.exe', + hashAlgorithm: 'sha1', +}) diff --git a/test/lib/utils/config/index.js b/test/lib/utils/config/index.js new file mode 100644 index 0000000000000..75d72e784fd89 --- /dev/null +++ b/test/lib/utils/config/index.js @@ -0,0 +1,24 @@ +const t = require('tap') +const config = require('../../../../lib/utils/config/index.js') +const flatten = require('../../../../lib/utils/config/flatten.js') +const definitions = require('../../../../lib/utils/config/definitions.js') +const describeAll = require('../../../../lib/utils/config/describe-all.js') +t.matchSnapshot(config.shorthands, 'shorthands') + +// just spot check a few of these to show that we got defaults assembled +t.match(config.defaults, { + registry: definitions.registry.default, + 'init-module': definitions['init-module'].default, +}) + +// is a getter, so changes are reflected +definitions.registry.default = 'https://example.com' +t.strictSame(config.defaults.registry, 'https://example.com') + +t.strictSame(config, { + defaults: config.defaults, + shorthands: config.shorthands, + flatten, + definitions, + describeAll, +}) diff --git a/test/lib/utils/did-you-mean.js b/test/lib/utils/did-you-mean.js index 0c9c95c7f9e60..15712b665be6e 100644 --- a/test/lib/utils/did-you-mean.js +++ b/test/lib/utils/did-you-mean.js @@ -1,7 +1,38 @@ const t = require('tap') +const npm = require('../../../lib/npm.js') + const dym = require('../../../lib/utils/did-you-mean.js') -t.equal(dym('asdfa', ['asdf', 'asfd', 'adfs', 'safd', 'foobarbaz', 'foobar']), - '\nDid you mean this?\n asdf') -t.equal(dym('asdfa', ['asdf', 'sdfa', 'foo', 'bar', 'fdsa']), - '\nDid you mean one of these?\n asdf\n sdfa') -t.equal(dym('asdfa', ['install', 'list', 'test']), '') +t.test('did-you-mean', t => { + npm.load(err => { + t.notOk(err) + t.test('nistall', async t => { + const result = await dym(npm, npm.localPrefix, 'nistall') + t.match(result, 'npm install') + }) + t.test('sttest', async t => { + const result = await dym(npm, npm.localPrefix, 'sttest') + t.match(result, 'npm test') + t.match(result, 'npm run posttest') + }) + t.test('npz', async t => { + const result = await dym(npm, npm.localPrefix, 'npxx') + t.match(result, 'npm exec npx') + }) + t.test('qwuijbo', async t => { + const result = await dym(npm, npm.localPrefix, 'qwuijbo') + t.match(result, '') + }) + t.end() + }) +}) + +t.test('missing bin and script properties', async t => { + const path = t.testdir({ + 'package.json': JSON.stringify({ + name: 'missing-bin', + }), + }) + + const result = await dym(npm, path, 'nistall') + t.match(result, 'npm install') +}) diff --git a/test/lib/utils/error-handler.js b/test/lib/utils/error-handler.js deleted file mode 100644 index b1d3e2ca7ca1a..0000000000000 --- a/test/lib/utils/error-handler.js +++ /dev/null @@ -1,589 +0,0 @@ -/* eslint-disable no-extend-native */ -/* eslint-disable no-global-assign */ -const EventEmitter = require('events') -const requireInject = require('require-inject') -const t = require('tap') - -// NOTE: Although these unit tests may look like the rest on the surface, -// they are in fact very special due to the amount of things hooking directly -// to global process and variables defined in the module scope. 
That makes -// for tests that are very interdependent and their order are important. - -// generic error to be used in tests -const err = Object.assign(new Error('ERROR'), { code: 'ERROR' }) -err.stack = 'Error: ERROR' - -const redactCwd = (path) => { - const normalizePath = p => p - .replace(/\\+/g, '/') - .replace(/\r\n/g, '\n') - return normalizePath(path) - .replace(new RegExp(normalizePath(process.cwd()), 'g'), '{CWD}') -} - -t.cleanSnapshot = (str) => redactCwd(str) - -// internal modules mocks -const cacheFile = { - append: () => null, - write: () => null, -} -const config = { - values: { - cache: 'cachefolder', - timing: true, - }, - loaded: true, - updateNotification: null, - get (key) { - return this.values[key] - }, -} - -const npm = { - version: '1.0.0', - config, - shelloutCommands: ['exec', 'run-script'], -} - -const npmlog = { - disableProgress: () => null, - log (level, ...args) { - this.record.push({ - id: this.record.length, - level, - message: args.reduce((res, i) => `${res} ${i.message ? i.message : i}`, ''), - prefix: level !== 'verbose' ? 'foo' : '', - }) - }, - error (...args) { - this.log('error', ...args) - }, - info (...args) { - this.log('info', ...args) - }, - level: 'silly', - levels: { - silly: 0, - verbose: 1, - info: 2, - error: 3, - silent: 4, - }, - notice (...args) { - this.log('notice', ...args) - }, - record: [], - verbose (...args) { - this.log('verbose', ...args) - }, -} - -// overrides OS type/release for cross platform snapshots -const os = require('os') -os.type = () => 'Foo' -os.release = () => '1.0.0' - -// bootstrap tap before cutting off process ref -t.test('ok', (t) => { - t.ok('ok') - t.end() -}) -// cut off process from script so that it won't quit the test runner -// while trying to run through the myriad of cases -const _process = process -process = Object.assign( - new EventEmitter(), - { - argv: ['/node', ..._process.argv.slice(1)], - cwd: _process.cwd, - env: _process.env, - exit () {}, - exitCode: 0, - version: 'v1.0.0', - stdout: { write (_, cb) { - cb() - } }, - stderr: { write () {} }, - } -) -// needs to put process back in its place -// in order for tap to exit properly -t.teardown(() => { - process = _process -}) - -const mocks = { - npmlog, - '../../../lib/npm.js': npm, - '../../../lib/utils/error-message.js': (err) => ({ - ...err, - summary: [['ERR', err.message]], - detail: [['ERR', err.message]], - }), - '../../../lib/utils/cache-file.js': cacheFile, -} - -requireInject.installGlobally('../../../lib/utils/error-handler.js', mocks) -let errorHandler = require('../../../lib/utils/error-handler.js') - -t.test('default exit code', (t) => { - t.plan(1) - - // manually simulate timing handlers - process.emit('timing', 'foo', 1) - process.emit('timing', 'foo', 2) - - // generates logfile name with mocked date - const _toISOString = Date.prototype.toISOString - Date.prototype.toISOString = () => 'expecteddate' - - npmlog.level = 'silent' - const _exit = process.exit - process.exit = (code) => { - t.equal(code, 1, 'should default to error code 1') - } - - // skip console.error logs - const _error = console.error - console.error = () => null - - process.emit('exit', 1) - - t.teardown(() => { - npmlog.level = 'silly' - process.exit = _exit - console.error = _error - Date.prototype.toISOString = _toISOString - }) -}) - -t.test('handles unknown error', (t) => { - t.plan(2) - - cacheFile.write = (filename, content) => { - t.equal( - redactCwd(filename), - '{CWD}/cachefolder/_logs/expecteddate-debug.log', - 'should use expected log 
filename' - ) - t.matchSnapshot( - content, - 'should have expected log contents for unknown error' - ) - } - - errorHandler(err) - - t.teardown(() => { - cacheFile.write = () => null - }) - t.end() -}) - -t.test('npm.config not ready', (t) => { - t.plan(1) - - config.loaded = false - - const _error = console.error - console.error = (msg) => { - t.match( - msg, - /Error: Exit prior to config file resolving./, - 'should exit with config error msg' - ) - } - - errorHandler() - - t.teardown(() => { - console.error = _error - config.loaded = true - }) -}) - -t.test('fail to write logfile', (t) => { - t.plan(1) - - cacheFile.write = () => { - throw err - } - t.teardown(() => { - cacheFile.write = () => null - }) - - t.doesNotThrow( - () => errorHandler(err), - 'should not throw on cache write failure' - ) -}) - -t.test('console.log output using --json', (t) => { - t.plan(1) - - config.values.json = true - - const _error = console.error - console.error = (jsonOutput) => { - t.deepEqual( - JSON.parse(jsonOutput), - { - error: { - code: 'EBADTHING', // should default error code to E[A-Z]+ - summary: 'Error: EBADTHING Something happened', - detail: 'Error: EBADTHING Something happened', - }, - }, - 'should output expected json output' - ) - } - - errorHandler(new Error('Error: EBADTHING Something happened')) - - t.teardown(() => { - console.error = _error - delete config.values.json - }) -}) - -t.test('throw a non-error obj', (t) => { - t.plan(3) - - const weirdError = { - code: 'ESOMETHING', - message: 'foo bar', - } - - const _logError = npmlog.error - npmlog.error = (title, err) => { - t.equal(title, 'weird error', 'should name it a weird error') - t.deepEqual(err, weirdError, 'should log given weird error') - } - - const _exit = process.exit - process.exit = (code) => { - t.equal(code, 1, 'should exit with code 1') - } - - errorHandler(weirdError) - - t.teardown(() => { - process.exit = _exit - npmlog.error = _logError - }) -}) - -t.test('throw a string error', (t) => { - t.plan(3) - - const error = 'foo bar' - - const _logError = npmlog.error - npmlog.error = (title, err) => { - t.equal(title, '', 'should have an empty name ref') - t.deepEqual(err, 'foo bar', 'should log string error') - } - - const _exit = process.exit - process.exit = (code) => { - t.equal(code, 1, 'should exit with code 1') - } - - errorHandler(error) - - t.teardown(() => { - process.exit = _exit - npmlog.error = _logError - }) -}) - -t.test('update notification', (t) => { - t.plan(2) - - const updateMsg = 'you should update npm!' 
- npm.updateNotification = updateMsg - - const _notice = npmlog.notice - npmlog.notice = (prefix, msg) => { - t.equal(prefix, '', 'should have no prefix') - t.equal(msg, updateMsg, 'should show update message') - } - - errorHandler(err) - - t.teardown(() => { - npmlog.notice = _notice - delete npm.updateNotification - }) -}) - -t.test('on exit handler', (t) => { - t.plan(2) - - const _exit = process.exit - process.exit = (code) => { - t.equal(code, 1, 'should default to error code 1') - } - - process.once('timeEnd', (msg) => { - t.equal(msg, 'npm', 'should trigger timeEnd for npm') - }) - - // skip console.error logs - const _error = console.error - console.error = () => null - - process.emit('exit', 1) - - t.teardown(() => { - console.error = _error - process.exit = _exit - }) -}) - -t.test('it worked', (t) => { - t.plan(2) - - config.values.timing = false - - const _exit = process.exit - process.exit = (code) => { - process.exit = _exit - t.equal(code, 0, 'should exit with code 0') - - const _info = npmlog.info - npmlog.info = (msg) => { - npmlog.info = _info - t.equal(msg, 'ok', 'should log ok if "it worked"') - } - - process.emit('exit', 0) - } - - t.teardown(() => { - process.exit = _exit - config.values.timing = true - }) - - errorHandler.exit(0) -}) - -t.test('uses code from errno', (t) => { - t.plan(1) - - // RESET MODULE INTERNAL VARS AND GLOBAL REFS - requireInject.installGlobally.andClearCache('../../../lib/utils/error-handler.js', mocks) - errorHandler = require('../../../lib/utils/error-handler.js') - - npmlog.level = 'silent' - const _exit = process.exit - process.exit = (code) => { - t.equal(code, 127, 'should use set errno') - } - - errorHandler(Object.assign( - new Error('Error with errno'), - { - errno: 127, - } - )) - - t.teardown(() => { - npmlog.level = 'silly' - process.exit = _exit - }) -}) - -t.test('uses exitCode as code if using a number', (t) => { - t.plan(1) - - // RESET MODULE INTERNAL VARS AND GLOBAL REFS - requireInject.installGlobally.andClearCache( - '../../../lib/utils/error-handler.js', - mocks - ) - errorHandler = require('../../../lib/utils/error-handler.js') - - npmlog.level = 'silent' - const _exit = process.exit - process.exit = (code) => { - t.equal(code, 404, 'should use code if a number') - } - - errorHandler(Object.assign( - new Error('Error with code type number'), - { - code: 404, - } - )) - - t.teardown(() => { - npmlog.level = 'silly' - process.exit = _exit - }) -}) - -t.test('call errorHandler with no error', (t) => { - t.plan(1) - - // RESET MODULE INTERNAL VARS AND GLOBAL REFS - requireInject.installGlobally.andClearCache( - '../../../lib/utils/error-handler.js', - mocks - ) - errorHandler = require('../../../lib/utils/error-handler.js') - - const _exit = process.exit - process.exit = (code) => { - t.equal(code, 0, 'should exit with code 0') - } - - t.teardown(() => { - process.exit = _exit - }) - - errorHandler() -}) - -t.test('callback called twice', (t) => { - t.plan(2) - - const _verbose = npmlog.verbose - npmlog.verbose = (key, value) => { - t.equal(key, 'stack', 'should log stack in verbose level') - t.match( - value, - /Error: Callback called more than once./, - 'should have expected error msg' - ) - npmlog.verbose = _verbose - } - - errorHandler() -}) - -t.test('defaults to log error msg if stack is missing', (t) => { - t.plan(1) - - const noStackErr = Object.assign( - new Error('Error with no stack'), - { - code: 'ENOSTACK', - errno: 127, - } - ) - delete noStackErr.stack - - npm.config.loaded = false - - const _error = 
console.error - console.error = (msg) => { - console.error = _error - npm.config.loaded = true - t.equal(msg, 'Error with no stack', 'should use error msg') - } - - errorHandler(noStackErr) -}) - -t.test('set it worked', (t) => { - t.plan(1) - - // RESET MODULE INTERNAL VARS AND GLOBAL REFS - requireInject.installGlobally.andClearCache( - '../../../lib/utils/error-handler.js', - mocks - ) - errorHandler = require('../../../lib/utils/error-handler.js') - - const _exit = process.exit - process.exit = () => { - t.ok('ok') - } - - t.teardown(() => { - process.exit = _exit - }) - - errorHandler.exit(0, true) -}) - -t.test('use exitCode when emitting exit event', (t) => { - t.plan(1) - - npmlog.level = 'silent' - const _exit = process.exit - process.exit = (code) => { - process.exit = _exit - t.equal(code, 1, 'should exit with code 1') - } - - t.teardown(() => { - process.exit = _exit - npmlog.level = 'silly' - }) - - process.emit('exit') -}) - -t.test('do no fancy handling for shellouts', t => { - const { exit } = process - const { command } = npm - const { log } = npmlog - const LOG_RECORD = [] - t.teardown(() => { - npmlog.log = log - process.exit = exit - npm.command = command - }) - - npmlog.log = function (level, ...args) { - log.call(this, level, ...args) - LOG_RECORD.push(npmlog.record[npmlog.record.length - 1]) - } - - npm.command = 'exec' - - let EXPECT_EXIT = 0 - process.exit = code => { - t.equal(code, EXPECT_EXIT, 'got expected exit code') - EXPECT_EXIT = 0 - } - t.beforeEach((cb) => { - LOG_RECORD.length = 0 - cb() - }) - - const loudNoises = () => LOG_RECORD - .filter(({ level }) => ['warn', 'error'].includes(level)) - - t.test('shellout with a numeric error code', t => { - EXPECT_EXIT = 5 - errorHandler(Object.assign(new Error(), { code: 5 })) - t.equal(EXPECT_EXIT, 0, 'called process.exit') - // should log no warnings or errors, verbose/silly is fine. 
- t.strictSame(loudNoises(), [], 'no noisy warnings') - t.end() - }) - - t.test('shellout without a numeric error code (something in npm)', t => { - EXPECT_EXIT = 1 - errorHandler(Object.assign(new Error(), { code: 'banana stand' })) - t.equal(EXPECT_EXIT, 0, 'called process.exit') - // should log some warnings and errors, because something weird happened - t.strictNotSame(loudNoises(), [], 'bring the noise') - t.end() - }) - - t.test('shellout with code=0 (extra weird?)', t => { - EXPECT_EXIT = 1 - errorHandler(Object.assign(new Error(), { code: 0 })) - t.equal(EXPECT_EXIT, 0, 'called process.exit') - // should log some warnings and errors, because something weird happened - t.strictNotSame(loudNoises(), [], 'bring the noise') - t.end() - }) - - t.end() -}) diff --git a/test/lib/utils/error-message.js b/test/lib/utils/error-message.js index 86db7c94bad49..908d70fc3924d 100644 --- a/test/lib/utils/error-message.js +++ b/test/lib/utils/error-message.js @@ -1,4 +1,5 @@ const t = require('tap') +const path = require('path') // make a bunch of stuff consistent for snapshots @@ -10,26 +11,13 @@ Object.defineProperty(process, 'arch', { configurable: true, }) -const beWindows = () => { - Object.defineProperty(process, 'platform', { - value: 'win32', - configurable: true, - }) - delete require.cache[require.resolve('../../../lib/utils/is-windows.js')] -} - -const bePosix = () => { - Object.defineProperty(process, 'platform', { - value: 'posix', - configurable: true, - }) - delete require.cache[require.resolve('../../../lib/utils/is-windows.js')] -} - const { resolve } = require('path') const npm = require('../../../lib/npm.js') const CACHE = '/some/cache/dir' npm.config = { + flat: { + color: false, + }, loaded: false, localPrefix: '/some/prefix/dir', get: key => { @@ -56,16 +44,36 @@ npmlog.verbose = (...message) => { verboseLogs.push(message) } -const requireInject = require('require-inject') const EXPLAIN_CALLED = [] -const errorMessage = requireInject('../../../lib/utils/error-message.js', { +const mocks = { '../../../lib/utils/explain-eresolve.js': { report: (...args) => { EXPLAIN_CALLED.push(args) return 'explanation' }, }, -}) + // XXX ??? 
+ get '../../../lib/utils/is-windows.js' () { + return process.platform === 'win32' + }, +} +let errorMessage = t.mock('../../../lib/utils/error-message.js', { ...mocks }) + +const beWindows = () => { + Object.defineProperty(process, 'platform', { + value: 'win32', + configurable: true, + }) + errorMessage = t.mock('../../../lib/utils/error-message.js', { ...mocks }) +} + +const bePosix = () => { + Object.defineProperty(process, 'platform', { + value: 'posix', + configurable: true, + }) + errorMessage = t.mock('../../../lib/utils/error-message.js', { ...mocks }) +} t.test('just simple messages', t => { npm.command = 'audit' @@ -92,6 +100,7 @@ t.test('just simple messages', t => { 'ETOOMANYARGS', 'ETARGET', 'E403', + 'ERR_SOCKET_TIMEOUT', ] t.plan(codes.length) codes.forEach(code => { @@ -106,7 +115,7 @@ t.test('just simple messages', t => { file, stack, }) - t.matchSnapshot(errorMessage(er)) + t.matchSnapshot(errorMessage(er, npm)) }) }) @@ -124,7 +133,7 @@ t.test('replace message/stack sensistive info', t => { file, stack, }) - t.matchSnapshot(errorMessage(er)) + t.matchSnapshot(errorMessage(er, npm)) t.end() }) @@ -144,7 +153,7 @@ t.test('bad engine with config loaded', t => { file, stack, }) - t.matchSnapshot(errorMessage(er)) + t.matchSnapshot(errorMessage(er, npm)) t.end() }) @@ -158,7 +167,7 @@ t.test('enoent without a file', t => { pkgid, stack, }) - t.matchSnapshot(errorMessage(er)) + t.matchSnapshot(errorMessage(er, npm)) t.end() }) @@ -175,20 +184,20 @@ t.test('enolock without a command', t => { file, stack, }) - t.matchSnapshot(errorMessage(er)) + t.matchSnapshot(errorMessage(er, npm)) t.end() }) t.test('default message', t => { - t.matchSnapshot(errorMessage(new Error('error object'))) - t.matchSnapshot(errorMessage('error string')) + t.matchSnapshot(errorMessage(new Error('error object'), npm)) + t.matchSnapshot(errorMessage('error string'), npm) t.matchSnapshot(errorMessage(Object.assign(new Error('cmd err'), { cmd: 'some command', signal: 'SIGYOLO', args: ['a', 'r', 'g', 's'], stdout: 'stdout', stderr: 'stderr', - }))) + }), npm)) t.end() }) @@ -209,7 +218,7 @@ t.test('eacces/eperm', t => { stack: 'dummy stack trace', }) verboseLogs.length = 0 - t.matchSnapshot(errorMessage(er)) + t.matchSnapshot(errorMessage(er, npm)) t.matchSnapshot(verboseLogs) t.end() verboseLogs.length = 0 @@ -284,7 +293,7 @@ t.test('json parse', t => { t.matchSnapshot(errorMessage(Object.assign(new Error('conflicted'), { code: 'EJSONPARSE', file: resolve(dir, 'package.json'), - }))) + }), npm)) t.end() }) @@ -306,7 +315,7 @@ t.test('json parse', t => { t.matchSnapshot(errorMessage(Object.assign(new Error('not json'), { code: 'EJSONPARSE', file: resolve(dir, 'package.json'), - }))) + }), npm)) t.end() }) @@ -322,7 +331,7 @@ t.test('json parse', t => { t.matchSnapshot(errorMessage(Object.assign(new Error('not json'), { code: 'EJSONPARSE', file: `${dir}/blerg.json`, - }))) + }), npm)) t.end() }) @@ -333,21 +342,21 @@ t.test('eotp/e401', t => { t.test('401, no auth headers', t => { t.matchSnapshot(errorMessage(Object.assign(new Error('nope'), { code: 'E401', - }))) + }), npm)) t.end() }) t.test('401, no message', t => { t.matchSnapshot(errorMessage({ code: 'E401', - })) + }, npm)) t.end() }) t.test('one-time pass challenge code', t => { t.matchSnapshot(errorMessage(Object.assign(new Error('nope'), { code: 'EOTP', - }))) + }), npm)) t.end() }) @@ -355,7 +364,7 @@ t.test('eotp/e401', t => { const message = 'one-time pass' t.matchSnapshot(errorMessage(Object.assign(new Error(message), { code: 'E401', - }))) + 
}), npm)) t.end() }) @@ -375,7 +384,7 @@ t.test('eotp/e401', t => { }, code: 'E401', }) - t.matchSnapshot(errorMessage(er)) + t.matchSnapshot(errorMessage(er, npm)) t.end() }) } @@ -387,7 +396,7 @@ t.test('eotp/e401', t => { t.test('404', t => { t.test('no package id', t => { const er = Object.assign(new Error('404 not found'), { code: 'E404' }) - t.matchSnapshot(errorMessage(er)) + t.matchSnapshot(errorMessage(er, npm)) t.end() }) t.test('you should publish it', t => { @@ -395,7 +404,7 @@ t.test('404', t => { pkgid: 'yolo', code: 'E404', }) - t.matchSnapshot(errorMessage(er)) + t.matchSnapshot(errorMessage(er, npm)) t.end() }) t.test('name with warning', t => { @@ -403,7 +412,7 @@ t.test('404', t => { pkgid: new Array(215).fill('x').join(''), code: 'E404', }) - t.matchSnapshot(errorMessage(er)) + t.matchSnapshot(errorMessage(er, npm)) t.end() }) t.test('name with error', t => { @@ -411,7 +420,7 @@ t.test('404', t => { pkgid: 'node_modules', code: 'E404', }) - t.matchSnapshot(errorMessage(er)) + t.matchSnapshot(errorMessage(er, npm)) t.end() }) t.end() @@ -431,7 +440,7 @@ t.test('bad platform', t => { }, code: 'EBADPLATFORM', }) - t.matchSnapshot(errorMessage(er)) + t.matchSnapshot(errorMessage(er, npm)) t.end() }) t.test('array os/arch', t => { @@ -447,7 +456,7 @@ t.test('bad platform', t => { }, code: 'EBADPLATFORM', }) - t.matchSnapshot(errorMessage(er)) + t.matchSnapshot(errorMessage(er, npm)) t.end() }) @@ -458,7 +467,11 @@ t.test('explain ERESOLVE errors', t => { const er = Object.assign(new Error('could not resolve'), { code: 'ERESOLVE', }) - t.matchSnapshot(errorMessage(er)) - t.strictSame(EXPLAIN_CALLED, [[er]]) + t.matchSnapshot(errorMessage(er, npm)) + t.match(EXPLAIN_CALLED, [[ + er, + false, + path.resolve(npm.cache, 'eresolve-report.txt'), + ]]) t.end() }) diff --git a/test/lib/utils/exit-handler.js b/test/lib/utils/exit-handler.js new file mode 100644 index 0000000000000..c88a1aef67927 --- /dev/null +++ b/test/lib/utils/exit-handler.js @@ -0,0 +1,393 @@ +/* eslint-disable no-extend-native */ +/* eslint-disable no-global-assign */ +const t = require('tap') +const EventEmitter = require('events') +const os = require('os') +const fs = require('fs') +const path = require('path') + +const { real: mockNpm } = require('../../fixtures/mock-npm') + +// generic error to be used in tests +const err = Object.assign(new Error('ERROR'), { code: 'ERROR' }) +err.stack = 'Error: ERROR' + +const redactCwd = (path) => { + const normalizePath = p => p + .replace(/\\+/g, '/') + .replace(/\r\n/g, '\n') + return normalizePath(path) + .replace(new RegExp(normalizePath(process.cwd()), 'g'), '{CWD}') +} + +t.cleanSnapshot = (str) => redactCwd(str) + +const cacheFolder = t.testdir({}) +const logFile = path.resolve(cacheFolder, '_logs', 'expecteddate-debug.log') +const timingFile = path.resolve(cacheFolder, '_timing.json') + +const { npm } = mockNpm(t) + +t.before(async () => { + npm.version = '1.0.0' + await npm.load() + npm.config.set('cache', cacheFolder) +}) + +t.test('bootstrap tap before cutting off process ref', (t) => { + t.ok('ok') + t.end() +}) + +// cut off process from script so that it won't quit the test runner +// while trying to run through the myriad of cases +const _process = process +process = Object.assign( + new EventEmitter(), + { + argv: ['/node', ..._process.argv.slice(1)], + cwd: _process.cwd, + env: _process.env, + version: 'v1.0.0', + exit: (code) => { + process.exitCode = code || process.exitCode || 0 + process.emit('exit', process.exitCode) + }, + stdout: { write (_, cb) { 
+ cb() + } }, + stderr: { write () {} }, + hrtime: _process.hrtime, + } +) + +const osType = os.type +const osRelease = os.release +// overrides OS type/release for cross platform snapshots +os.type = () => 'Foo' +os.release = () => '1.0.0' + +// generates logfile name with mocked date +const _toISOString = Date.prototype.toISOString +Date.prototype.toISOString = () => 'expecteddate' + +const consoleError = console.error +const errors = [] +console.error = (err) => { + errors.push(err) +} +t.teardown(() => { + os.type = osType + os.release = osRelease + // needs to put process back in its place in order for tap to exit properly + process = _process + Date.prototype.toISOString = _toISOString + console.error = consoleError +}) + +t.afterEach(() => { + errors.length = 0 + npm.log.level = 'silent' + // clear out the 'A complete log' message + npm.log.record.length = 0 + delete process.exitCode +}) + +const mocks = { + '../../../lib/utils/error-message.js': (err) => ({ + ...err, + summary: [['ERR', err.message]], + detail: [['ERR', err.message]], + }), +} + +const exitHandler = t.mock('../../../lib/utils/exit-handler.js', mocks) +exitHandler.setNpm(npm) + +t.test('exit handler never called - loglevel silent', (t) => { + npm.log.level = 'silent' + process.emit('exit', 1) + const logData = fs.readFileSync(logFile, 'utf8') + t.match(logData, 'Exit handler never called!') + t.match(errors, [''], 'logs one empty string to console.error') + t.end() +}) + +t.test('exit handler never called - loglevel notice', (t) => { + npm.log.level = 'notice' + process.emit('exit', 1) + const logData = fs.readFileSync(logFile, 'utf8') + t.match(logData, 'Exit handler never called!') + t.match(errors, ['', ''], 'logs two empty strings to console.error') + t.end() +}) + +t.test('handles unknown error', (t) => { + t.plan(2) + + npm.log.level = 'notice' + + process.once('timeEnd', (msg) => { + t.equal(msg, 'npm', 'should trigger timeEnd for npm') + }) + + exitHandler(err) + const logData = fs.readFileSync(logFile, 'utf8') + t.matchSnapshot( + logData, + 'should have expected log contents for unknown error' + ) + t.end() +}) + +t.test('fail to write logfile', (t) => { + t.plan(1) + + t.teardown(() => { + npm.config.set('cache', cacheFolder) + }) + + const badDir = t.testdir({ + _logs: 'is a file', + }) + + npm.config.set('cache', badDir) + + t.doesNotThrow( + () => exitHandler(err), + 'should not throw on cache write failure' + ) +}) + +t.test('console.log output using --json', (t) => { + t.plan(1) + + npm.config.set('json', true) + t.teardown(() => { + npm.config.set('json', false) + }) + + exitHandler(new Error('Error: EBADTHING Something happened')) + t.same( + JSON.parse(errors[0]), + { + error: { + code: 'EBADTHING', // should default error code to E[A-Z]+ + summary: 'Error: EBADTHING Something happened', + detail: 'Error: EBADTHING Something happened', + }, + }, + 'should output expected json output' + ) +}) + +t.test('throw a non-error obj', (t) => { + t.plan(2) + + const weirdError = { + code: 'ESOMETHING', + message: 'foo bar', + } + + process.once('exit', code => { + t.equal(code, 1, 'exits with exitCode 1') + }) + exitHandler(weirdError) + t.match( + npm.log.record.find(r => r.level === 'error'), + { message: 'foo bar' } + ) +}) + +t.test('throw a string error', (t) => { + t.plan(2) + const error = 'foo bar' + + process.once('exit', code => { + t.equal(code, 1, 'exits with exitCode 1') + }) + exitHandler(error) + t.match( + npm.log.record.find(r => r.level === 'error'), + { message: 'foo bar' } + ) +}) + 
+t.test('update notification', (t) => { + const updateMsg = 'you should update npm!' + npm.updateNotification = updateMsg + npm.log.level = 'silent' + + t.teardown(() => { + delete npm.updateNotification + }) + + exitHandler() + t.match( + npm.log.record.find(r => r.level === 'notice'), + { message: 'you should update npm!' } + ) + t.end() +}) + +t.test('npm.config not ready', (t) => { + t.plan(1) + + const { npm: unloaded } = mockNpm(t) + + t.teardown(() => { + exitHandler.setNpm(npm) + }) + + exitHandler.setNpm(unloaded) + + exitHandler() + t.match( + errors[0], + /Error: Exit prior to config file resolving./, + 'should exit with config error msg' + ) + t.end() +}) + +t.test('timing', (t) => { + npm.config.set('timing', true) + + t.teardown(() => { + fs.unlinkSync(timingFile) + npm.config.set('timing', false) + }) + + exitHandler() + const timingData = JSON.parse(fs.readFileSync(timingFile, 'utf8')) + t.match(timingData, { version: '1.0.0', 'config:load:defaults': Number }) + t.end() +}) + +t.test('timing - with error', (t) => { + npm.config.set('timing', true) + + t.teardown(() => { + fs.unlinkSync(timingFile) + npm.config.set('timing', false) + }) + + exitHandler(err) + const timingData = JSON.parse(fs.readFileSync(timingFile, 'utf8')) + t.match(timingData, { version: '1.0.0', 'config:load:defaults': Number }) + t.end() +}) + +t.test('uses code from errno', (t) => { + t.plan(1) + + process.once('exit', code => { + t.equal(code, 127, 'should set exitCode from errno') + }) + exitHandler(Object.assign( + new Error('Error with errno'), + { + errno: 127, + } + )) +}) + +t.test('uses code from number', (t) => { + t.plan(1) + + process.once('exit', code => { + t.equal(code, 404, 'should set exitCode from a number') + }) + exitHandler(Object.assign( + new Error('Error with code type number'), + { + code: 404, + } + )) +}) + +t.test('call exitHandler with no error', (t) => { + t.plan(1) + process.once('exit', code => { + t.equal(code, 0, 'should end up with exitCode 0 (default)') + }) + exitHandler() +}) + +t.test('defaults to log error msg if stack is missing', (t) => { + const { npm: unloaded } = mockNpm(t) + + t.teardown(() => { + exitHandler.setNpm(npm) + }) + + exitHandler.setNpm(unloaded) + const noStackErr = Object.assign( + new Error('Error with no stack'), + { + code: 'ENOSTACK', + errno: 127, + } + ) + delete noStackErr.stack + + exitHandler(noStackErr) + t.equal(errors[0], 'Error with no stack', 'should use error msg') + t.end() +}) + +t.test('exits uncleanly when only emitting exit event', (t) => { + t.plan(2) + + npm.log.level = 'silent' + process.emit('exit') + const logData = fs.readFileSync(logFile, 'utf8') + t.match(logData, 'Exit handler never called!') + t.match(process.exitCode, 1, 'exitCode coerced to 1') + t.end() +}) + +t.test('do no fancy handling for shellouts', t => { + const { command } = npm + const LOG_RECORD = [] + npm.command = 'exec' + + t.teardown(() => { + npm.command = command + }) + t.beforeEach(() => LOG_RECORD.length = 0) + + const loudNoises = () => npm.log.record + .filter(({ level }) => ['warn', 'error'].includes(level)) + + t.test('shellout with a numeric error code', t => { + t.plan(2) + process.once('exit', code => { + t.equal(code, 5, 'got expected exit code') + }) + exitHandler(Object.assign(new Error(), { code: 5 })) + t.strictSame(loudNoises(), [], 'no noisy warnings') + }) + + t.test('shellout without a numeric error code (something in npm)', t => { + t.plan(2) + process.once('exit', code => { + t.equal(code, 1, 'got expected exit code') + }) + 
exitHandler(Object.assign(new Error(), { code: 'banana stand' })) + // should log some warnings and errors, because something weird happened + t.strictNotSame(loudNoises(), [], 'bring the noise') + t.end() + }) + + t.test('shellout with code=0 (extra weird?)', t => { + t.plan(2) + process.once('exit', code => { + t.equal(code, 1, 'got expected exit code') + }) + exitHandler(Object.assign(new Error(), { code: 0 })) + t.strictNotSame(loudNoises(), [], 'bring the noise') + }) + + t.end() +}) diff --git a/test/lib/utils/explain-dep.js b/test/lib/utils/explain-dep.js index 1fee6105081e9..000f5b8165a9b 100644 --- a/test/lib/utils/explain-dep.js +++ b/test/lib/utils/explain-dep.js @@ -1,9 +1,16 @@ +const { resolve } = require('path') const t = require('tap') -const requireInject = require('require-inject') -const npm = {} -const { explainNode, printNode } = requireInject('../../../lib/utils/explain-dep.js', { - '../../../lib/npm.js': npm, -}) +const { explainNode, printNode } = require('../../../lib/utils/explain-dep.js') +const testdir = t.testdirName + +const redactCwd = (path) => { + const normalizePath = p => p + .replace(/\\+/g, '/') + .replace(/\r\n/g, '\n') + return normalizePath(path) + .replace(new RegExp(normalizePath(process.cwd()), 'g'), '{CWD}') +} +t.cleanSnapshot = (str) => redactCwd(str) const cases = { prodDep: { @@ -205,9 +212,32 @@ cases.manyDeps = { ], } +cases.workspaces = { + name: 'a', + version: '1.0.0', + location: 'a', + isWorkspace: true, + dependents: [], + linksIn: [ + { + name: 'a', + version: '1.0.0', + location: 'node_modules/a', + isWorkspace: true, + dependents: [ + { + type: 'workspace', + name: 'a', + spec: `file:${resolve(testdir, 'ws-project', 'a')}`, + from: { location: resolve(testdir, 'ws-project') }, + }, + ], + }, + ], +} + for (const [name, expl] of Object.entries(cases)) { t.test(name, t => { - npm.color = true t.matchSnapshot(printNode(expl, true), 'print color') t.matchSnapshot(printNode(expl, false), 'print nocolor') t.matchSnapshot(explainNode(expl, Infinity, true), 'explain color deep') diff --git a/test/lib/utils/explain-eresolve.js b/test/lib/utils/explain-eresolve.js index 8dae1b92cd514..f9710ee889ab1 100644 --- a/test/lib/utils/explain-eresolve.js +++ b/test/lib/utils/explain-eresolve.js @@ -1,9 +1,6 @@ const t = require('tap') -const requireInject = require('require-inject') const npm = {} -const { explain, report } = requireInject('../../../lib/utils/explain-eresolve.js', { - '../../../lib/npm.js': npm, -}) +const { explain, report } = require('../../../lib/utils/explain-eresolve.js') const { statSync, readFileSync, unlinkSync } = require('fs') // strip out timestamps from reports const read = f => readFileSync(f, 'utf8') @@ -26,23 +23,18 @@ for (const [name, expl] of Object.entries(cases)) { t.cleanSnapshot = str => str.split(reportFile).join('${REPORT}') npm.color = true - t.matchSnapshot(report(expl), 'report with color') + t.matchSnapshot(report(expl, true, reportFile), 'report with color') const reportData = read(reportFile) t.matchSnapshot(reportData, 'report') unlinkSync(reportFile) - t.matchSnapshot(report(expl, 2), 'report with color, depth only 2') + + t.matchSnapshot(report(expl, false, reportFile), 'report with no color') t.equal(read(reportFile), reportData, 'same report written for object') unlinkSync(reportFile) - npm.color = false - t.matchSnapshot(report(expl, 6), 'report with no color, depth of 6') - t.equal(read(reportFile), reportData, 'same report written for object') - unlinkSync(reportFile) - npm.color = true - 
t.matchSnapshot(explain(expl), 'explain with color') + t.matchSnapshot(explain(expl, true, 2), 'explain with color, depth of 2') t.throws(() => statSync(reportFile), { code: 'ENOENT' }, 'no report') - npm.color = false - t.matchSnapshot(explain(expl, 6), 'explain with no color, depth of 6') + t.matchSnapshot(explain(expl, false, 6), 'explain with no color, depth of 6') t.throws(() => statSync(reportFile), { code: 'ENOENT' }, 'no report') t.end() diff --git a/test/lib/utils/file-exists.js b/test/lib/utils/file-exists.js index 473a4b050edef..c8edf4d968a0f 100644 --- a/test/lib/utils/file-exists.js +++ b/test/lib/utils/file-exists.js @@ -1,7 +1,7 @@ -const { test } = require('tap') +const t = require('tap') const fileExists = require('../../../lib/utils/file-exists.js') -test('returns true when arg is a file', async (t) => { +t.test('returns true when arg is a file', async (t) => { const path = t.testdir({ foo: 'just some file', }) @@ -11,7 +11,7 @@ test('returns true when arg is a file', async (t) => { t.end() }) -test('returns false when arg is not a file', async (t) => { +t.test('returns false when arg is not a file', async (t) => { const path = t.testdir({ foo: {}, }) @@ -21,7 +21,7 @@ test('returns false when arg is not a file', async (t) => { t.end() }) -test('returns false when arg does not exist', async (t) => { +t.test('returns false when arg does not exist', async (t) => { const path = t.testdir() const result = await fileExists(`${path}/foo`) diff --git a/test/lib/utils/flat-options.js b/test/lib/utils/flat-options.js deleted file mode 100644 index 6f580fabc4511..0000000000000 --- a/test/lib/utils/flat-options.js +++ /dev/null @@ -1,359 +0,0 @@ -const t = require('tap') - -process.env.NODE = '/path/to/some/node' -process.env.NODE_ENV = 'development' - -const logs = [] -const log = require('npmlog') -log.warn = (...args) => logs.push(['warn', ...args]) -log.verbose = (...args) => logs.push(['verbose', ...args]) - -class Mocknpm { - constructor (opts = {}) { - this.modes = { - exec: 0o777, - file: 0o666, - umask: 0o22, - } - this.color = true - this.projectScope = '@npmcli' - this.tmp = '/tmp' - this.command = null - this.globalPrefix = '/usr/local' - this.localPrefix = '/path/to/npm/cli' - this.prefix = this.localPrefix - this.version = '7.6.5' - this.config = new MockConfig(opts) - this.flatOptions = null - } -} - -class MockConfig { - constructor (opts = {}) { - this.list = [{ - cache: 'cache', - 'node-version': '1.2.3', - global: 'global', - registry: 'registry', - access: 'access', - 'always-auth': 'always-auth', - audit: 'audit', - 'audit-level': 'audit-level', - 'auth-type': 'auth-type', - before: 'before', - browser: 'browser', - ca: 'ca', - cafile: 'cafile', - call: 'call', - cert: 'cert', - key: 'key', - 'cache-lock-retries': 'cache-lock-retries', - 'cache-lock-stale': 'cache-lock-stale', - 'cache-lock-wait': 'cache-lock-wait', - cidr: 'cidr', - 'read-only': 'read-only', - preid: 'preid', - 'tag-version-prefix': 'tag-version-prefix', - 'allow-same-version': 'allow-same-version', - message: 'message', - 'commit-hooks': 'commit-hooks', - 'git-tag-version': 'git-tag-version', - 'sign-git-commit': 'sign-git-commit', - 'sign-git-tag': 'sign-git-tag', - depth: 'depth', - description: 'description', - searchexclude: 'searchexclude', - searchlimit: 'searchlimit', - searchopts: 'from=1', - searchstaleness: 'searchstaleness', - 'dry-run': 'dry-run', - 'engine-strict': 'engine-strict', - 'fetch-retries': 'fetch-retries', - 'fetch-retry-factor': 'fetch-retry-factor', - 
'fetch-retry-mintimeout': 'fetch-retry-mintimeout', - 'fetch-retry-maxtimeout': 'fetch-retry-maxtimeout', - 'fetch-timeout': 'fetch-timeout', - force: 'force', - 'format-package-lock': 'format-package-lock', - fund: 'fund', - git: 'git', - viewer: 'viewer', - editor: 'editor', - 'bin-links': 'bin-links', - 'rebuild-bundle': 'rebuild-bundle', - package: 'package', - 'package-lock': 'package-lock', - 'package-lock-only': 'package-lock-only', - 'global-style': 'global-style', - 'legacy-bundling': 'legacy-bundling', - 'script-shell': 'script-shell', - omit: [], - include: [], - save: 'save', - 'save-bundle': 'save-bundle', - 'save-dev': 'save-dev', - 'save-optional': 'save-optional', - 'save-peer': 'save-peer', - 'save-prod': 'save-prod', - 'save-exact': 'save-exact', - 'save-prefix': 'save-prefix', - otp: 'otp', - offline: 'offline', - 'prefer-online': 'prefer-online', - 'prefer-offline': 'prefer-offline', - 'cache-max': 'cache-max', - 'cache-min': 'cache-min', - 'strict-ssl': 'strict-ssl', - scope: '', - tag: 'tag', - 'user-agent': 'user-agent', - '@scope:registry': '@scope:registry', - '//nerf.dart:_authToken': '//nerf.dart:_authToken', - proxy: 'proxy', - noproxy: 'noproxy', - ...opts, - }] - } - - get (key) { - return this.list[0][key] - } - - set (key, val) { - this.list[0][key] = val - } -} - -const flatOptions = require('../../../lib/utils/flat-options.js') -t.match(logs, [[ - 'verbose', - 'npm-session', - /^[0-9a-f]{16}$/, -]], 'logged npm session verbosely') -logs.length = 0 - -t.test('basic', t => { - const npm = new Mocknpm() - const generatedFlat = flatOptions(npm) - const clean = { - ...generatedFlat, - npmBin: '/path/to/npm/bin.js', - log: {}, - npmSession: '12345', - cache: generatedFlat.cache.replace(/\\/g, '/'), - } - t.matchSnapshot(clean, 'flat options') - t.equal(generatedFlat.npmCommand, null, 'command not set yet') - npm.command = 'view' - t.equal(generatedFlat.npmCommand, 'view', 'command updated via getter') - t.equal(generatedFlat.npmBin, require.main.filename) - // test the object is frozen - generatedFlat.newField = 'asdf' - t.equal(generatedFlat.newField, undefined, 'object is frozen') - const preExistingOpts = { flat: 'options' } - npm.flatOptions = preExistingOpts - t.equal(flatOptions(npm), preExistingOpts, 'use pre-existing npm.flatOptions') - t.end() -}) - -t.test('get preferOffline from cache-min', t => { - const npm = new Mocknpm({ - 'cache-min': 9999999, - 'prefer-offline': undefined, - }) - const opts = flatOptions(npm) - t.equal(opts.preferOffline, true, 'got preferOffline from cache min') - logs.length = 0 - t.equal(opts.cacheMin, undefined, 'opts.cacheMin is not set') - t.match(logs, []) - logs.length = 0 - t.end() -}) - -t.test('get preferOnline from cache-max', t => { - const npm = new Mocknpm({ - 'cache-max': -1, - 'prefer-online': undefined, - }) - const opts = flatOptions(npm) - t.equal(opts.preferOnline, true, 'got preferOnline from cache min') - logs.length = 0 - t.equal(opts.cacheMax, undefined, 'opts.cacheMax is not set') - t.match(logs, []) - logs.length = 0 - t.end() -}) - -t.test('tag emits warning', t => { - const npm = new Mocknpm({ tag: 'foobar' }) - t.equal(flatOptions(npm).tag, 'foobar', 'tag is foobar') - t.match(logs, []) - logs.length = 0 - t.end() -}) - -t.test('omit/include options', t => { - t.test('omit explicitly', t => { - const { NODE_ENV } = process.env - const npm = new Mocknpm({ - omit: ['dev', 'optional', 'peer'], - }) - t.strictSame(flatOptions(npm).omit, ['dev', 'optional', 'peer']) - t.equal(process.env.NODE_ENV, 
'production') - process.env.NODE_ENV = NODE_ENV - t.end() - }) - - t.test('omit and include some', t => { - const { NODE_ENV } = process.env - const npm = new Mocknpm({ - omit: ['dev', 'optional', 'peer'], - include: ['peer'], - }) - t.strictSame(flatOptions(npm).omit, ['dev', 'optional']) - t.equal(process.env.NODE_ENV, 'production') - process.env.NODE_ENV = NODE_ENV - t.end() - }) - - t.test('dev flag', t => { - const { NODE_ENV } = process.env - const npm = new Mocknpm({ - omit: ['dev', 'optional', 'peer'], - include: [], - dev: true, - }) - t.strictSame(flatOptions(npm).omit, ['optional', 'peer']) - t.equal(process.env.NODE_ENV, NODE_ENV) - process.env.NODE_ENV = NODE_ENV - t.end() - }) - - t.test('production flag', t => { - const { NODE_ENV } = process.env - const npm = new Mocknpm({ - omit: [], - include: [], - production: true, - }) - t.strictSame(flatOptions(npm).omit, ['dev']) - t.equal(process.env.NODE_ENV, 'production') - process.env.NODE_ENV = NODE_ENV - t.end() - }) - - t.test('only', t => { - const { NODE_ENV } = process.env - const cases = ['prod', 'production'] - t.plan(cases.length) - cases.forEach(c => t.test(c, t => { - const npm = new Mocknpm({ - omit: [], - include: [], - only: c, - }) - t.strictSame(flatOptions(npm).omit, ['dev']) - t.equal(process.env.NODE_ENV, 'production') - process.env.NODE_ENV = NODE_ENV - t.end() - })) - }) - - t.test('also dev', t => { - const { NODE_ENV } = process.env - const npm = new Mocknpm({ - omit: ['dev', 'optional', 'peer'], - also: 'dev', - }) - t.strictSame(flatOptions(npm).omit, ['optional', 'peer']) - t.equal(process.env.NODE_ENV, NODE_ENV) - process.env.NODE_ENV = NODE_ENV - t.end() - }) - - t.test('no-optional', t => { - const { NODE_ENV } = process.env - const npm = new Mocknpm({ - optional: false, - omit: null, - include: null, - }) - t.strictSame(flatOptions(npm).omit, ['optional']) - t.equal(process.env.NODE_ENV, NODE_ENV) - process.env.NODE_ENV = NODE_ENV - t.end() - }) - - t.end() -}) - -t.test('get the node without the environ', t => { - delete process.env.NODE - t.equal(flatOptions(new Mocknpm()).nodeBin, process.execPath) - t.end() -}) - -t.test('various default values and falsey fallbacks', t => { - const npm = new Mocknpm({ - 'script-shell': false, - registry: 'http://example.com', - searchlimit: 0, - 'save-exact': false, - 'save-prefix': '>=', - }) - const opts = flatOptions(npm) - t.equal(opts.scriptShell, undefined, 'scriptShell is undefined if falsey') - t.equal(opts.search.limit, 20, 'searchLimit defaults to 20') - t.equal(opts.savePrefix, '>=', 'save-prefix respected if no save-exact') - t.equal(opts.scope, '', 'scope defaults to empty string') - logs.length = 0 - t.end() -}) - -t.test('legacy _auth token', t => { - const npm = new Mocknpm({ - _auth: 'asdfasdf', - }) - t.strictSame( - flatOptions(npm)._auth, - 'asdfasdf', - 'should set legacy _auth token' - ) - t.end() -}) - -t.test('save-type', t => { - const base = { - 'save-optional': false, - 'save-peer': false, - 'save-dev': false, - 'save-prod': false, - } - const cases = [ - ['peerOptional', { - 'save-optional': true, - 'save-peer': true, - }], - ['optional', { - 'save-optional': true, - }], - ['dev', { - 'save-dev': true, - }], - ['peer', { - 'save-peer': true, - }], - ['prod', { - 'save-prod': true, - }], - [null, {}], - ] - for (const [expect, options] of cases) { - const opts = flatOptions(new Mocknpm({ - ...base, - ...options, - })) - t.equal(opts.saveType, expect, JSON.stringify(options)) - } - t.end() -}) diff --git a/test/lib/utils/get-identity.js 
b/test/lib/utils/get-identity.js index cc713b3378166..5e6de9ca9cfee 100644 --- a/test/lib/utils/get-identity.js +++ b/test/lib/utils/get-identity.js @@ -1,9 +1,8 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') -test('throws ENOREGISTRY when no registry option is provided', async (t) => { +t.test('throws ENOREGISTRY when no registry option is provided', async (t) => { t.plan(2) - const getIdentity = requireInject('../../../lib/utils/get-identity.js') + const getIdentity = t.mock('../../../lib/utils/get-identity.js') try { await getIdentity({}) @@ -13,10 +12,10 @@ test('throws ENOREGISTRY when no registry option is provided', async (t) => { } }) -test('returns username from uri when provided', async (t) => { +t.test('returns username from uri when provided', async (t) => { t.plan(1) - const getIdentity = requireInject('../../../lib/utils/get-identity.js') + const getIdentity = t.mock('../../../lib/utils/get-identity.js') const npm = { config: { getCredentialsByURI: () => { @@ -29,7 +28,7 @@ test('returns username from uri when provided', async (t) => { t.equal(identity, 'foo', 'returns username from uri') }) -test('calls registry whoami when token is provided', async (t) => { +t.test('calls registry whoami when token is provided', async (t) => { t.plan(3) const options = { @@ -37,7 +36,7 @@ test('calls registry whoami when token is provided', async (t) => { token: 'thisisnotreallyatoken', } - const getIdentity = requireInject('../../../lib/utils/get-identity.js', { + const getIdentity = t.mock('../../../lib/utils/get-identity.js', { 'npm-registry-fetch': { json: (path, opts) => { t.equal(path, '/-/whoami', 'calls whoami') @@ -56,7 +55,7 @@ test('calls registry whoami when token is provided', async (t) => { t.equal(identity, 'foo', 'fetched username from registry') }) -test('throws ENEEDAUTH when response does not include a username', async (t) => { +t.test('throws ENEEDAUTH when response does not include a username', async (t) => { t.plan(3) const options = { @@ -64,7 +63,7 @@ test('throws ENEEDAUTH when response does not include a username', async (t) => token: 'thisisnotreallyatoken', } - const getIdentity = requireInject('../../../lib/utils/get-identity.js', { + const getIdentity = t.mock('../../../lib/utils/get-identity.js', { 'npm-registry-fetch': { json: (path, opts) => { t.equal(path, '/-/whoami', 'calls whoami') @@ -86,9 +85,9 @@ test('throws ENEEDAUTH when response does not include a username', async (t) => } }) -test('throws ENEEDAUTH when neither username nor token is configured', async (t) => { +t.test('throws ENEEDAUTH when neither username nor token is configured', async (t) => { t.plan(1) - const getIdentity = requireInject('../../../lib/utils/get-identity.js', { + const getIdentity = t.mock('../../../lib/utils/get-identity.js', { }) const npm = { config: { diff --git a/test/lib/utils/lifecycle-cmd.js b/test/lib/utils/lifecycle-cmd.js index 2f1f693f2df4d..862c87a8e032c 100644 --- a/test/lib/utils/lifecycle-cmd.js +++ b/test/lib/utils/lifecycle-cmd.js @@ -10,11 +10,20 @@ const npm = { }, } t.test('create a lifecycle command', t => { - const cmd = new LifecycleCmd(npm, 'test-stage') + t.plan(5) + class TestStage extends LifecycleCmd { + static get name () { + return 'test-stage' + } + } + const cmd = new TestStage(npm) t.match(cmd.usage, /test-stage/) cmd.exec(['some', 'args'], (er, result) => { t.same(runArgs, ['test-stage', 'some', 'args']) t.strictSame(result, 'called npm.commands.run') - t.end() + }) + 
cmd.execWorkspaces(['some', 'args'], [], (er, result) => { + t.same(runArgs, ['test-stage', 'some', 'args']) + t.strictSame(result, 'called npm.commands.run') }) }) diff --git a/test/lib/utils/npm-usage.js b/test/lib/utils/npm-usage.js index dbbde947ce5ed..f846a01109d2f 100644 --- a/test/lib/utils/npm-usage.js +++ b/test/lib/utils/npm-usage.js @@ -1,19 +1,11 @@ const t = require('tap') - -const OUTPUT = [] -const output = (...msg) => OUTPUT.push(msg) -const requireInject = require('require-inject') -const usage = requireInject('../../../lib/utils/npm-usage.js', { - '../../../lib/utils/output.js': output, -}) -const npm = requireInject('../../../lib/npm.js') +const npm = require('../../../lib/npm.js') t.test('usage', t => { - t.afterEach((cb) => { + t.afterEach(() => { npm.config.set('viewer', null) npm.config.set('long', false) npm.config.set('userconfig', '/some/config/file/.npmrc') - cb() }) const { dirname } = require('path') const basedir = dirname(dirname(dirname(__dirname))) @@ -29,61 +21,19 @@ t.test('usage', t => { npm.config.set('userconfig', '/some/config/file/.npmrc') t.test('basic usage', t => { - usage(npm) - t.equal(OUTPUT.length, 1) - t.equal(OUTPUT[0].length, 1) - t.matchSnapshot(OUTPUT[0][0]) - OUTPUT.length = 0 + t.matchSnapshot(npm.usage) t.end() }) t.test('with browser', t => { npm.config.set('viewer', 'browser') - usage(npm) - t.equal(OUTPUT.length, 1) - t.equal(OUTPUT[0].length, 1) - t.matchSnapshot(OUTPUT[0][0]) - OUTPUT.length = 0 - npm.config.set('viewer', null) + t.matchSnapshot(npm.usage) t.end() }) t.test('with long', t => { npm.config.set('long', true) - usage(npm) - t.equal(OUTPUT.length, 1) - t.equal(OUTPUT[0].length, 1) - t.matchSnapshot(OUTPUT[0][0]) - OUTPUT.length = 0 - npm.config.set('long', false) - t.end() - }) - - t.test('did you mean?', t => { - npm.argv.push('unistnall') - usage(npm) - t.equal(OUTPUT.length, 2) - t.equal(OUTPUT[0].length, 1) - t.equal(OUTPUT[1].length, 1) - t.matchSnapshot(OUTPUT[0][0]) - t.matchSnapshot(OUTPUT[1][0]) - OUTPUT.length = 0 - npm.argv.length = 0 - t.end() - }) - - t.test('did you mean?', t => { - npm.argv.push('unistnall') - const { exitCode } = process - t.teardown(() => { - if (t.passing()) - process.exitCode = exitCode - }) - // make sure it fails when invalid - usage(npm, false) - t.equal(process.exitCode, 1) - OUTPUT.length = 0 - npm.argv.length = 0 + t.matchSnapshot(npm.usage) t.end() }) @@ -106,11 +56,7 @@ t.test('usage', t => { configurable: true, writable: true, }) - usage(npm) - t.equal(OUTPUT.length, 1) - t.equal(OUTPUT[0].length, 1) - t.matchSnapshot(OUTPUT[0][0]) - OUTPUT.length = 0 + t.matchSnapshot(npm.usage) t.end() }) } diff --git a/test/lib/utils/open-url.js b/test/lib/utils/open-url.js index e8ab8f15a14a8..a31a8cb6867df 100644 --- a/test/lib/utils/open-url.js +++ b/test/lib/utils/open-url.js @@ -1,6 +1,7 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') +const OUTPUT = [] +const output = (...args) => OUTPUT.push(args) const npm = { _config: { json: false, @@ -12,11 +13,9 @@ const npm = { npm._config[k] = v }, }, + output, } -const OUTPUT = [] -const output = (...args) => OUTPUT.push(args) - let openerUrl = null let openerOpts = null let openerResult = null @@ -26,12 +25,11 @@ const opener = (url, opts, cb) => { return cb(openerResult) } -const openUrl = requireInject('../../../lib/utils/open-url.js', { - '../../../lib/utils/output.js': output, +const openUrl = t.mock('../../../lib/utils/open-url.js', { opener, }) -test('opens a url', async 
(t) => { +t.test('opens a url', async (t) => { t.teardown(() => { openerUrl = null openerOpts = null @@ -43,7 +41,7 @@ test('opens a url', async (t) => { t.same(OUTPUT, [], 'printed no output') }) -test('returns error for non-https and non-file url', async (t) => { +t.test('returns error for non-https and non-file url', async (t) => { t.teardown(() => { openerUrl = null openerOpts = null @@ -53,10 +51,10 @@ test('returns error for non-https and non-file url', async (t) => { t.equal(openerUrl, null, 'did not open') t.same(openerOpts, null, 'did not open') t.same(OUTPUT, [], 'printed no output') - t.done() + t.end() }) -test('returns error for non-parseable url', async (t) => { +t.test('returns error for non-parseable url', async (t) => { t.teardown(() => { openerUrl = null openerOpts = null @@ -66,10 +64,10 @@ test('returns error for non-parseable url', async (t) => { t.equal(openerUrl, null, 'did not open') t.same(openerOpts, null, 'did not open') t.same(OUTPUT, [], 'printed no output') - t.done() + t.end() }) -test('opens a url with the given browser', async (t) => { +t.test('opens a url with the given browser', async (t) => { npm.config.set('browser', 'chrome') t.teardown(() => { openerUrl = null @@ -81,10 +79,10 @@ test('opens a url with the given browser', async (t) => { t.equal(openerUrl, 'https://www.npmjs.com', 'opened the given url') t.same(openerOpts, { command: 'chrome' }, 'passed the given browser as command') t.same(OUTPUT, [], 'printed no output') - t.done() + t.end() }) -test('prints where to go when browser is disabled', async (t) => { +t.test('prints where to go when browser is disabled', async (t) => { npm.config.set('browser', false) t.teardown(() => { openerUrl = null @@ -98,10 +96,10 @@ test('prints where to go when browser is disabled', async (t) => { t.equal(OUTPUT.length, 1, 'got one logged message') t.equal(OUTPUT[0].length, 1, 'logged message had one value') t.matchSnapshot(OUTPUT[0][0], 'printed expected message') - t.done() + t.end() }) -test('prints where to go when browser is disabled and json is enabled', async (t) => { +t.test('prints where to go when browser is disabled and json is enabled', async (t) => { npm.config.set('browser', false) npm.config.set('json', true) t.teardown(() => { @@ -117,10 +115,10 @@ test('prints where to go when browser is disabled and json is enabled', async (t t.equal(OUTPUT.length, 1, 'got one logged message') t.equal(OUTPUT[0].length, 1, 'logged message had one value') t.matchSnapshot(OUTPUT[0][0], 'printed expected message') - t.done() + t.end() }) -test('prints where to go when given browser does not exist', async (t) => { +t.test('prints where to go when given browser does not exist', async (t) => { npm.config.set('browser', 'firefox') openerResult = Object.assign(new Error('failed'), { code: 'ENOENT' }) t.teardown(() => { @@ -135,10 +133,10 @@ test('prints where to go when given browser does not exist', async (t) => { t.equal(OUTPUT.length, 1, 'got one logged message') t.equal(OUTPUT[0].length, 1, 'logged message had one value') t.matchSnapshot(OUTPUT[0][0], 'printed expected message') - t.done() + t.end() }) -test('handles unknown opener error', async (t) => { +t.test('handles unknown opener error', async (t) => { npm.config.set('browser', 'firefox') openerResult = Object.assign(new Error('failed'), { code: 'ENOBRIAN' }) t.teardown(() => { @@ -148,5 +146,5 @@ test('handles unknown opener error', async (t) => { npm.config.set('browser', true) }) t.rejects(openUrl(npm, 'https://www.npmjs.com', 'npm home'), 'failed', 'got the 
correct error') - t.done() + t.end() }) diff --git a/test/lib/utils/otplease.js b/test/lib/utils/otplease.js index 048856b485770..fb9476120e2df 100644 --- a/test/lib/utils/otplease.js +++ b/test/lib/utils/otplease.js @@ -1,15 +1,14 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') const readUserInfo = { otp: async () => '1234', } -const otplease = requireInject('../../../lib/utils/otplease.js', { +const otplease = t.mock('../../../lib/utils/otplease.js', { '../../../lib/utils/read-user-info.js': readUserInfo, }) -test('prompts for otp for EOTP', async (t) => { +t.test('prompts for otp for EOTP', async (t) => { const stdinTTY = process.stdin.isTTY const stdoutTTY = process.stdout.isTTY process.stdin.isTTY = true @@ -26,13 +25,13 @@ test('prompts for otp for EOTP', async (t) => { t.equal(opts.some, 'prop', 'carried original options') t.equal(opts.otp, '1234', 'received the otp') - t.done() + t.end() } await otplease({ some: 'prop' }, fn) }) -test('prompts for otp for 401', async (t) => { +t.test('prompts for otp for 401', async (t) => { const stdinTTY = process.stdin.isTTY const stdoutTTY = process.stdout.isTTY process.stdin.isTTY = true @@ -53,13 +52,13 @@ test('prompts for otp for 401', async (t) => { t.equal(opts.some, 'prop', 'carried original options') t.equal(opts.otp, '1234', 'received the otp') - t.done() + t.end() } await otplease({ some: 'prop' }, fn) }) -test('does not prompt for non-otp errors', async (t) => { +t.test('does not prompt for non-otp errors', async (t) => { const stdinTTY = process.stdin.isTTY const stdoutTTY = process.stdout.isTTY process.stdin.isTTY = true @@ -76,7 +75,7 @@ test('does not prompt for non-otp errors', async (t) => { t.rejects(otplease({ some: 'prop' }, fn), { message: 'nope' }, 'rejects with the original error') }) -test('does not prompt if stdin or stdout is not a tty', async (t) => { +t.test('does not prompt if stdin or stdout is not a tty', async (t) => { const stdinTTY = process.stdin.isTTY const stdoutTTY = process.stdout.isTTY process.stdin.isTTY = false diff --git a/test/lib/utils/output.js b/test/lib/utils/output.js deleted file mode 100644 index 72871187d889a..0000000000000 --- a/test/lib/utils/output.js +++ /dev/null @@ -1,8 +0,0 @@ -const t = require('tap') -const logs = [] -console.log = (...msg) => logs.push(msg) -const output = require('../../../lib/utils/output.js') -output('hello', 'world') -output('hello') -output('world') -t.strictSame(logs, [['hello', 'world'], ['hello'], ['world']]) diff --git a/test/lib/utils/path.js b/test/lib/utils/path.js index 74fb93462f755..0a7846d94bc67 100644 --- a/test/lib/utils/path.js +++ b/test/lib/utils/path.js @@ -1,13 +1,12 @@ const t = require('tap') -const requireInject = require('require-inject') const mod = '../../../lib/utils/path.js' const delim = require('../../../lib/utils/is-windows.js') ? 
';' : ':' Object.defineProperty(process, 'env', { value: {}, }) process.env.path = ['foo', 'bar', 'baz'].join(delim) -t.strictSame(requireInject(mod), ['foo', 'bar', 'baz']) +t.strictSame(t.mock(mod), ['foo', 'bar', 'baz']) process.env.Path = ['a', 'b', 'c'].join(delim) -t.strictSame(requireInject(mod), ['a', 'b', 'c']) +t.strictSame(t.mock(mod), ['a', 'b', 'c']) process.env.PATH = ['x', 'y', 'z'].join(delim) -t.strictSame(requireInject(mod), ['x', 'y', 'z']) +t.strictSame(t.mock(mod), ['x', 'y', 'z']) diff --git a/test/lib/utils/perf.js b/test/lib/utils/perf.js deleted file mode 100644 index 840dcb6e32399..0000000000000 --- a/test/lib/utils/perf.js +++ /dev/null @@ -1,38 +0,0 @@ -const t = require('tap') -const logs = [] -const npmlog = require('npmlog') -npmlog.silly = (...msg) => logs.push(['silly', ...msg]) -npmlog.timing = (...msg) => logs.push(['timing', ...msg]) - -t.test('time some stuff', t => { - const timings = {} - process.on('timing', (name, value) => { - timings[name] = (timings[name] || 0) + value - }) - require('../../../lib/utils/perf.js') - process.emit('time', 'foo') - process.emit('time', 'bar') - setTimeout(() => { - process.emit('timeEnd', 'foo') - process.emit('timeEnd', 'bar') - process.emit('time', 'foo') - setTimeout(() => { - process.emit('timeEnd', 'foo') - process.emit('timeEnd', 'baz') - t.match(logs, [ - ['timing', 'foo', /Completed in [0-9]+ms/], - ['timing', 'bar', /Completed in [0-9]+ms/], - ['timing', 'foo', /Completed in [0-9]+ms/], - [ - 'silly', - 'timing', - "Tried to end timer that doesn't exist:", - 'baz', - ], - ]) - t.match(timings, { foo: Number, bar: Number }) - t.equal(timings.foo > timings.bar, true, 'foo should be > bar') - t.end() - }, 100) - }, 100) -}) diff --git a/test/lib/utils/ping.js b/test/lib/utils/ping.js index 6e0451538f9fa..1bebfa69d2b78 100644 --- a/test/lib/utils/ping.js +++ b/test/lib/utils/ping.js @@ -1,12 +1,11 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') -test('pings', async (t) => { +t.test('pings', async (t) => { t.plan(3) const options = { fake: 'options' } const response = { some: 'details' } - const ping = requireInject('../../../lib/utils/ping.js', { + const ping = t.mock('../../../lib/utils/ping.js', { 'npm-registry-fetch': (url, opts) => { t.equal(url, '/-/ping?write=true', 'calls the correct url') t.equal(opts, options, 'passes through options') @@ -18,12 +17,12 @@ test('pings', async (t) => { t.match(res, response, 'returns json response') }) -test('catches errors and returns empty json', async (t) => { +t.test('catches errors and returns empty json', async (t) => { t.plan(3) const options = { fake: 'options' } const response = { some: 'details' } - const ping = requireInject('../../../lib/utils/ping.js', { + const ping = t.mock('../../../lib/utils/ping.js', { 'npm-registry-fetch': (url, opts) => { t.equal(url, '/-/ping?write=true', 'calls the correct url') t.equal(opts, options, 'passes through options') diff --git a/test/lib/utils/proc-log-listener.js b/test/lib/utils/proc-log-listener.js index 2c1009503762d..d580defa8a98f 100644 --- a/test/lib/utils/proc-log-listener.js +++ b/test/lib/utils/proc-log-listener.js @@ -1,5 +1,4 @@ const t = require('tap') -const requireInject = require('require-inject') const { inspect } = require('util') const logs = [] @@ -8,7 +7,7 @@ const npmlog = { verbose: (...args) => logs.push(['verbose', ...args]), } -requireInject('../../../lib/utils/proc-log-listener.js', { +t.mock('../../../lib/utils/proc-log-listener.js', 
{ npmlog, })() diff --git a/test/lib/utils/pulse-till-done.js b/test/lib/utils/pulse-till-done.js index 16c2d521dad08..c1d7902c0684a 100644 --- a/test/lib/utils/pulse-till-done.js +++ b/test/lib/utils/pulse-till-done.js @@ -1,5 +1,4 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') let pulseStarted = null const npmlog = { @@ -11,11 +10,11 @@ const npmlog = { }, } -const pulseTillDone = requireInject('../../../lib/utils/pulse-till-done.js', { +const pulseTillDone = t.mock('../../../lib/utils/pulse-till-done.js', { npmlog, }) -test('pulses (with promise)', async (t) => { +t.test('pulses (with promise)', async (t) => { t.teardown(() => { pulseStarted = null }) diff --git a/test/lib/utils/queryable.js b/test/lib/utils/queryable.js new file mode 100644 index 0000000000000..bde3ea66238f2 --- /dev/null +++ b/test/lib/utils/queryable.js @@ -0,0 +1,965 @@ +const { inspect } = require('util') +const t = require('tap') +const Queryable = require('../../../lib/utils/queryable.js') + +t.test('retrieve single nested property', async t => { + const fixture = { + foo: { + bar: 'bar', + baz: 'baz', + }, + lorem: { + ipsum: 'ipsum', + }, + } + const q = new Queryable(fixture) + const query = 'foo.bar' + t.strictSame(q.query(query), { [query]: 'bar' }, + 'should retrieve property value when querying for dot-sep name') +}) + +t.test('query', async t => { + const fixture = { + o: 'o', + single: [ + 'item', + ], + w: [ + 'a', + 'b', + 'c', + ], + list: [ + { + name: 'first', + }, + { + name: 'second', + }, + ], + foo: { + bar: 'bar', + baz: 'baz', + }, + lorem: { + ipsum: 'ipsum', + dolor: [ + 'a', + 'b', + 'c', + { + sit: [ + 'amet', + ], + }, + ], + }, + a: [ + [ + [ + { + b: [ + [ + { + c: 'd', + }, + ], + ], + }, + ], + ], + ], + } + const q = new Queryable(fixture) + t.strictSame( + q.query(['foo.baz', 'lorem.dolor[0]']), + { + 'foo.baz': 'baz', + 'lorem.dolor[0]': 'a', + }, + 'should retrieve property values when querying for multiple dot-sep names') + t.strictSame( + q.query('lorem.dolor[3].sit[0]'), + { + 'lorem.dolor[3].sit[0]': 'amet', + }, + 'should retrieve property from nested array items') + t.strictSame( + q.query('a[0][0][0].b[0][0].c'), + { + 'a[0][0][0].b[0][0].c': 'd', + }, + 'should retrieve property from deep nested array items') + t.strictSame( + q.query('o'), + { + o: 'o', + }, + 'should retrieve single level property value') + t.strictSame( + q.query('list.name'), + { + 'list[0].name': 'first', + 'list[1].name': 'second', + }, + 'should automatically expand arrays') + t.strictSame( + q.query(['list.name']), + { + 'list[0].name': 'first', + 'list[1].name': 'second', + }, + 'should automatically expand multiple arrays') + t.strictSame( + q.query('w'), + { + w: ['a', 'b', 'c'], + }, + 'should return arrays') + t.strictSame( + q.query('single'), + { + single: 'item', + }, + 'should return single item') + t.strictSame( + q.query('missing'), + undefined, + 'should return undefined') + t.strictSame( + q.query('missing[bar]'), + undefined, + 'should return undefined also') + t.throws(() => q.query('lorem.dolor[]'), + { code: 'EINVALIDSYNTAX' }, + 'should throw if using empty brackets notation' + ) + t.throws(() => q.query('lorem.dolor[].sit[0]'), + { code: 'EINVALIDSYNTAX' }, + 'should throw if using nested empty brackets notation' + ) + + const qq = new Queryable({ + foo: { + bar: 'bar', + }, + }) + t.strictSame( + qq.query(''), + { + '': { + foo: { + bar: 'bar', + }, + }, + }, + 'should return an object with results in an empty key' 
+ ) +}) + +t.test('missing key', async t => { + const fixture = { + foo: { + bar: 'bar', + }, + } + const q = new Queryable(fixture) + const query = 'foo.missing' + t.equal(q.query(query), undefined, + 'should retrieve no results') +}) + +t.test('no data object', async t => { + t.throws( + () => new Queryable(), + { code: 'ENOQUERYABLEOBJ' }, + 'should throw ENOQUERYABLEOBJ error' + ) + t.throws( + () => new Queryable(1), + { code: 'ENOQUERYABLEOBJ' }, + 'should throw ENOQUERYABLEOBJ error' + ) +}) + +t.test('get values', async t => { + const q = new Queryable({ + foo: { + bar: 'bar', + }, + }) + t.equal(q.get('foo.bar'), 'bar', 'should retrieve value') + t.equal(q.get('missing'), undefined, 'should return undefined') +}) + +t.test('set property values', async t => { + const fixture = { + foo: { + bar: 'bar', + }, + } + const q = new Queryable(fixture) + q.set('foo.baz', 'baz') + t.strictSame( + q.toJSON(), + { + foo: { + bar: 'bar', + baz: 'baz', + }, + }, + 'should add new property and its assigned value' + ) + q.set('foo[lorem.ipsum]', 'LOREM IPSUM') + t.strictSame( + q.toJSON(), + { + foo: { + bar: 'bar', + baz: 'baz', + 'lorem.ipsum': 'LOREM IPSUM', + }, + }, + 'should be able to set square brackets props' + ) + q.set('a.b[c.d]', 'omg') + t.strictSame( + q.toJSON(), + { + foo: { + bar: 'bar', + baz: 'baz', + 'lorem.ipsum': 'LOREM IPSUM', + }, + a: { + b: { + 'c.d': 'omg', + }, + }, + }, + 'should be able to nest square brackets props' + ) + q.set('a.b[e][f.g][1.0.0]', 'multiple') + t.strictSame( + q.toJSON(), + { + foo: { + bar: 'bar', + baz: 'baz', + 'lorem.ipsum': 'LOREM IPSUM', + }, + a: { + b: { + 'c.d': 'omg', + e: { + 'f.g': { + '1.0.0': 'multiple', + }, + }, + }, + }, + }, + 'should be able to nest multiple square brackets props' + ) + q.set('a.b[e][f.g][2.0.0].author.name', 'Ruy Adorno') + t.strictSame( + q.toJSON(), + { + foo: { + bar: 'bar', + baz: 'baz', + 'lorem.ipsum': 'LOREM IPSUM', + }, + a: { + b: { + 'c.d': 'omg', + e: { + 'f.g': { + '1.0.0': 'multiple', + '2.0.0': { + author: { + name: 'Ruy Adorno', + }, + }, + }, + }, + }, + }, + }, + 'should be able to use dot-sep notation after square bracket props' + ) + q.set('a.b[e][f.g][2.0.0].author[url]', 'https://npmjs.com') + t.strictSame( + q.toJSON(), + { + foo: { + bar: 'bar', + baz: 'baz', + 'lorem.ipsum': 'LOREM IPSUM', + }, + a: { + b: { + 'c.d': 'omg', + e: { + 'f.g': { + '1.0.0': 'multiple', + '2.0.0': { + author: { + name: 'Ruy Adorno', + url: 'https://npmjs.com', + }, + }, + }, + }, + }, + }, + }, + 'should be able to have multiple, separated, square brackets props' + ) + q.set('a.b[e][f.g][2.0.0].author[foo][bar].lorem.ipsum[dolor][sit][amet].omg', 'O_O') + t.strictSame( + q.toJSON(), + { + foo: { + bar: 'bar', + baz: 'baz', + 'lorem.ipsum': 'LOREM IPSUM', + }, + a: { + b: { + 'c.d': 'omg', + e: { + 'f.g': { + '1.0.0': 'multiple', + '2.0.0': { + author: { + name: 'Ruy Adorno', + url: 'https://npmjs.com', + foo: { + bar: { + lorem: { + ipsum: { + dolor: { + sit: { + amet: { + omg: 'O_O', + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + 'many many times...' 
+ ) + t.throws( + () => q.set('foo.bar.nest', 'should throw'), + { code: 'EOVERRIDEVALUE' }, + 'should throw if trying to override a literal value with an object' + ) + q.set('foo.bar.nest', 'use the force!', { force: true }) + t.strictSame( + q.toJSON().foo, + { + bar: { + nest: 'use the force!', + }, + baz: 'baz', + 'lorem.ipsum': 'LOREM IPSUM', + }, + 'should allow overriding literal values when using force option' + ) + + const qq = new Queryable({}) + qq.set('foo.bar.baz', 'BAZ') + t.strictSame( + qq.toJSON(), + { + foo: { + bar: { + baz: 'BAZ', + }, + }, + }, + 'should add new props to qq object' + ) + qq.set('foo.bar.bario', 'bario') + t.strictSame( + qq.toJSON(), + { + foo: { + bar: { + baz: 'BAZ', + bario: 'bario', + }, + }, + }, + 'should add new props to a previously existing object' + ) + qq.set('lorem', 'lorem') + t.strictSame( + qq.toJSON(), + { + foo: { + bar: { + baz: 'BAZ', + bario: 'bario', + }, + }, + lorem: 'lorem', + }, + 'should append new props added to object later' + ) + qq.set('foo.bar[foo.bar]', 'foo.bar.with.dots') + t.strictSame( + qq.toJSON(), + { + foo: { + bar: { + 'foo.bar': 'foo.bar.with.dots', + baz: 'BAZ', + bario: 'bario', + }, + }, + lorem: 'lorem', + }, + 'should append new props added to object later' + ) +}) + +t.test('set arrays', async t => { + const q = new Queryable({}) + + q.set('foo[1]', 'b') + t.strictSame( + q.toJSON(), + { + foo: [ + undefined, + 'b', + ], + }, + 'should be able to set items in an array using index references' + ) + + q.set('foo[0]', 'a') + t.strictSame( + q.toJSON(), + { + foo: [ + 'a', + 'b', + ], + }, + 'should be able to set a previously missing item to an array' + ) + + q.set('foo[2]', 'c') + t.strictSame( + q.toJSON(), + { + foo: [ + 'a', + 'b', + 'c', + ], + }, + 'should be able to append more items to an array' + ) + + q.set('foo[2]', 'C') + t.strictSame( + q.toJSON(), + { + foo: [ + 'a', + 'b', + 'C', + ], + }, + 'should be able to override array items' + ) + + t.throws( + () => q.set('foo[2].bar', 'bar'), + { code: 'EOVERRIDEVALUE' }, + 'should throw if trying to override an array literal item with an obj' + ) + + q.set('foo[2].bar', 'bar', { force: true }) + t.strictSame( + q.toJSON(), + { + foo: [ + 'a', + 'b', + { bar: 'bar' }, + ], + }, + 'should be able to override an array string item with an obj' + ) + + q.set('foo[3].foo', 'surprise surprise, another foo') + t.strictSame( + q.toJSON(), + { + foo: [ + 'a', + 'b', + { bar: 'bar' }, + { + foo: 'surprise surprise, another foo', + }, + ], + }, + 'should be able to append more items to an array' + ) + + q.set('foo[3].foo', 'FOO') + t.strictSame( + q.toJSON(), + { + foo: [ + 'a', + 'b', + { bar: 'bar' }, + { + foo: 'FOO', + }, + ], + }, + 'should be able to override property of an obj inside an array' + ) + + const qq = new Queryable({}) + qq.set('foo[0].bar[1].baz.bario[0][0][0]', 'something') + t.strictSame( + qq.toJSON(), + { + foo: [ + { + bar: [ + undefined, + { + baz: { + bario: [[['something']]], + }, + }, + ], + }, + ], + }, + 'should append as many arrays as necessary' + ) + qq.set('foo[0].bar[1].baz.bario[0][1][0]', 'something else') + t.strictSame( + qq.toJSON(), + { + foo: [ + { + bar: [ + undefined, + { + baz: { + bario: [[ + ['something'], + ['something else'], + ]], + }, + }, + ], + }, + ], + }, + 'should append as many arrays as necessary' + ) + qq.set('foo', null) + t.strictSame( + qq.toJSON(), + { + foo: null, + }, + 'should be able to set a value to null' + ) + qq.set('foo.bar', 'bar') + t.strictSame( + qq.toJSON(), + { + foo: { + bar: 'bar', + 
}, + }, + 'should be able to replace a null value with properties' + ) + + const qqq = new Queryable({ + arr: [ + 'a', + 'b', + ], + }) + + qqq.set('arr[]', 'c') + t.strictSame( + qqq.toJSON(), + { + arr: [ + 'a', + 'b', + 'c', + ], + }, + 'should be able to append to array using empty bracket notation' + ) + + qqq.set('arr[].foo', 'foo') + t.strictSame( + qqq.toJSON(), + { + arr: [ + 'a', + 'b', + 'c', + { + foo: 'foo', + }, + ], + }, + 'should be able to append objects to array using empty bracket notation' + ) + + qqq.set('arr[].bar.name', 'BAR') + t.strictSame( + qqq.toJSON(), + { + arr: [ + 'a', + 'b', + 'c', + { + foo: 'foo', + }, + { + bar: { + name: 'BAR', + }, + }, + ], + }, + 'should be able to append more objects to array using empty brackets' + ) + + qqq.set('foo.bar.baz[].lorem.ipsum', 'something') + t.strictSame( + qqq.toJSON(), + { + arr: [ + 'a', + 'b', + 'c', + { + foo: 'foo', + }, + { + bar: { + name: 'BAR', + }, + }, + ], + foo: { + bar: { + baz: [ + { + lorem: { + ipsum: 'something', + }, + }, + ], + }, + }, + }, + 'should be able to append to array using empty brackets in nested objs' + ) + + qqq.set('foo.bar.baz[].lorem.array[]', 'new item') + t.strictSame( + qqq.toJSON(), + { + arr: [ + 'a', + 'b', + 'c', + { + foo: 'foo', + }, + { + bar: { + name: 'BAR', + }, + }, + ], + foo: { + bar: { + baz: [ + { + lorem: { + ipsum: 'something', + }, + }, + { + lorem: { + array: [ + 'new item', + ], + }, + }, + ], + }, + }, + }, + 'should be able to append to array using empty brackets in nested objs' + ) + + const qqqq = new Queryable({ + arr: [ + 'a', + 'b', + ], + }) + t.throws( + () => qqqq.set('arr.foo', 'foo'), + { code: 'ENOADDPROP' }, + 'should throw an override error' + ) + + qqqq.set('arr.foo', 'foo', { force: true }) + t.strictSame( + qqqq.toJSON(), + { + arr: { + 0: 'a', + 1: 'b', + foo: 'foo', + }, + }, + 'should be able to override arrays with objects when using force=true' + ) + + qqqq.set('bar[]', 'item', { force: true }) + t.strictSame( + qqqq.toJSON(), + { + arr: { + 0: 'a', + 1: 'b', + foo: 'foo', + }, + bar: [ + 'item', + ], + }, + 'should be able to create new array with item when using force=true' + ) + + qqqq.set('bar[]', 'something else', { force: true }) + t.strictSame( + qqqq.toJSON(), + { + arr: { + 0: 'a', + 1: 'b', + foo: 'foo', + }, + bar: [ + 'item', + 'something else', + ], + }, + 'should be able to append items to arrays when using force=true' + ) + + const qqqqq = new Queryable({ + arr: [ + null, + ], + }) + qqqqq.set('arr[]', 'b') + t.strictSame( + qqqqq.toJSON(), + { + arr: [ + null, + 'b', + ], + }, + 'should be able to append items with empty items' + ) + qqqqq.set('arr[0]', 'a') + t.strictSame( + qqqqq.toJSON(), + { + arr: [ + 'a', + 'b', + ], + }, + 'should be able to replace empty items in an array' + ) + qqqqq.set('lorem.ipsum', 3) + t.strictSame( + qqqqq.toJSON(), + { + arr: [ + 'a', + 'b', + ], + lorem: { + ipsum: 3, + }, + }, + 'should be able to replace empty items in an array' + ) + t.throws( + () => qqqqq.set('lorem[]', 4), + { code: 'ENOAPPEND' }, + 'should throw error if using empty square bracket in an non-array item' + ) + qqqqq.set('lorem[0]', 3) + t.strictSame( + qqqqq.toJSON(), + { + arr: [ + 'a', + 'b', + ], + lorem: { + 0: 3, + ipsum: 3, + }, + }, + 'should be able add indexes as props when finding an object' + ) + qqqqq.set('lorem.1', 3) + t.strictSame( + qqqqq.toJSON(), + { + arr: [ + 'a', + 'b', + ], + lorem: { + 0: 3, + 1: 3, + ipsum: 3, + }, + }, + 'should be able add numeric props to an obj' + ) +}) + +t.test('delete 
values', async t => { + const q = new Queryable({ + foo: { + bar: { + lorem: 'lorem', + }, + }, + }) + q.delete('foo.bar.lorem') + t.strictSame( + q.toJSON(), + { + foo: { + bar: {}, + }, + }, + 'should delete queried item' + ) + q.delete('foo') + t.strictSame( + q.toJSON(), + {}, + 'should delete nested items' + ) + q.set('foo.a.b.c[0]', 'value') + q.delete('foo.a.b.c[0]') + t.strictSame( + q.toJSON(), + { + foo: { + a: { + b: { + c: [], + }, + }, + }, + }, + 'should delete array item' + ) + // creates an array that has an implicit empty first item + q.set('foo.a.b.c[1][0].foo.bar[0][0]', 'value') + q.delete('foo.a.b.c[1]') + t.strictSame( + q.toJSON(), + { + foo: { + a: { + b: { + c: [null], + }, + }, + }, + }, + 'should delete array item' + ) +}) + +t.test('logger', async t => { + const q = new Queryable({}) + q.set('foo.bar[0].baz', 'baz') + t.strictSame( + inspect(q, { depth: 10 }), + inspect({ + foo: { + bar: [ + { + baz: 'baz', + }, + ], + }, + }, { depth: 10 }), + 'should retrieve expected data' + ) +}) + +t.test('bracket lovers', async t => { + const q = new Queryable({}) + q.set('[iLoveBrackets]', 'seriously?') + t.strictSame( + q.toJSON(), + { + '[iLoveBrackets]': 'seriously?', + }, + 'should be able to set top-level props using square brackets notation' + ) + + t.equal(q.get('[iLoveBrackets]'), 'seriously?', + 'should bypass square bracket in top-level properties') + + q.set('[0]', '-.-') + t.strictSame( + q.toJSON(), + { + '[iLoveBrackets]': 'seriously?', + '[0]': '-.-', + }, + 'any top-level item can not be parsed with square bracket notation' + ) +}) diff --git a/test/lib/utils/read-local-package.js b/test/lib/utils/read-local-package.js deleted file mode 100644 index 9ae21f7d62b4c..0000000000000 --- a/test/lib/utils/read-local-package.js +++ /dev/null @@ -1,58 +0,0 @@ -const requireInject = require('require-inject') -const { test } = require('tap') - -let prefix -const _flatOptions = { - json: false, - global: false, - get prefix () { - return prefix - }, -} - -const readLocalPackageName = requireInject('../../../lib/utils/read-local-package.js') -const npm = { - flatOptions: _flatOptions, -} - -test('read local package.json', async (t) => { - prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-local-package', - version: '1.0.0', - }), - }) - const packageName = await readLocalPackageName(npm) - t.equal( - packageName, - 'my-local-package', - 'should retrieve current package name' - ) -}) - -test('read local scoped-package.json', async (t) => { - prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: '@my-scope/my-local-package', - version: '1.0.0', - }), - }) - const packageName = await readLocalPackageName(npm) - t.equal( - packageName, - '@my-scope/my-local-package', - 'should retrieve scoped package name' - ) -}) - -test('read using --global', async (t) => { - prefix = t.testdir({}) - _flatOptions.global = true - const packageName = await readLocalPackageName(npm) - t.equal( - packageName, - undefined, - 'should not retrieve a package name' - ) - _flatOptions.global = false -}) diff --git a/test/lib/utils/read-package-name.js b/test/lib/utils/read-package-name.js new file mode 100644 index 0000000000000..a1a1b4a1504dc --- /dev/null +++ b/test/lib/utils/read-package-name.js @@ -0,0 +1,33 @@ +const t = require('tap') + +const readPackageName = require('../../../lib/utils/read-package-name.js') + +t.test('read local package.json', async (t) => { + const prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'my-local-package', + version: 
'1.0.0', + }), + }) + const packageName = await readPackageName(prefix) + t.equal( + packageName, + 'my-local-package', + 'should retrieve current package name' + ) +}) + +t.test('read local scoped-package.json', async (t) => { + const prefix = t.testdir({ + 'package.json': JSON.stringify({ + name: '@my-scope/my-local-package', + version: '1.0.0', + }), + }) + const packageName = await readPackageName(prefix) + t.equal( + packageName, + '@my-scope/my-local-package', + 'should retrieve scoped package name' + ) +}) diff --git a/test/lib/utils/read-user-info.js b/test/lib/utils/read-user-info.js index 99d85d66c4feb..5d937ff78a551 100644 --- a/test/lib/utils/read-user-info.js +++ b/test/lib/utils/read-user-info.js @@ -1,5 +1,4 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') let readOpts = null let readResult = null @@ -28,13 +27,13 @@ const npmUserValidate = { }, } -const readUserInfo = requireInject('../../../lib/utils/read-user-info.js', { +const readUserInfo = t.mock('../../../lib/utils/read-user-info.js', { read, npmlog, 'npm-user-validate': npmUserValidate, }) -test('otp', async (t) => { +t.test('otp', async (t) => { readResult = '1234' t.teardown(() => { readResult = null @@ -44,7 +43,7 @@ test('otp', async (t) => { t.equal(result, '1234', 'received the otp') }) -test('password', async (t) => { +t.test('password', async (t) => { readResult = 'password' t.teardown(() => { readResult = null @@ -57,7 +56,7 @@ test('password', async (t) => { }, 'got the correct options') }) -test('username', async (t) => { +t.test('username', async (t) => { readResult = 'username' t.teardown(() => { readResult = null @@ -67,7 +66,7 @@ test('username', async (t) => { t.equal(result, 'username', 'received the username') }) -test('username - invalid warns and retries', async (t) => { +t.test('username - invalid warns and retries', async (t) => { readResult = 'invalid' t.teardown(() => { readResult = null @@ -87,7 +86,7 @@ test('username - invalid warns and retries', async (t) => { t.equal(logMsg, 'invalid username') }) -test('email', async (t) => { +t.test('email', async (t) => { readResult = 'foo@bar.baz' t.teardown(() => { readResult = null @@ -97,7 +96,7 @@ test('email', async (t) => { t.equal(result, 'foo@bar.baz', 'received the email') }) -test('email - invalid warns and retries', async (t) => { +t.test('email - invalid warns and retries', async (t) => { readResult = 'invalid@bar.baz' t.teardown(() => { readResult = null diff --git a/test/lib/utils/reify-finish.js b/test/lib/utils/reify-finish.js index 7ff5146a6bdbf..bbe8a3d99021d 100644 --- a/test/lib/utils/reify-finish.js +++ b/test/lib/utils/reify-finish.js @@ -1,5 +1,4 @@ const t = require('tap') -const requireInject = require('require-inject') const npm = { config: { @@ -30,7 +29,7 @@ const fs = { }, } -const reifyFinish = requireInject('../../../lib/utils/reify-finish.js', { +const reifyFinish = t.mock('../../../lib/utils/reify-finish.js', { fs, '../../../lib/utils/reify-output.js': reifyOutput, }) @@ -79,7 +78,7 @@ t.test('should write if everything above passes', async t => { }) t.test('works without fs.promises', async t => { - t.doesNotThrow(() => requireInject('../../../lib/utils/reify-finish.js', { + t.doesNotThrow(() => t.mock('../../../lib/utils/reify-finish.js', { fs: { ...fs, promises: null }, '../../../lib/npm.js': npm, '../../../lib/utils/reify-output.js': reifyOutput, diff --git a/test/lib/utils/reify-output.js b/test/lib/utils/reify-output.js index 
e41eabcb896e9..3ffbdf86a2989 100644 --- a/test/lib/utils/reify-output.js +++ b/test/lib/utils/reify-output.js @@ -1,5 +1,4 @@ const t = require('tap') -const requireInject = require('require-inject') const log = require('npmlog') log.level = 'warn' @@ -13,22 +12,13 @@ const npm = { started: Date.now(), flatOptions: settings, } -const getReifyOutput = tester => - requireInject( - '../../../lib/utils/reify-output.js', - { - '../../../lib/utils/output.js': tester, - } - ) - +const reifyOutput = require('../../../lib/utils/reify-output.js') t.test('missing info', (t) => { t.plan(1) - const reifyOutput = getReifyOutput( - out => t.doesNotHave( - out, - 'looking for funding', - 'should not print fund message if missing info' - ) + npm.output = out => t.notMatch( + out, + 'looking for funding', + 'should not print fund message if missing info' ) reifyOutput(npm, { @@ -43,12 +33,10 @@ t.test('missing info', (t) => { t.test('even more missing info', t => { t.plan(1) - const reifyOutput = getReifyOutput( - out => t.doesNotHave( - out, - 'looking for funding', - 'should not print fund message if missing info' - ) + npm.output = out => t.notMatch( + out, + 'looking for funding', + 'should not print fund message if missing info' ) reifyOutput(npm, { @@ -60,17 +48,15 @@ t.test('even more missing info', t => { t.test('single package', (t) => { t.plan(1) - const reifyOutput = getReifyOutput( - out => { - if (out.endsWith('looking for funding')) { - t.match( - out, - '1 package is looking for funding', - 'should print single package message' - ) - } + npm.output = out => { + if (out.endsWith('looking for funding')) { + t.match( + out, + '1 package is looking for funding', + 'should print single package message' + ) } - ) + } reifyOutput(npm, { // a report with an error is the same as no report at all, if @@ -110,12 +96,10 @@ t.test('no message when funding config is false', (t) => { settings.fund = true }) settings.fund = false - const reifyOutput = getReifyOutput( - out => { - if (out.endsWith('looking for funding')) - t.fail('should not print funding info', { actual: out }) - } - ) + npm.output = out => { + if (out.endsWith('looking for funding')) + t.fail('should not print funding info', { actual: out }) + } reifyOutput(npm, { actualTree: { @@ -147,17 +131,15 @@ t.test('no message when funding config is false', (t) => { t.test('print appropriate message for many packages', (t) => { t.plan(1) - const reifyOutput = getReifyOutput( - out => { - if (out.endsWith('looking for funding')) { - t.match( - out, - '3 packages are looking for funding', - 'should print single package message' - ) - } + npm.output = out => { + if (out.endsWith('looking for funding')) { + t.match( + out, + '3 packages are looking for funding', + 'should print single package message' + ) } - ) + } reifyOutput(npm, { actualTree: { @@ -205,39 +187,162 @@ t.test('print appropriate message for many packages', (t) => { }) }) -t.test('no output when silent', t => { - const reifyOutput = getReifyOutput(out => { - t.fail('should not get output when silent', { actual: out }) - }) - t.teardown(() => log.level = 'warn') - log.level = 'silent' - reifyOutput(npm, { - actualTree: { inventory: { size: 999 }, children: [] }, - auditReport: { - toJSON: () => { - throw new Error('this should not get called') - }, - vulnerabilities: {}, - metadata: { - vulnerabilities: { - total: 99, - }, +t.test('showing and not showing audit report', async t => { + const auditReport = { + toJSON: () => auditReport, + auditReportVersion: 2, + vulnerabilities: { + 
minimist: { + name: 'minimist', + severity: 'low', + via: [ + { + id: 1179, + url: 'https://npmjs.com/advisories/1179', + title: 'Prototype Pollution', + severity: 'low', + vulnerable_versions: '<0.2.1 || >=1.0.0 <1.2.3', + }, + ], + effects: [], + range: '<0.2.1 || >=1.0.0 <1.2.3', + nodes: [ + 'node_modules/minimist', + ], + fixAvailable: true, }, }, - diff: { - children: [ - { action: 'ADD', ideal: { location: 'loc' } }, - ], + metadata: { + vulnerabilities: { + info: 0, + low: 1, + moderate: 0, + high: 0, + critical: 0, + total: 1, + }, + dependencies: { + prod: 1, + dev: 0, + optional: 0, + peer: 0, + peerOptional: 0, + total: 1, + }, }, + } + + t.test('no output when silent', t => { + npm.output = out => { + t.fail('should not get output when silent', { actual: out }) + } + t.teardown(() => log.level = 'warn') + log.level = 'silent' + reifyOutput(npm, { + actualTree: { inventory: { size: 999 }, children: [] }, + auditReport, + diff: { + children: [ + { action: 'ADD', ideal: { location: 'loc' } }, + ], + }, + }) + t.end() + }) + + t.test('output when not silent', t => { + const OUT = [] + npm.output = out => { + OUT.push(out) + } + reifyOutput(npm, { + actualTree: { inventory: new Map(), children: [] }, + auditReport, + diff: { + children: [ + { action: 'ADD', ideal: { location: 'loc' } }, + ], + }, + }) + t.match(OUT.join('\n'), /Run `npm audit` for details\.$/, 'got audit report') + t.end() }) + + for (const json of [true, false]) { + t.test(`json=${json}`, t => { + t.teardown(() => { + delete npm.flatOptions.json + }) + npm.flatOptions.json = json + t.test('set exit code when cmd is audit', t => { + npm.output = () => {} + const { exitCode } = process + const { command } = npm + npm.flatOptions.auditLevel = 'low' + t.teardown(() => { + delete npm.flatOptions.auditLevel + npm.command = command + // only set exitCode back if we're passing tests + if (t.passing()) + process.exitCode = exitCode + }) + + process.exitCode = 0 + npm.command = 'audit' + reifyOutput(npm, { + actualTree: { inventory: new Map(), children: [] }, + auditReport, + diff: { + children: [ + { action: 'ADD', ideal: { location: 'loc' } }, + ], + }, + }) + + t.equal(process.exitCode, 1, 'set exit code') + t.end() + }) + + t.test('do not set exit code when cmd is install', t => { + npm.output = () => {} + const { exitCode } = process + const { command } = npm + npm.flatOptions.auditLevel = 'low' + t.teardown(() => { + delete npm.flatOptions.auditLevel + npm.command = command + // only set exitCode back if we're passing tests + if (t.passing()) + process.exitCode = exitCode + }) + + process.exitCode = 0 + npm.command = 'install' + reifyOutput(npm, { + actualTree: { inventory: new Map(), children: [] }, + auditReport, + diff: { + children: [ + { action: 'ADD', ideal: { location: 'loc' } }, + ], + }, + }) + + t.equal(process.exitCode, 0, 'did not set exit code') + t.end() + }) + t.end() + }) + } + t.end() }) t.test('packages changed message', t => { const output = [] - const reifyOutput = getReifyOutput(out => { + npm.output = out => { output.push(out) - }) + } // return a test function that builds up the mock and snapshots output const testCase = (t, added, removed, changed, audited, json, command) => { @@ -311,9 +416,7 @@ t.test('packages changed message', t => { t.test('added packages should be looked up within returned tree', t => { t.test('has added pkg in inventory', t => { t.plan(1) - const reifyOutput = getReifyOutput( - out => t.matchSnapshot(out) - ) + npm.output = out => t.matchSnapshot(out) reifyOutput(npm, { 
actualTree: { @@ -332,9 +435,7 @@ t.test('added packages should be looked up within returned tree', t => { t.test('missing added pkg in inventory', t => { t.plan(1) - const reifyOutput = getReifyOutput( - out => t.matchSnapshot(out) - ) + npm.output = out => t.matchSnapshot(out) reifyOutput(npm, { actualTree: { diff --git a/test/lib/utils/replace-info.js b/test/lib/utils/replace-info.js index ea9f06520d0a7..e4b83783a55a7 100644 --- a/test/lib/utils/replace-info.js +++ b/test/lib/utils/replace-info.js @@ -49,7 +49,7 @@ t.equal( 'should replace single item within a phrase' ) -t.deepEqual( +t.same( replaceInfo([ 'Something https://user:pass@registry.npmjs.org/ foo bar', 'http://foo:bar@registry.npmjs.org', @@ -63,7 +63,7 @@ t.deepEqual( 'should replace single item within a phrase' ) -t.deepEqual( +t.same( replaceInfo([ 'Something https://user:pass@registry.npmjs.org/ foo bar', null, diff --git a/test/lib/utils/setup-log.js b/test/lib/utils/setup-log.js index 4398200abe22c..7f907bc7e4148 100644 --- a/test/lib/utils/setup-log.js +++ b/test/lib/utils/setup-log.js @@ -1,14 +1,12 @@ const t = require('tap') -const requireInject = require('require-inject') const settings = { level: 'warn', } -t.afterEach(cb => { +t.afterEach(() => { Object.keys(settings).forEach(k => { delete settings[k] }) - cb() }) const WARN_CALLED = [] @@ -61,7 +59,7 @@ const npmlog = { } const EXPLAIN_CALLED = [] -const setupLog = requireInject('../../../lib/utils/setup-log.js', { +const setupLog = t.mock('../../../lib/utils/setup-log.js', { '../../../lib/utils/explain-eresolve.js': { explain: (...args) => { EXPLAIN_CALLED.push(args) @@ -86,15 +84,15 @@ t.test('setup with color=always and unicode', t => { t.strictSame(WARN_CALLED, [['ERESOLVE', 'hello', { some: 'object' }]]) WARN_CALLED.length = 0 - t.equal(setupLog(config({ + setupLog(config({ loglevel: 'warn', color: 'always', unicode: true, progress: false, - })), true) + })) npmlog.warn('ERESOLVE', 'hello', { some: { other: 'object' } }) - t.strictSame(EXPLAIN_CALLED, [[{ some: { other: 'object' } }]], + t.strictSame(EXPLAIN_CALLED, [[{ some: { other: 'object' } }, true, 2]], 'log.warn(ERESOLVE) patched to call explainEresolve()') t.strictSame(WARN_CALLED, [ ['ERESOLVE', 'hello'], @@ -127,12 +125,12 @@ t.test('setup with color=true, no unicode, and non-TTY terminal', t => { process.stderr.isTTY = false process.stdout.isTTY = false - t.equal(setupLog(config({ + setupLog(config({ loglevel: 'warn', color: false, progress: false, heading: 'asdf', - })), false) + })) t.strictSame(settings, { level: 'warn', @@ -158,12 +156,12 @@ t.test('setup with color=true, no unicode, and dumb TTY terminal', t => { process.stdout.isTTY = true process.env.TERM = 'dumb' - t.equal(setupLog(config({ + setupLog(config({ loglevel: 'warn', color: true, progress: false, heading: 'asdf', - })), true) + })) t.strictSame(settings, { level: 'warn', @@ -189,12 +187,12 @@ t.test('setup with color=true, no unicode, and non-dumb TTY terminal', t => { process.stdout.isTTY = true process.env.TERM = 'totes not dum' - t.equal(setupLog(config({ + setupLog(config({ loglevel: 'warn', color: true, progress: true, heading: 'asdf', - })), true) + })) t.strictSame(settings, { level: 'warn', @@ -220,12 +218,12 @@ t.test('setup with non-TTY stdout, TTY stderr', t => { process.stdout.isTTY = false process.env.TERM = 'definitely not a dummy' - t.equal(setupLog(config({ + setupLog(config({ loglevel: 'warn', color: true, progress: true, heading: 'asdf', - })), false) + })) t.strictSame(settings, { level: 'warn', @@ -250,12 
+248,12 @@ t.test('setup with TTY stdout, non-TTY stderr', t => { process.stderr.isTTY = false process.stdout.isTTY = true - t.equal(setupLog(config({ + setupLog(config({ loglevel: 'warn', color: true, progress: true, heading: 'asdf', - })), true) + })) t.strictSame(settings, { level: 'warn', diff --git a/test/lib/utils/split-package-names.js b/test/lib/utils/split-package-names.js index c69bb2a3dab8c..82b8f5578397f 100644 --- a/test/lib/utils/split-package-names.js +++ b/test/lib/utils/split-package-names.js @@ -1,9 +1,9 @@ 'use strict' -const { test } = require('tap') +const t = require('tap') const splitPackageNames = require('../../../lib/utils/split-package-names.js') -test('splitPackageNames', t => { +t.test('splitPackageNames', t => { const assertions = [ ['semver', 'semver'], ['read-pkg/semver', 'read-pkg/node_modules/semver'], diff --git a/test/lib/utils/tar.js b/test/lib/utils/tar.js index b780a73e5ec1c..2662d47ace486 100644 --- a/test/lib/utils/tar.js +++ b/test/lib/utils/tar.js @@ -1,7 +1,6 @@ -const { test } = require('tap') +const t = require('tap') const pack = require('libnpmpack') const ssri = require('ssri') -const requireInject = require('require-inject') const { logTar, getContents } = require('../../../lib/utils/tar.js') @@ -18,7 +17,7 @@ const printLogs = (tarball, unicode) => { return logs.join('\n') } -test('should log tarball contents', async (t) => { +t.test('should log tarball contents', async (t) => { const testDir = t.testdir({ 'package.json': JSON.stringify({ name: 'my-cool-pkg', @@ -27,6 +26,9 @@ test('should log tarball contents', async (t) => { 'bundle-dep', ], }, null, 2), + cat: 'meow', + chai: 'blub', + dog: 'woof', node_modules: { 'bundle-dep': 'toto', }, @@ -42,8 +44,8 @@ test('should log tarball contents', async (t) => { t.matchSnapshot(printLogs(tarballContents, false)) }) -test('should log tarball contents with unicode', async (t) => { - const { logTar } = requireInject('../../../lib/utils/tar.js', { +t.test('should log tarball contents with unicode', async (t) => { + const { logTar } = t.mock('../../../lib/utils/tar.js', { npmlog: { notice: (str) => { t.ok(true, 'defaults to npmlog') @@ -60,8 +62,8 @@ test('should log tarball contents with unicode', async (t) => { t.end() }) -test('should default to npmlog', async (t) => { - const { logTar } = requireInject('../../../lib/utils/tar.js', { +t.test('should default to npmlog', async (t) => { + const { logTar } = t.mock('../../../lib/utils/tar.js', { npmlog: { notice: (str) => { t.ok(true, 'defaults to npmlog') @@ -78,7 +80,7 @@ test('should default to npmlog', async (t) => { t.end() }) -test('should getContents of a tarball', async (t) => { +t.test('should getContents of a tarball', async (t) => { const testDir = t.testdir({ 'package.json': JSON.stringify({ name: 'my-cool-pkg', @@ -101,9 +103,9 @@ test('should getContents of a tarball', async (t) => { id: 'my-cool-pkg@1.0.0', name: 'my-cool-pkg', version: '1.0.0', - size: 149, + size: 146, unpackedSize: 49, - shasum: 'c0bfd67a5142104e429afda09119eedd6a30d2fc', + shasum: 'b8379c5e69693cdda73aec3d81dae1d11c1e75bd', integrity: ssri.parse(integrity.sha512[0]), filename: 'my-cool-pkg-1.0.0.tgz', files: [{ path: 'package.json', size: 49, mode: 420 }], diff --git a/test/lib/utils/unsupported.js b/test/lib/utils/unsupported.js index f14cba9b744ad..3a05d90666025 100644 --- a/test/lib/utils/unsupported.js +++ b/test/lib/utils/unsupported.js @@ -1,4 +1,4 @@ -const test = require('tap').test +const t = require('tap') const unsupported = 
require('../../../lib/utils/unsupported.js') const versions = [ @@ -33,20 +33,20 @@ const versions = [ ['v13.0.0-0', false, false], ] -test('versions', function (t) { +t.test('versions', function (t) { t.plan(versions.length * 2) versions.forEach(function (verinfo) { const version = verinfo[0] const broken = verinfo[1] const unsupp = verinfo[2] const nodejs = unsupported.checkVersion(version) - t.is(nodejs.broken, broken, version + ' ' + (broken ? '' : 'not ') + 'broken') - t.is(nodejs.unsupported, unsupp, version + ' ' + (unsupp ? 'unsupported' : 'supported')) + t.equal(nodejs.broken, broken, version + ' ' + (broken ? '' : 'not ') + 'broken') + t.equal(nodejs.unsupported, unsupp, version + ' ' + (unsupp ? 'unsupported' : 'supported')) }) - t.done() + t.end() }) -test('checkForBrokenNode', t => { +t.test('checkForBrokenNode', t => { // run it once to not fail unsupported.checkForBrokenNode() @@ -77,7 +77,7 @@ test('checkForBrokenNode', t => { unsupported.checkForBrokenNode() }) -test('checkForUnsupportedNode', t => { +t.test('checkForUnsupportedNode', t => { const npmlog = require('npmlog') const { warn } = npmlog const versionPropDesc = Object.getOwnPropertyDescriptor(process, 'version') diff --git a/test/lib/utils/update-notifier.js b/test/lib/utils/update-notifier.js index 99c9dfc26626f..dc0a64ff46127 100644 --- a/test/lib/utils/update-notifier.js +++ b/test/lib/utils/update-notifier.js @@ -1,5 +1,4 @@ const t = require('tap') -const requireInject = require('require-inject') let ciMock = null const flatOptions = { global: false, cache: t.testdir() + '/_cacache' } @@ -52,6 +51,7 @@ let STAT_ERROR = null let STAT_MTIME = null let WRITE_ERROR = null const fs = { + ...require('fs'), stat: (path, cb) => { if (basename(path) !== '_update-notifier-last-checked') { console.error(new Error('should only write to notifier last checked file')) @@ -72,24 +72,28 @@ const fs = { }, } -const updateNotifier = requireInject('../../../lib/utils/update-notifier.js', { +const updateNotifier = t.mock('../../../lib/utils/update-notifier.js', { '@npmcli/ci-detect': () => ciMock, pacote, fs, }) -t.afterEach(cb => { +t.afterEach(() => { MANIFEST_REQUEST.length = 0 STAT_ERROR = null PACOTE_ERROR = null STAT_MTIME = null WRITE_ERROR = null - cb() }) +const runUpdateNotifier = async npm => { + await updateNotifier(npm) + return npm.updateNotification +} + t.test('situations in which we do not notify', t => { t.test('nothing to do if notifier disabled', async t => { - t.equal(await updateNotifier({ + t.equal(await runUpdateNotifier({ ...npm, config: { get: (k) => k !== 'update-notifier' }, }), null) @@ -97,7 +101,7 @@ t.test('situations in which we do not notify', t => { }) t.test('do not suggest update if already updating', async t => { - t.equal(await updateNotifier({ + t.equal(await runUpdateNotifier({ ...npm, flatOptions: { ...flatOptions, global: true }, command: 'install', @@ -106,32 +110,42 @@ t.test('situations in which we do not notify', t => { t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') }) + t.test('do not suggest update if already updating with spec', async t => { + t.equal(await runUpdateNotifier({ + ...npm, + flatOptions: { ...flatOptions, global: true }, + command: 'install', + argv: ['npm@latest'], + }), null) + t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') + }) + t.test('do not update if same as latest', async t => { - t.equal(await updateNotifier(npm), null) + t.equal(await runUpdateNotifier(npm), null) t.strictSame(MANIFEST_REQUEST, ['npm@latest'], 'requested 
latest version') }) t.test('check if stat errors (here for coverage)', async t => { STAT_ERROR = new Error('blorg') - t.equal(await updateNotifier(npm), null) + t.equal(await runUpdateNotifier(npm), null) t.strictSame(MANIFEST_REQUEST, ['npm@latest'], 'requested latest version') }) t.test('ok if write errors (here for coverage)', async t => { WRITE_ERROR = new Error('grolb') - t.equal(await updateNotifier(npm), null) + t.equal(await runUpdateNotifier(npm), null) t.strictSame(MANIFEST_REQUEST, ['npm@latest'], 'requested latest version') }) t.test('ignore pacote failures (here for coverage)', async t => { PACOTE_ERROR = new Error('pah-KO-tchay') - t.equal(await updateNotifier(npm), null) + t.equal(await runUpdateNotifier(npm), null) t.strictSame(MANIFEST_REQUEST, ['npm@latest'], 'requested latest version') }) t.test('do not update if newer than latest, but same as next', async t => { - t.equal(await updateNotifier({ ...npm, version: NEXT_VERSION }), null) + t.equal(await runUpdateNotifier({ ...npm, version: NEXT_VERSION }), null) const reqs = ['npm@latest', `npm@^${NEXT_VERSION}`] t.strictSame(MANIFEST_REQUEST, reqs, 'requested latest and next versions') }) t.test('do not update if on the latest beta', async t => { - t.equal(await updateNotifier({ ...npm, version: CURRENT_BETA }), null) + t.equal(await runUpdateNotifier({ ...npm, version: CURRENT_BETA }), null) const reqs = [`npm@^${CURRENT_BETA}`] t.strictSame(MANIFEST_REQUEST, reqs, 'requested latest and next versions') }) @@ -141,21 +155,21 @@ t.test('situations in which we do not notify', t => { ciMock = null }) ciMock = 'something' - t.equal(await updateNotifier(npm), null) + t.equal(await runUpdateNotifier(npm), null) t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') }) t.test('only check weekly for GA releases', async t => { - // the 10 is fuzz factor for test environment - STAT_MTIME = Date.now() - (1000 * 60 * 60 * 24 * 7) + 10 - t.equal(await updateNotifier(npm), null) + // One week (plus five minutes to account for test environment fuzziness) + STAT_MTIME = Date.now() - (1000 * 60 * 60 * 24 * 7) + (1000 * 60 * 5) + t.equal(await runUpdateNotifier(npm), null) t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') }) t.test('only check daily for betas', async t => { - // the 10 is fuzz factor for test environment - STAT_MTIME = Date.now() - (1000 * 60 * 60 * 24) + 10 - t.equal(await updateNotifier({ ...npm, version: HAVE_BETA }), null) + // One day (plus five minutes to account for test environment fuzziness) + STAT_MTIME = Date.now() - (1000 * 60 * 60 * 24) + (1000 * 60 * 5) + t.equal(await runUpdateNotifier({ ...npm, version: HAVE_BETA }), null) t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') }) @@ -165,43 +179,43 @@ t.test('situations in which we do not notify', t => { t.test('notification situations', t => { t.test('new beta available', async t => { const version = HAVE_BETA - t.matchSnapshot(await updateNotifier({ ...npm, version }), 'color') - t.matchSnapshot(await updateNotifier({ ...npmNoColor, version }), 'no color') + t.matchSnapshot(await runUpdateNotifier({ ...npm, version }), 'color') + t.matchSnapshot(await runUpdateNotifier({ ...npmNoColor, version }), 'no color') t.strictSame(MANIFEST_REQUEST, [`npm@^${version}`, `npm@^${version}`]) }) t.test('patch to next version', async t => { const version = NEXT_PATCH - t.matchSnapshot(await updateNotifier({ ...npm, version }), 'color') - t.matchSnapshot(await updateNotifier({ ...npmNoColor, version }), 'no color') + t.matchSnapshot(await 
runUpdateNotifier({ ...npm, version }), 'color') + t.matchSnapshot(await runUpdateNotifier({ ...npmNoColor, version }), 'no color') t.strictSame(MANIFEST_REQUEST, ['npm@latest', `npm@^${version}`, 'npm@latest', `npm@^${version}`]) }) t.test('minor to next version', async t => { const version = NEXT_MINOR - t.matchSnapshot(await updateNotifier({ ...npm, version }), 'color') - t.matchSnapshot(await updateNotifier({ ...npmNoColor, version }), 'no color') + t.matchSnapshot(await runUpdateNotifier({ ...npm, version }), 'color') + t.matchSnapshot(await runUpdateNotifier({ ...npmNoColor, version }), 'no color') t.strictSame(MANIFEST_REQUEST, ['npm@latest', `npm@^${version}`, 'npm@latest', `npm@^${version}`]) }) t.test('patch to current', async t => { const version = CURRENT_PATCH - t.matchSnapshot(await updateNotifier({ ...npm, version }), 'color') - t.matchSnapshot(await updateNotifier({ ...npmNoColor, version }), 'no color') + t.matchSnapshot(await runUpdateNotifier({ ...npm, version }), 'color') + t.matchSnapshot(await runUpdateNotifier({ ...npmNoColor, version }), 'no color') t.strictSame(MANIFEST_REQUEST, ['npm@latest', 'npm@latest']) }) t.test('minor to current', async t => { const version = CURRENT_MINOR - t.matchSnapshot(await updateNotifier({ ...npm, version }), 'color') - t.matchSnapshot(await updateNotifier({ ...npmNoColor, version }), 'no color') + t.matchSnapshot(await runUpdateNotifier({ ...npm, version }), 'color') + t.matchSnapshot(await runUpdateNotifier({ ...npmNoColor, version }), 'no color') t.strictSame(MANIFEST_REQUEST, ['npm@latest', 'npm@latest']) }) t.test('major to current', async t => { const version = CURRENT_MAJOR - t.matchSnapshot(await updateNotifier({ ...npm, version }), 'color') - t.matchSnapshot(await updateNotifier({ ...npmNoColor, version }), 'no color') + t.matchSnapshot(await runUpdateNotifier({ ...npm, version }), 'color') + t.matchSnapshot(await runUpdateNotifier({ ...npmNoColor, version }), 'no color') t.strictSame(MANIFEST_REQUEST, ['npm@latest', 'npm@latest']) }) diff --git a/test/lib/version.js b/test/lib/version.js index e0e07f5172efe..df6d0dd797d0a 100644 --- a/test/lib/version.js +++ b/test/lib/version.js @@ -1,36 +1,36 @@ const t = require('tap') -const requireInject = require('require-inject') +const { fake: mockNpm } = require('../fixtures/mock-npm') let result = [] const noop = () => null -const npm = { - flatOptions: { - tagVersionPrefix: 'v', - json: false, - }, +const config = { + 'git-tag-version': true, + 'tag-version-prefix': 'v', + json: false, +} +const npm = mockNpm({ + config, prefix: '', version: '1.0.0', -} -const mocks = { - libnpmversion: noop, - '../../lib/utils/output.js': (...msg) => { + output: (...msg) => { for (const m of msg) result.push(m) }, - '../../lib/utils/usage.js': () => 'usage instructions', +}) +const mocks = { + libnpmversion: noop, } -const Version = requireInject('../../lib/version.js', mocks) +const Version = t.mock('../../lib/version.js', mocks) const version = new Version(npm) const _processVersions = process.versions -t.afterEach(cb => { - npm.flatOptions.json = false +t.afterEach(() => { + config.json = false npm.prefix = '' process.versions = _processVersions result = [] - cb() }) t.test('no args', t => { @@ -47,7 +47,7 @@ t.test('no args', t => { if (err) throw err - t.deepEqual( + t.same( result, [{ 'test-version-no-args': '3.2.1', @@ -65,7 +65,7 @@ t.test('too many args', t => { version.exec(['foo', 'bar'], err => { t.match( err, - 'usage instructions', + 'npm version', 'should throw usage instructions 
error' ) @@ -102,7 +102,7 @@ t.test('failure reading package.json', t => { if (err) throw err - t.deepEqual( + t.same( result, [{ npm: '1.0.0', @@ -117,14 +117,14 @@ t.test('failure reading package.json', t => { t.test('--json option', t => { const prefix = t.testdir({}) - npm.flatOptions.json = true + config.json = true npm.prefix = prefix Object.defineProperty(process, 'versions', { value: {} }) version.exec([], err => { if (err) throw err - t.deepEqual( + t.same( result, ['{\n "npm": "1.0.0"\n}'], 'should return json stringified result' @@ -134,15 +134,13 @@ t.test('--json option', t => { }) t.test('with one arg', t => { - const Version = requireInject('../../lib/version.js', { + const Version = t.mock('../../lib/version.js', { ...mocks, libnpmversion: (arg, opts) => { t.equal(arg, 'major', 'should forward expected value') - t.deepEqual( + t.same( opts, { - tagVersionPrefix: 'v', - json: false, path: '', }, 'should forward expected options' @@ -159,3 +157,174 @@ t.test('with one arg', t => { t.end() }) }) + +t.test('workspaces', t => { + t.teardown(() => { + npm.localPrefix = '' + npm.prefix = '' + }) + + t.test('no args, all workspaces', t => { + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b'], + }, null, 2), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.0.0', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.0.0', + }), + }, + }) + npm.localPrefix = testDir + npm.prefix = testDir + const version = new Version(npm) + version.execWorkspaces([], [], err => { + if (err) + throw err + t.same(result, [{ + 'workspaces-test': '1.0.0', + 'workspace-a': '1.0.0', + 'workspace-b': '1.0.0', + npm: '1.0.0', + }], 'outputs includes main package and workspace versions') + t.end() + }) + }) + + t.test('no args, single workspaces', t => { + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b'], + }, null, 2), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.0.0', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.0.0', + }), + }, + }) + npm.localPrefix = testDir + npm.prefix = testDir + const version = new Version(npm) + version.execWorkspaces([], ['workspace-a'], err => { + if (err) + throw err + t.same(result, [{ + 'workspaces-test': '1.0.0', + 'workspace-a': '1.0.0', + npm: '1.0.0', + }], 'outputs includes main package and requested workspace versions') + t.end() + }) + }) + + t.test('no args, all workspaces, workspace with missing name or version', t => { + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], + }, null, 2), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.0.0', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + }), + }, + 'workspace-c': { + 'package.json': JSON.stringify({ + version: '1.0.0', + }), + }, + }) + npm.localPrefix = testDir + npm.prefix = testDir + const version = new Version(npm) + version.execWorkspaces([], [], err => { + if (err) + throw err + t.same(result, [{ + 'workspaces-test': '1.0.0', + 'workspace-a': '1.0.0', + npm: '1.0.0', + }], 'outputs includes main package and valid 
workspace versions') + t.end() + }) + }) + + t.test('with one arg, all workspaces', t => { + const libNpmVersionArgs = [] + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b'], + }, null, 2), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.0.0', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.0.0', + }), + }, + }) + const Version = t.mock('../../lib/version.js', { + ...mocks, + libnpmversion: (arg, opts) => { + libNpmVersionArgs.push([arg, opts]) + return '2.0.0' + }, + }) + npm.localPrefix = testDir + npm.prefix = testDir + const version = new Version(npm) + + version.execWorkspaces(['major'], [], err => { + if (err) + throw err + t.same(result, ['workspace-a', 'v2.0.0', 'workspace-b', 'v2.0.0'], 'outputs the new version for only the workspaces prefixed by the tagVersionPrefix') + t.end() + }) + }) + + t.test('too many args', t => { + version.execWorkspaces(['foo', 'bar'], [], err => { + t.match( + err, + 'npm version', + 'should throw usage instructions error' + ) + + t.end() + }) + }) + + t.end() +}) diff --git a/test/lib/view.js b/test/lib/view.js index 1363a5b9f9ac8..793917adc6476 100644 --- a/test/lib/view.js +++ b/test/lib/view.js @@ -1,15 +1,20 @@ const t = require('tap') -const requireInject = require('require-inject') + +t.cleanSnapshot = str => str.replace(/published .*? ago/g, 'published {TIME} ago') + +// run the same as tap does when running directly with node +process.stdout.columns = undefined + +const { fake: mockNpm } = require('../fixtures/mock-npm') let logs -const cleanLogs = (done) => { +const cleanLogs = () => { logs = '' const fn = (...args) => { logs += '\n' args.map(el => logs += el) } console.log = fn - done() } const packument = (nv, opts) => { @@ -31,7 +36,9 @@ const packument = (nv, opts) => { }, blue: { name: 'blue', - 'dist-tags': {}, + 'dist-tags': { + latest: '1.0.0', + }, time: { '1.0.0': '2019-08-06T16:21:09.842Z', }, @@ -56,7 +63,9 @@ const packument = (nv, opts) => { email: 'claudia@cyan.com', }, name: 'cyan', - 'dist-tags': {}, + 'dist-tags': { + latest: '1.0.0', + }, versions: { '1.0.0': { version: '1.0.0', @@ -233,43 +242,52 @@ const packument = (nv, opts) => { }, }, } + if (nv.type === 'git') + return mocks[nv.hosted.project] return mocks[nv.name] } t.beforeEach(cleanLogs) + t.test('should log package info', t => { - const View = requireInject('../../lib/view.js', { + const View = t.mock('../../lib/view.js', { pacote: { packument, }, }) - const view = new View({ - flatOptions: { - global: false, - }, + const npm = mockNpm({ + config: { unicode: false }, }) + const view = new View(npm) - const ViewJson = requireInject('../../lib/view.js', { + const ViewJson = t.mock('../../lib/view.js', { pacote: { packument, }, }) - const viewJson = new ViewJson({ - flatOptions: { + const jsonNpm = mockNpm({ + config: { json: true, + tag: 'latest', }, }) + const viewJson = new ViewJson(jsonNpm) - const ViewUnicode = requireInject('../../lib/view.js', { + const ViewUnicode = t.mock('../../lib/view.js', { pacote: { packument, }, }) - const viewUnicode = new ViewUnicode({ - flatOptions: { - global: false, - unicode: true, - }, + const unicodeNpm = mockNpm({ + config: { unicode: true }, + }) + const viewUnicode = new ViewUnicode(unicodeNpm) + + t.test('package from git', t => { + view.exec(['https://github.com/npm/green'], () => { + t.matchSnapshot(logs) + t.end() + }) }) 
t.test('package with license, bugs, repository and other fields', t => { @@ -302,7 +320,7 @@ t.test('should log package info', t => { t.test('package with no versions', t => { view.exec(['brown'], () => { - t.equals(logs, '', 'no info to display') + t.equal(logs, '', 'no info to display') t.end() }) }) @@ -330,7 +348,7 @@ t.test('should log package info', t => { t.test('package with --json and no versions', t => { viewJson.exec(['brown'], () => { - t.equals(logs, '', 'no info to display') + t.equal(logs, '', 'no info to display') t.end() }) }) @@ -346,18 +364,18 @@ t.test('should log info of package in current working dir', t => { }, null, 2), }) - const View = requireInject('../../lib/view.js', { + const View = t.mock('../../lib/view.js', { pacote: { packument, }, }) - const view = new View({ + const npm = mockNpm({ prefix: testDir, - flatOptions: { - defaultTag: '1.0.0', - global: false, + config: { + tag: '1.0.0', }, }) + const view = new View(npm) t.test('specific version', t => { view.exec(['.@1.0.0'], () => { @@ -377,28 +395,27 @@ t.test('should log info of package in current working dir', t => { }) t.test('should log info by field name', t => { - const ViewJson = requireInject('../../lib/view.js', { + const ViewJson = t.mock('../../lib/view.js', { pacote: { packument, }, }) - const viewJson = new ViewJson({ - flatOptions: { + const jsonNpm = mockNpm({ + config: { + tag: 'latest', json: true, - global: false, }, }) - const View = requireInject('../../lib/view.js', { + const viewJson = new ViewJson(jsonNpm) + + const View = t.mock('../../lib/view.js', { pacote: { packument, }, }) - const view = new View({ - flatOptions: { - global: false, - }, - }) + const npm = mockNpm() + const view = new View(npm) t.test('readme', t => { view.exec(['yellow@1.0.0', 'readme'], () => { @@ -444,7 +461,7 @@ t.test('should log info by field name', t => { t.test('unknown nested field ', t => { view.exec(['yellow@1.0.0', 'dist.foobar'], () => { - t.equals(logs, '', 'no info to display') + t.equal(logs, '', 'no info to display') t.end() }) }) @@ -467,14 +484,16 @@ t.test('should log info by field name', t => { }) t.test('throw error if global mode', (t) => { - const View = requireInject('../../lib/view.js') - const view = new View({ - flatOptions: { + const View = t.mock('../../lib/view.js') + const npm = mockNpm({ + config: { global: true, + tag: 'latest', }, }) + const view = new View(npm) view.exec([], (err) => { - t.equals(err.message, 'Cannot use view command in global mode.') + t.equal(err.message, 'Cannot use view command in global mode.') t.end() }) }) @@ -482,13 +501,11 @@ t.test('throw error if global mode', (t) => { t.test('throw ENOENT error if package.json misisng', (t) => { const testDir = t.testdir({}) - const View = requireInject('../../lib/view.js') - const view = new View({ + const View = t.mock('../../lib/view.js') + const npm = mockNpm({ prefix: testDir, - flatOptions: { - global: false, - }, }) + const view = new View(npm) view.exec([], (err) => { t.match(err, { code: 'ENOENT' }) t.end() @@ -500,13 +517,11 @@ t.test('throw EJSONPARSE error if package.json not json', (t) => { 'package.json': 'not json, nope, not even a little bit!', }) - const View = requireInject('../../lib/view.js') - const view = new View({ + const View = t.mock('../../lib/view.js') + const npm = mockNpm({ prefix: testDir, - flatOptions: { - global: false, - }, }) + const view = new View(npm) view.exec([], (err) => { t.match(err, { code: 'EJSONPARSE' }) t.end() @@ -518,49 +533,172 @@ t.test('throw error if 
package.json has no name', (t) => { 'package.json': '{}', }) - const View = requireInject('../../lib/view.js') - const view = new View({ + const View = t.mock('../../lib/view.js') + const npm = mockNpm({ prefix: testDir, - flatOptions: { - global: false, - }, }) + const view = new View(npm) view.exec([], (err) => { - t.equals(err.message, 'Invalid package.json, no "name" field') + t.equal(err.message, 'Invalid package.json, no "name" field') t.end() }) }) t.test('throws when unpublished', (t) => { - const View = requireInject('../../lib/view.js', { + const View = t.mock('../../lib/view.js', { pacote: { packument, }, }) - const view = new View({ - flatOptions: { - defaultTag: '1.0.1', - global: false, + const npm = mockNpm({ + config: { + tag: '1.0.1', }, }) + const view = new View(npm) view.exec(['red'], (err) => { - t.equals(err.code, 'E404') + t.equal(err.code, 'E404') t.end() }) }) +t.test('workspaces', t => { + t.beforeEach(() => { + warnMsg = undefined + config.json = false + }) + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'workspaces-test-package', + version: '1.2.3', + workspaces: ['test-workspace-a', 'test-workspace-b'], + }), + 'test-workspace-a': { + 'package.json': JSON.stringify({ + name: 'green', + version: '1.2.3', + }), + }, + 'test-workspace-b': { + 'package.json': JSON.stringify({ + name: 'orange', + version: '1.2.3', + }), + }, + }) + const View = t.mock('../../lib/view.js', { + pacote: { + packument, + }, + }) + const config = { + unicode: false, + tag: 'latest', + } + let warnMsg + const npm = mockNpm({ + log: { + warn: (msg) => { + warnMsg = msg + }, + }, + config, + localPrefix: testDir, + }) + const view = new View(npm) + + t.test('all workspaces', t => { + view.execWorkspaces([], [], (err) => { + t.error(err) + t.matchSnapshot(logs) + t.end() + }) + }) + + t.test('one specific workspace', t => { + view.execWorkspaces([], ['green'], (err) => { + t.error(err) + t.matchSnapshot(logs) + t.end() + }) + }) + + t.test('all workspaces --json', t => { + config.json = true + view.execWorkspaces([], [], (err) => { + t.error(err) + t.matchSnapshot(logs) + t.end() + }) + }) + + t.test('all workspaces single field', t => { + view.execWorkspaces(['.', 'name'], [], (err) => { + t.error(err) + t.matchSnapshot(logs) + t.end() + }) + }) + + t.test('all workspaces nonexistent field', t => { + view.execWorkspaces(['.', 'foo'], [], (err) => { + t.error(err) + t.matchSnapshot(logs) + t.end() + }) + }) + + t.test('all workspaces nonexistent field --json', t => { + config.json = true + view.execWorkspaces(['.', 'foo'], [], (err) => { + t.error(err) + t.matchSnapshot(logs) + t.end() + }) + }) + + t.test('all workspaces single field --json', t => { + config.json = true + view.execWorkspaces(['.', 'name'], [], (err) => { + t.error(err) + t.matchSnapshot(logs) + t.end() + }) + }) + + t.test('single workspace --json', t => { + config.json = true + view.execWorkspaces([], ['green'], (err) => { + t.error(err) + t.matchSnapshot(logs) + t.end() + }) + }) + + t.test('remote package name', t => { + view.execWorkspaces(['pink'], [], (err) => { + t.error(err) + t.matchSnapshot(warnMsg) + t.matchSnapshot(logs) + t.end() + }) + }) + + t.end() +}) + t.test('completion', async t => { - const View = requireInject('../../lib/view.js', { + const View = t.mock('../../lib/view.js', { pacote: { packument, }, }) - const view = new View({ - flatOptions: { - defaultTag: '1.0.1', - global: false, + const npm = mockNpm({ + config: { + tag: '1.0.1', }, }) + const view = new View(npm) const 
res = await view.completion({ conf: { argv: { remain: ['npm', 'view', 'green@1.0.0'] } }, }) @@ -569,12 +707,13 @@ t.test('completion', async t => { }) t.test('no registry completion', async t => { - const View = requireInject('../../lib/view.js') - const view = new View({ - flatOptions: { - defaultTag: '1.0.1', + const View = t.mock('../../lib/view.js') + const npm = mockNpm({ + config: { + tag: '1.0.1', }, }) + const view = new View(npm) const res = await view.completion({conf: { argv: { remain: ['npm', 'view'] } } }) t.notOk(res, 'there is no package completion') t.end() diff --git a/test/lib/whoami.js b/test/lib/whoami.js index 3d9618ffa7150..9190e3858b137 100644 --- a/test/lib/whoami.js +++ b/test/lib/whoami.js @@ -1,34 +1,41 @@ -const { test } = require('tap') -const requireInject = require('require-inject') +const t = require('tap') +const { fake: mockNpm } = require('../fixtures/mock-npm') -test('whoami', (t) => { +t.test('whoami', (t) => { t.plan(3) - const Whoami = requireInject('../../lib/whoami.js', { + const Whoami = t.mock('../../lib/whoami.js', { '../../lib/utils/get-identity.js': () => Promise.resolve('foo'), - '../../lib/utils/output.js': (output) => { + }) + const npm = mockNpm({ + config: { json: false }, + output: (output) => { t.equal(output, 'foo', 'should output the username') }, }) - const whoami = new Whoami({ flatOptions: {} }) + + const whoami = new Whoami(npm) whoami.exec([], (err) => { - t.ifError(err, 'npm whoami') + t.error(err, 'npm whoami') t.ok('should successfully print username') }) }) -test('whoami json', (t) => { +t.test('whoami json', (t) => { t.plan(3) - const Whoami = requireInject('../../lib/whoami.js', { + const Whoami = t.mock('../../lib/whoami.js', { '../../lib/utils/get-identity.js': () => Promise.resolve('foo'), - '../../lib/utils/output.js': (output) => { - t.equal(output, '"foo"', 'should output the username as json') + }) + const npm = mockNpm({ + config: { json: true }, + output: (output) => { + t.equal(output, '"foo"', 'should output the username') }, }) - const whoami = new Whoami({ flatOptions: { json: true } }) + const whoami = new Whoami(npm) whoami.exec([], (err) => { - t.ifError(err, 'npm whoami') + t.error(err, 'npm whoami') t.ok('should successfully print username as json') }) }) diff --git a/test/lib/workspaces/arborist-cmd.js b/test/lib/workspaces/arborist-cmd.js new file mode 100644 index 0000000000000..75ac8f4ebf804 --- /dev/null +++ b/test/lib/workspaces/arborist-cmd.js @@ -0,0 +1,125 @@ +const { resolve } = require('path') +const t = require('tap') +const ArboristCmd = require('../../../lib/workspaces/arborist-cmd.js') + +t.test('arborist-cmd', async t => { + const path = t.testdir({ + 'package.json': JSON.stringify({ + name: 'simple-workspaces-list', + version: '1.1.1', + workspaces: [ + 'a', + 'b', + 'group/*', + ], + }), + node_modules: { + abbrev: { + 'package.json': JSON.stringify({ name: 'abbrev', version: '1.1.1' }), + }, + a: t.fixture('symlink', '../a'), + b: t.fixture('symlink', '../b'), + }, + a: { + 'package.json': JSON.stringify({ name: 'a', version: '1.0.0' }), + }, + b: { + 'package.json': JSON.stringify({ name: 'b', version: '1.0.0' }), + }, + group: { + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + dependencies: { + abbrev: '^1.1.1', + }, + }), + }, + d: { + 'package.json': JSON.stringify({ name: 'd', version: '1.0.0' }), + }, + }, + }) + + class TestCmd extends ArboristCmd {} + + const cmd = new TestCmd() + cmd.npm = { localPrefix: path } + + // check filtering for a single 
workspace name + cmd.exec = function (args, cb) { + t.same(this.workspaceNames, ['a'], 'should set array with single ws name') + t.same(args, ['foo'], 'should get received args') + cb() + } + await new Promise(res => { + cmd.execWorkspaces(['foo'], ['a'], res) + }) + + // check filtering single workspace by path + cmd.exec = function (args, cb) { + t.same(this.workspaceNames, ['a'], + 'should set array with single ws name from path') + cb() + } + await new Promise(res => { + cmd.execWorkspaces([], ['./a'], res) + }) + + // check filtering single workspace by full path + cmd.exec = function (args, cb) { + t.same(this.workspaceNames, ['a'], + 'should set array with single ws name from full path') + cb() + } + await new Promise(res => { + cmd.execWorkspaces([], [resolve(path, './a')], res) + }) + + // filtering multiple workspaces by name + cmd.exec = function (args, cb) { + t.same(this.workspaceNames, ['a', 'c'], + 'should set array with multiple listed ws names') + cb() + } + await new Promise(res => { + cmd.execWorkspaces([], ['a', 'c'], res) + }) + + // filtering multiple workspaces by path names + cmd.exec = function (args, cb) { + t.same(this.workspaceNames, ['a', 'c'], + 'should set array with multiple ws names from paths') + cb() + } + await new Promise(res => { + cmd.execWorkspaces([], ['./a', 'group/c'], res) + }) + + // filtering multiple workspaces by parent path name + cmd.exec = function (args, cb) { + t.same(this.workspaceNames, ['c', 'd'], + 'should set array with multiple ws names from a parent folder name') + cb() + } + await new Promise(res => { + cmd.execWorkspaces([], ['./group'], res) + }) +}) + +t.test('handle getWorkspaces raising an error', t => { + const ArboristCmd = t.mock('../../../lib/workspaces/arborist-cmd.js', { + '../../../lib/workspaces/get-workspaces.js': async () => { + throw new Error('oopsie') + }, + }) + class TestCmd extends ArboristCmd {} + const cmd = new TestCmd() + cmd.npm = {} + + cmd.execWorkspaces(['foo'], ['a'], er => { + t.match(er, { message: 'oopsie' }) + t.end() + }) +}) diff --git a/test/lib/workspaces/get-workspaces.js b/test/lib/workspaces/get-workspaces.js new file mode 100644 index 0000000000000..4ea055e02f8f2 --- /dev/null +++ b/test/lib/workspaces/get-workspaces.js @@ -0,0 +1,199 @@ +const { resolve } = require('path') +const t = require('tap') +const getWorkspaces = require('../../../lib/workspaces/get-workspaces.js') + +const normalizePath = p => p + .replace(/\\+/g, '/') + .replace(/\r\n/g, '\n') + +const cleanOutput = (str, path) => normalizePath(str) + .replace(normalizePath(path), '{PATH}') + +const clean = (res, path) => { + const cleaned = new Map() + for (const [key, value] of res.entries()) + cleaned.set(key, cleanOutput(value, path)) + return cleaned +} + +t.test('get-workspaces', async t => { + const path = t.testdir({ + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + scripts: { glorp: 'echo a doing the glerp glop' }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '2.0.0', + scripts: { glorp: 'echo b doing the glerp glop' }, + }), + }, + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + scripts: { + test: 'exit 0', + posttest: 'echo posttest', + lorem: 'echo c lorem', + }, + }), + }, + d: { + 'package.json': JSON.stringify({ + name: 'd', + version: '1.0.0', + scripts: { + test: 'exit 0', + posttest: 'echo posttest', + }, + }), + }, + e: { + 'package.json': JSON.stringify({ + name: 'e', + scripts: { test: 'exit 0', start: 'echo 
start something' }, + }), + }, + noscripts: { + 'package.json': JSON.stringify({ + name: 'noscripts', + version: '1.0.0', + }), + }, + }, + 'package.json': JSON.stringify({ + name: 'x', + version: '1.2.3', + workspaces: ['packages/*'], + }), + }) + + let workspaces + + workspaces = await getWorkspaces(['a', 'b'], { path }) + t.same( + clean(workspaces, path), + new Map(Object.entries({ + a: '{PATH}/packages/a', + b: '{PATH}/packages/b', + })), + 'should filter by package name' + ) + + workspaces = await getWorkspaces(['./packages/c'], { path }) + t.same( + clean(workspaces, path), + new Map(Object.entries({ + c: '{PATH}/packages/c', + })), + 'should filter by package directory' + ) + + workspaces = await getWorkspaces(['packages/c'], { path }) + t.same( + clean(workspaces, path), + new Map(Object.entries({ + c: '{PATH}/packages/c', + })), + 'should filter by rel package directory' + ) + + workspaces = await getWorkspaces([resolve(path, 'packages/c')], { path }) + t.same( + clean(workspaces, path), + new Map(Object.entries({ + c: '{PATH}/packages/c', + })), + 'should filter by absolute package directory' + ) + + workspaces = await getWorkspaces(['packages'], { path }) + t.same( + clean(workspaces, path), + new Map(Object.entries({ + a: '{PATH}/packages/a', + b: '{PATH}/packages/b', + c: '{PATH}/packages/c', + d: '{PATH}/packages/d', + e: '{PATH}/packages/e', + noscripts: '{PATH}/packages/noscripts', + })), + 'should filter by parent directory name' + ) + + workspaces = await getWorkspaces(['./packages/'], { path }) + t.same( + clean(workspaces, path), + new Map(Object.entries({ + a: '{PATH}/packages/a', + b: '{PATH}/packages/b', + c: '{PATH}/packages/c', + d: '{PATH}/packages/d', + e: '{PATH}/packages/e', + noscripts: '{PATH}/packages/noscripts', + })), + 'should filter by parent directory path' + ) + + workspaces = await getWorkspaces([resolve(path, './packages')], { path }) + t.same( + clean(workspaces, path), + new Map(Object.entries({ + a: '{PATH}/packages/a', + b: '{PATH}/packages/b', + c: '{PATH}/packages/c', + d: '{PATH}/packages/d', + e: '{PATH}/packages/e', + noscripts: '{PATH}/packages/noscripts', + })), + 'should filter by absolute parent directory path' + ) + + workspaces = await getWorkspaces([], { path }) + t.same( + clean(workspaces, path), + new Map(Object.entries({ + a: '{PATH}/packages/a', + b: '{PATH}/packages/b', + c: '{PATH}/packages/c', + d: '{PATH}/packages/d', + e: '{PATH}/packages/e', + noscripts: '{PATH}/packages/noscripts', + })), + 'should return all workspaces if no filter set' + ) + + try { + await getWorkspaces(['missing'], { path }) + throw new Error('missed throw') + } catch (err) { + t.match( + err, + /No workspaces found/, + 'should throw no workspaces found error' + ) + } + + const unconfiguredWorkspaces = t.testdir({ + 'package.json': JSON.stringify({ + name: 'no-configured-workspaces', + version: '1.0.0', + }), + }) + try { + await getWorkspaces([], { path: unconfiguredWorkspaces }) + throw new Error('missed throw') + } catch (err) { + t.match( + err, + /No workspaces found/, + 'should throw no workspaces found error' + ) + } +})