From e7ab206370e5fc62fefe6916e5dcc40b3e577d22 Mon Sep 17 00:00:00 2001 From: reggi Date: Wed, 2 Oct 2024 13:25:36 -0400 Subject: [PATCH] deps: update pacote@19.0.0 --- mock-registry/package.json | 2 +- node_modules/.gitignore | 40 +- .../node_modules/@npmcli/agent/lib/agents.js | 0 .../node_modules/@npmcli/agent/lib/dns.js | 0 .../node_modules/@npmcli/agent/lib/errors.js | 0 .../node_modules/@npmcli/agent/lib/index.js | 0 .../node_modules/@npmcli/agent/lib/options.js | 0 .../node_modules/@npmcli/agent/lib/proxy.js | 0 .../node_modules/@npmcli/agent/package.json | 0 .../node_modules/@npmcli/package-json/LICENSE | 0 .../@npmcli/package-json/lib/index.js | 0 .../@npmcli/package-json/lib/normalize.js | 0 .../@npmcli/package-json/lib/read-package.js | 0 .../package-json/lib/update-dependencies.js | 0 .../package-json/lib/update-scripts.js | 0 .../package-json/lib/update-workspaces.js | 0 .../@npmcli/package-json/package.json | 0 .../@npmcli/promise-spawn/LICENSE | 0 .../@npmcli/promise-spawn/lib/escape.js | 0 .../@npmcli/promise-spawn/lib/index.js | 0 .../@npmcli/promise-spawn/package.json | 0 .../node_modules/@npmcli/redact/LICENSE | 0 .../@npmcli/redact/lib/deep-map.js | 0 .../node_modules/@npmcli/redact/lib/index.js | 0 .../@npmcli/redact/lib/matchers.js | 0 .../node_modules/@npmcli/redact/lib/server.js | 0 .../node_modules/@npmcli/redact/lib/utils.js | 0 .../node_modules/@npmcli/redact/package.json | 0 .../node_modules/@npmcli/run-script/LICENSE | 0 .../run-script/lib/is-server-package.js | 0 .../@npmcli/run-script/lib/make-spawn-args.js | 0 .../run-script/lib/node-gyp-bin/node-gyp | 0 .../run-script/lib/node-gyp-bin/node-gyp.cmd | 0 .../@npmcli/run-script/lib/package-envs.js | 0 .../@npmcli/run-script/lib/run-script-pkg.js | 0 .../@npmcli/run-script/lib/run-script.js | 0 .../@npmcli/run-script/lib/set-path.js | 0 .../@npmcli/run-script/lib/signal-manager.js | 0 .../run-script/lib/validate-options.js | 0 .../@npmcli/run-script/package.json | 0 .../node_modules/hosted-git-info/LICENSE | 0 .../hosted-git-info/lib/from-url.js | 0 .../node_modules/hosted-git-info/lib/hosts.js | 0 .../node_modules/hosted-git-info/lib/index.js | 0 .../hosted-git-info/lib/parse-url.js | 0 .../node_modules/hosted-git-info/package.json | 0 .../node_modules/make-fetch-happen/LICENSE | 0 .../make-fetch-happen/lib/cache/entry.js | 0 .../make-fetch-happen/lib/cache/errors.js | 0 .../make-fetch-happen/lib/cache/index.js | 0 .../make-fetch-happen/lib/cache/key.js | 0 .../make-fetch-happen/lib/cache/policy.js | 0 .../make-fetch-happen/lib/fetch.js | 0 .../make-fetch-happen/lib/index.js | 0 .../make-fetch-happen/lib/options.js | 0 .../make-fetch-happen/lib/pipeline.js | 0 .../make-fetch-happen/lib/remote.js | 0 .../make-fetch-happen/package.json | 0 .../normalize-package-data/LICENSE | 0 .../lib/extract_description.js | 0 .../normalize-package-data/lib/fixer.js | 0 .../lib/make_warning.js | 0 .../normalize-package-data/lib/normalize.js | 0 .../normalize-package-data/lib/safe_format.js | 0 .../normalize-package-data/lib/typos.json | 0 .../lib/warning_messages.json | 0 .../normalize-package-data/package.json | 0 .../node_modules/npm-install-checks/LICENSE | 0 .../npm-install-checks/lib/index.js | 0 .../npm-install-checks/package.json | 0 .../node_modules/npm-package-arg/LICENSE | 0 .../node_modules/npm-package-arg/lib/npa.js | 0 .../node_modules/npm-package-arg/package.json | 0 .../npm-pick-manifest}/LICENSE.md | 0 .../npm-pick-manifest/lib/index.js | 0 .../npm-pick-manifest/package.json | 0 .../npm-registry-fetch}/LICENSE.md 
| 0 .../npm-registry-fetch/lib/auth.js | 0 .../npm-registry-fetch/lib/check-response.js | 0 .../npm-registry-fetch/lib/default-opts.js | 0 .../npm-registry-fetch/lib/errors.js | 0 .../npm-registry-fetch/lib/index.js | 0 .../npm-registry-fetch/lib/json-stream.js | 0 .../npm-registry-fetch/package.json | 0 .../node_modules/pacote/LICENSE | 15 + .../node_modules/pacote/bin/index.js | 158 +++++ .../node_modules/pacote/lib/dir.js | 100 +++ .../node_modules/pacote/lib/fetcher.js | 489 +++++++++++++++ .../node_modules/pacote/lib/file.js | 94 +++ .../node_modules/pacote/lib/git.js | 317 ++++++++++ .../node_modules/pacote/lib/index.js | 23 + .../node_modules/pacote/lib/registry.js | 369 +++++++++++ .../node_modules/pacote/lib/remote.js | 89 +++ .../pacote/lib/util/add-git-sha.js | 15 + .../node_modules/pacote/lib/util/cache-dir.js | 15 + .../pacote/lib/util/is-package-bin.js | 25 + .../node_modules/pacote/lib/util/npm.js | 14 + .../node_modules/pacote/lib/util/protected.js | 5 + .../pacote/lib/util/tar-create-options.js | 31 + .../pacote/lib/util/trailing-slashes.js | 10 + .../node_modules/pacote}/package.json | 106 ++-- .../@npmcli/fs/lib/common/get-options.js | 20 - .../@npmcli/fs/lib/common/node.js | 9 - .../node_modules/@npmcli/fs/lib/cp/LICENSE | 15 - .../node_modules/@npmcli/fs/lib/cp/errors.js | 129 ---- .../node_modules/@npmcli/fs/lib/cp/index.js | 22 - .../@npmcli/fs/lib/cp/polyfill.js | 428 ------------- .../node_modules/@npmcli/fs/lib/index.js | 13 - .../node_modules/@npmcli/fs/lib/move-file.js | 78 --- .../@npmcli/fs/lib/readdir-scoped.js | 20 - .../@npmcli/fs/lib/with-temp-dir.js | 39 -- .../pacote/node_modules/@npmcli/git/LICENSE | 15 + .../node_modules/@npmcli/git/lib/clone.js | 172 ++++++ .../node_modules/@npmcli/git/lib/errors.js | 36 ++ .../node_modules/@npmcli/git/lib/find.js | 15 + .../node_modules/@npmcli/git/lib/index.js | 9 + .../node_modules/@npmcli/git/lib/is-clean.js | 6 + .../pacote/node_modules/@npmcli/git/lib/is.js | 4 + .../@npmcli/git/lib/lines-to-revs.js | 147 +++++ .../@npmcli/git/lib/make-error.js | 33 + .../node_modules/@npmcli/git/lib/opts.js | 57 ++ .../node_modules/@npmcli/git/lib/revs.js | 28 + .../node_modules/@npmcli/git/lib/spawn.js | 44 ++ .../node_modules/@npmcli/git/lib/utils.js | 3 + .../node_modules/@npmcli/git/lib/which.js | 18 + .../node_modules/@npmcli/git/package.json | 59 ++ .../installed-package-contents}/LICENSE | 2 +- .../installed-package-contents/bin/index.js | 44 ++ .../installed-package-contents/lib/index.js | 181 ++++++ .../installed-package-contents/package.json | 52 ++ .../node_modules/cacache/lib/content/path.js | 29 - .../node_modules/cacache/lib/content/read.js | 165 ----- .../node_modules/cacache/lib/content/rm.js | 18 - .../node_modules/cacache/lib/content/write.js | 206 ------- .../node_modules/cacache/lib/entry-index.js | 336 ---------- .../pacote/node_modules/cacache/lib/get.js | 170 ----- .../pacote/node_modules/cacache/lib/index.js | 42 -- .../node_modules/cacache/lib/memoization.js | 72 --- .../pacote/node_modules/cacache/lib/put.js | 80 --- .../pacote/node_modules/cacache/lib/rm.js | 31 - .../node_modules/cacache/lib/util/glob.js | 7 - .../cacache/lib/util/hash-to-segments.js | 7 - .../node_modules/cacache/lib/util/tmp.js | 26 - .../pacote/node_modules/cacache/lib/verify.js | 257 -------- .../pacote/node_modules/ignore-walk/LICENSE | 15 + .../node_modules/ignore-walk/lib/index.js | 310 ++++++++++ .../node_modules/ignore-walk/package.json | 64 ++ .../pacote/node_modules/isexe/LICENSE | 15 + 
.../node_modules/isexe/dist/cjs/index.js | 46 ++ .../node_modules/isexe/dist/cjs/options.js | 3 + .../node_modules/isexe/dist/cjs/package.json | 3 + .../node_modules/isexe/dist/cjs/posix.js | 67 ++ .../node_modules/isexe/dist/cjs/win32.js | 62 ++ .../node_modules/isexe/dist/mjs/index.js | 16 + .../node_modules/isexe/dist/mjs/options.js | 2 + .../node_modules/isexe/dist/mjs/package.json | 3 + .../node_modules/isexe/dist/mjs/posix.js | 62 ++ .../node_modules/isexe/dist/mjs/win32.js | 57 ++ .../pacote/node_modules/isexe/package.json | 96 +++ .../json-parse-even-better-errors/LICENSE.md | 25 - .../lib/index.js | 137 ----- .../package.json | 49 -- .../pacote/node_modules/npm-bundled/LICENSE | 15 + .../node_modules/npm-bundled/lib/index.js | 254 ++++++++ .../node_modules/npm-bundled/package.json | 49 ++ .../npm-normalize-package-bin/LICENSE | 15 + .../npm-normalize-package-bin/lib/index.js | 64 ++ .../package.json | 38 +- .../pacote/node_modules/npm-packlist/LICENSE | 15 + .../node_modules/npm-packlist/lib/index.js | 456 ++++++++++++++ .../{@npmcli/fs => npm-packlist}/package.json | 67 +- .../npm-registry-fetch/LICENSE.md | 20 - .../pacote/node_modules/proc-log/LICENSE | 15 + .../pacote/node_modules/proc-log/lib/index.js | 153 +++++ .../pacote/node_modules/proc-log/package.json | 46 ++ .../{npm-pick-manifest => ssri}/LICENSE.md | 2 +- .../pacote/node_modules/ssri/lib/index.js | 580 ++++++++++++++++++ .../pacote/node_modules/ssri/package.json | 66 ++ .../node_modules/unique-filename/LICENSE | 5 - .../node_modules/unique-filename/lib/index.js | 7 - .../node_modules/unique-slug/lib/index.js | 11 - .../pacote/node_modules/which/LICENSE | 15 + .../pacote/node_modules/which/bin/which.js | 52 ++ .../pacote/node_modules/which/lib/index.js | 111 ++++ .../{unique-filename => which}/package.json | 61 +- node_modules/pacote/package.json | 43 +- package-lock.json | 539 +++++++++------- package.json | 2 +- workspaces/arborist/package.json | 2 +- workspaces/libnpmdiff/package.json | 2 +- workspaces/libnpmexec/package.json | 2 +- workspaces/libnpmpack/package.json | 2 +- 192 files changed, 5914 insertions(+), 2888 deletions(-) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/agent/lib/agents.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/agent/lib/dns.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/agent/lib/errors.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/agent/lib/index.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/agent/lib/options.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/agent/lib/proxy.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/agent/package.json (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/package-json/LICENSE (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/package-json/lib/index.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/package-json/lib/normalize.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/package-json/lib/read-package.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/package-json/lib/update-dependencies.js (100%) rename node_modules/{pacote => 
@npmcli/metavuln-calculator}/node_modules/@npmcli/package-json/lib/update-scripts.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/package-json/lib/update-workspaces.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/package-json/package.json (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/promise-spawn/LICENSE (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/promise-spawn/lib/escape.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/promise-spawn/lib/index.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/promise-spawn/package.json (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/redact/LICENSE (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/redact/lib/deep-map.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/redact/lib/index.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/redact/lib/matchers.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/redact/lib/server.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/redact/lib/utils.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/redact/package.json (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/run-script/LICENSE (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/run-script/lib/is-server-package.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/run-script/lib/make-spawn-args.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/run-script/lib/package-envs.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/run-script/lib/run-script-pkg.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/run-script/lib/run-script.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/run-script/lib/set-path.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/run-script/lib/signal-manager.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/run-script/lib/validate-options.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/run-script/package.json (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/hosted-git-info/LICENSE (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/hosted-git-info/lib/from-url.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/hosted-git-info/lib/hosts.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/hosted-git-info/lib/index.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/hosted-git-info/lib/parse-url.js (100%) rename 
node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/hosted-git-info/package.json (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/LICENSE (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/cache/entry.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/cache/errors.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/cache/index.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/cache/key.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/cache/policy.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/fetch.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/index.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/options.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/pipeline.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/remote.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/package.json (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/normalize-package-data/LICENSE (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/normalize-package-data/lib/extract_description.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/normalize-package-data/lib/fixer.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/normalize-package-data/lib/make_warning.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/normalize-package-data/lib/normalize.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/normalize-package-data/lib/safe_format.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/normalize-package-data/lib/typos.json (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/normalize-package-data/lib/warning_messages.json (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/normalize-package-data/package.json (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-install-checks/LICENSE (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-install-checks/lib/index.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-install-checks/package.json (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-package-arg/LICENSE (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-package-arg/lib/npa.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-package-arg/package.json (100%) rename node_modules/{pacote/node_modules/cacache => @npmcli/metavuln-calculator/node_modules/npm-pick-manifest}/LICENSE.md (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-pick-manifest/lib/index.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-pick-manifest/package.json (100%) 
rename node_modules/{pacote/node_modules/@npmcli/fs => @npmcli/metavuln-calculator/node_modules/npm-registry-fetch}/LICENSE.md (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/lib/auth.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/lib/check-response.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/lib/default-opts.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/lib/errors.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/lib/index.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/lib/json-stream.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/package.json (100%) create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE create mode 100755 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/bin/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/protected.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js rename node_modules/{pacote/node_modules/cacache => @npmcli/metavuln-calculator/node_modules/pacote}/package.json (50%) delete mode 100644 node_modules/pacote/node_modules/@npmcli/fs/lib/common/get-options.js delete mode 100644 node_modules/pacote/node_modules/@npmcli/fs/lib/common/node.js delete mode 100644 node_modules/pacote/node_modules/@npmcli/fs/lib/cp/LICENSE delete mode 100644 node_modules/pacote/node_modules/@npmcli/fs/lib/cp/errors.js delete mode 100644 node_modules/pacote/node_modules/@npmcli/fs/lib/cp/index.js delete mode 100644 node_modules/pacote/node_modules/@npmcli/fs/lib/cp/polyfill.js delete mode 100644 node_modules/pacote/node_modules/@npmcli/fs/lib/index.js delete mode 100644 node_modules/pacote/node_modules/@npmcli/fs/lib/move-file.js delete mode 100644 node_modules/pacote/node_modules/@npmcli/fs/lib/readdir-scoped.js delete mode 100644 node_modules/pacote/node_modules/@npmcli/fs/lib/with-temp-dir.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/LICENSE create mode 100644 
node_modules/pacote/node_modules/@npmcli/git/lib/clone.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/errors.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/find.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/index.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/is.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/opts.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/revs.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/utils.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/which.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/package.json rename node_modules/pacote/node_modules/{unique-slug => @npmcli/installed-package-contents}/LICENSE (96%) create mode 100755 node_modules/pacote/node_modules/@npmcli/installed-package-contents/bin/index.js create mode 100644 node_modules/pacote/node_modules/@npmcli/installed-package-contents/lib/index.js create mode 100644 node_modules/pacote/node_modules/@npmcli/installed-package-contents/package.json delete mode 100644 node_modules/pacote/node_modules/cacache/lib/content/path.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/content/read.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/content/rm.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/content/write.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/entry-index.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/get.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/index.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/memoization.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/put.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/rm.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/util/glob.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/util/tmp.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/verify.js create mode 100644 node_modules/pacote/node_modules/ignore-walk/LICENSE create mode 100644 node_modules/pacote/node_modules/ignore-walk/lib/index.js create mode 100644 node_modules/pacote/node_modules/ignore-walk/package.json create mode 100644 node_modules/pacote/node_modules/isexe/LICENSE create mode 100644 node_modules/pacote/node_modules/isexe/dist/cjs/index.js create mode 100644 node_modules/pacote/node_modules/isexe/dist/cjs/options.js create mode 100644 node_modules/pacote/node_modules/isexe/dist/cjs/package.json create mode 100644 node_modules/pacote/node_modules/isexe/dist/cjs/posix.js create mode 100644 node_modules/pacote/node_modules/isexe/dist/cjs/win32.js create mode 100644 node_modules/pacote/node_modules/isexe/dist/mjs/index.js create mode 100644 node_modules/pacote/node_modules/isexe/dist/mjs/options.js create mode 100644 node_modules/pacote/node_modules/isexe/dist/mjs/package.json create mode 100644 node_modules/pacote/node_modules/isexe/dist/mjs/posix.js create mode 
100644 node_modules/pacote/node_modules/isexe/dist/mjs/win32.js create mode 100644 node_modules/pacote/node_modules/isexe/package.json delete mode 100644 node_modules/pacote/node_modules/json-parse-even-better-errors/LICENSE.md delete mode 100644 node_modules/pacote/node_modules/json-parse-even-better-errors/lib/index.js delete mode 100644 node_modules/pacote/node_modules/json-parse-even-better-errors/package.json create mode 100644 node_modules/pacote/node_modules/npm-bundled/LICENSE create mode 100644 node_modules/pacote/node_modules/npm-bundled/lib/index.js create mode 100644 node_modules/pacote/node_modules/npm-bundled/package.json create mode 100644 node_modules/pacote/node_modules/npm-normalize-package-bin/LICENSE create mode 100644 node_modules/pacote/node_modules/npm-normalize-package-bin/lib/index.js rename node_modules/pacote/node_modules/{unique-slug => npm-normalize-package-bin}/package.json (54%) create mode 100644 node_modules/pacote/node_modules/npm-packlist/LICENSE create mode 100644 node_modules/pacote/node_modules/npm-packlist/lib/index.js rename node_modules/pacote/node_modules/{@npmcli/fs => npm-packlist}/package.json (54%) delete mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md create mode 100644 node_modules/pacote/node_modules/proc-log/LICENSE create mode 100644 node_modules/pacote/node_modules/proc-log/lib/index.js create mode 100644 node_modules/pacote/node_modules/proc-log/package.json rename node_modules/pacote/node_modules/{npm-pick-manifest => ssri}/LICENSE.md (96%) create mode 100644 node_modules/pacote/node_modules/ssri/lib/index.js create mode 100644 node_modules/pacote/node_modules/ssri/package.json delete mode 100644 node_modules/pacote/node_modules/unique-filename/LICENSE delete mode 100644 node_modules/pacote/node_modules/unique-filename/lib/index.js delete mode 100644 node_modules/pacote/node_modules/unique-slug/lib/index.js create mode 100644 node_modules/pacote/node_modules/which/LICENSE create mode 100755 node_modules/pacote/node_modules/which/bin/which.js create mode 100644 node_modules/pacote/node_modules/which/lib/index.js rename node_modules/pacote/node_modules/{unique-filename => which}/package.json (50%) diff --git a/mock-registry/package.json b/mock-registry/package.json index 5245e009839b4..8ab0ae7fa069a 100644 --- a/mock-registry/package.json +++ b/mock-registry/package.json @@ -52,7 +52,7 @@ "json-stringify-safe": "^5.0.1", "nock": "^13.3.3", "npm-package-arg": "^12.0.0", - "pacote": "^18.0.6", + "pacote": "^19.0.0", "tap": "^16.3.8" } } diff --git a/node_modules/.gitignore b/node_modules/.gitignore index e91060af4d64c..800cd7bed4f48 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -41,9 +41,22 @@ /@npmcli/metavuln-calculator/node_modules/* !/@npmcli/metavuln-calculator/node_modules/@npmcli/ /@npmcli/metavuln-calculator/node_modules/@npmcli/* +!/@npmcli/metavuln-calculator/node_modules/@npmcli/agent !/@npmcli/metavuln-calculator/node_modules/@npmcli/fs +!/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json +!/@npmcli/metavuln-calculator/node_modules/@npmcli/promise-spawn +!/@npmcli/metavuln-calculator/node_modules/@npmcli/redact +!/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script !/@npmcli/metavuln-calculator/node_modules/cacache +!/@npmcli/metavuln-calculator/node_modules/hosted-git-info !/@npmcli/metavuln-calculator/node_modules/json-parse-even-better-errors +!/@npmcli/metavuln-calculator/node_modules/make-fetch-happen 
+!/@npmcli/metavuln-calculator/node_modules/normalize-package-data +!/@npmcli/metavuln-calculator/node_modules/npm-install-checks +!/@npmcli/metavuln-calculator/node_modules/npm-package-arg +!/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest +!/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch +!/@npmcli/metavuln-calculator/node_modules/pacote !/@npmcli/metavuln-calculator/node_modules/unique-filename !/@npmcli/metavuln-calculator/node_modules/unique-slug !/@npmcli/name-from-folder @@ -260,23 +273,16 @@ /pacote/node_modules/* !/pacote/node_modules/@npmcli/ /pacote/node_modules/@npmcli/* -!/pacote/node_modules/@npmcli/agent -!/pacote/node_modules/@npmcli/fs -!/pacote/node_modules/@npmcli/package-json -!/pacote/node_modules/@npmcli/promise-spawn -!/pacote/node_modules/@npmcli/redact -!/pacote/node_modules/@npmcli/run-script -!/pacote/node_modules/cacache -!/pacote/node_modules/hosted-git-info -!/pacote/node_modules/json-parse-even-better-errors -!/pacote/node_modules/make-fetch-happen -!/pacote/node_modules/normalize-package-data -!/pacote/node_modules/npm-install-checks -!/pacote/node_modules/npm-package-arg -!/pacote/node_modules/npm-pick-manifest -!/pacote/node_modules/npm-registry-fetch -!/pacote/node_modules/unique-filename -!/pacote/node_modules/unique-slug +!/pacote/node_modules/@npmcli/git +!/pacote/node_modules/@npmcli/installed-package-contents +!/pacote/node_modules/ignore-walk +!/pacote/node_modules/isexe +!/pacote/node_modules/npm-bundled +!/pacote/node_modules/npm-normalize-package-bin +!/pacote/node_modules/npm-packlist +!/pacote/node_modules/proc-log +!/pacote/node_modules/ssri +!/pacote/node_modules/which !/parse-conflict-json !/parse-conflict-json/node_modules/ /parse-conflict-json/node_modules/* diff --git a/node_modules/pacote/node_modules/@npmcli/agent/lib/agents.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent/lib/agents.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/agent/lib/agents.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent/lib/agents.js diff --git a/node_modules/pacote/node_modules/@npmcli/agent/lib/dns.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent/lib/dns.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/agent/lib/dns.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent/lib/dns.js diff --git a/node_modules/pacote/node_modules/@npmcli/agent/lib/errors.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent/lib/errors.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/agent/lib/errors.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent/lib/errors.js diff --git a/node_modules/pacote/node_modules/@npmcli/agent/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent/lib/index.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/agent/lib/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent/lib/index.js diff --git a/node_modules/pacote/node_modules/@npmcli/agent/lib/options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent/lib/options.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/agent/lib/options.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent/lib/options.js diff --git a/node_modules/pacote/node_modules/@npmcli/agent/lib/proxy.js 
b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent/lib/proxy.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/agent/lib/proxy.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent/lib/proxy.js diff --git a/node_modules/pacote/node_modules/@npmcli/agent/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent/package.json similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/agent/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent/package.json diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/LICENSE similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/package-json/LICENSE rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/LICENSE diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/lib/index.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/package-json/lib/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/lib/index.js diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/lib/normalize.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/lib/normalize.js diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/read-package.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/lib/read-package.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/package-json/lib/read-package.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/lib/read-package.js diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-dependencies.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/lib/update-dependencies.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/package-json/lib/update-dependencies.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/lib/update-dependencies.js diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-scripts.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/lib/update-scripts.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/package-json/lib/update-scripts.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/lib/update-scripts.js diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-workspaces.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/lib/update-workspaces.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/package-json/lib/update-workspaces.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/lib/update-workspaces.js diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/package.json similarity index 100% rename 
from node_modules/pacote/node_modules/@npmcli/package-json/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json/package.json diff --git a/node_modules/pacote/node_modules/@npmcli/promise-spawn/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/promise-spawn/LICENSE similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/promise-spawn/LICENSE rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/promise-spawn/LICENSE diff --git a/node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/escape.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/promise-spawn/lib/escape.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/escape.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/promise-spawn/lib/escape.js diff --git a/node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/promise-spawn/lib/index.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/promise-spawn/lib/index.js diff --git a/node_modules/pacote/node_modules/@npmcli/promise-spawn/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/promise-spawn/package.json similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/promise-spawn/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/promise-spawn/package.json diff --git a/node_modules/pacote/node_modules/@npmcli/redact/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact/LICENSE similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/redact/LICENSE rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact/LICENSE diff --git a/node_modules/pacote/node_modules/@npmcli/redact/lib/deep-map.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact/lib/deep-map.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/redact/lib/deep-map.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact/lib/deep-map.js diff --git a/node_modules/pacote/node_modules/@npmcli/redact/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact/lib/index.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/redact/lib/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact/lib/index.js diff --git a/node_modules/pacote/node_modules/@npmcli/redact/lib/matchers.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact/lib/matchers.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/redact/lib/matchers.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact/lib/matchers.js diff --git a/node_modules/pacote/node_modules/@npmcli/redact/lib/server.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact/lib/server.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/redact/lib/server.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact/lib/server.js diff --git a/node_modules/pacote/node_modules/@npmcli/redact/lib/utils.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact/lib/utils.js similarity index 
100% rename from node_modules/pacote/node_modules/@npmcli/redact/lib/utils.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact/lib/utils.js diff --git a/node_modules/pacote/node_modules/@npmcli/redact/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact/package.json similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/redact/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact/package.json diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/LICENSE similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/run-script/LICENSE rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/LICENSE diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/is-server-package.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/is-server-package.js diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/make-spawn-args.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/make-spawn-args.js diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/package-envs.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/package-envs.js diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/run-script-pkg.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/run-script-pkg.js diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/run-script.js similarity index 100% rename from 
node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/run-script.js diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/set-path.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/set-path.js diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/signal-manager.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/signal-manager.js diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/validate-options.js similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/lib/validate-options.js diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/package.json similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/run-script/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script/package.json diff --git a/node_modules/pacote/node_modules/hosted-git-info/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/LICENSE similarity index 100% rename from node_modules/pacote/node_modules/hosted-git-info/LICENSE rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/LICENSE diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/from-url.js similarity index 100% rename from node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/from-url.js diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/hosts.js similarity index 100% rename from node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/hosts.js diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/index.js similarity index 100% rename from node_modules/pacote/node_modules/hosted-git-info/lib/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/index.js diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/parse-url.js similarity index 100% rename from node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/parse-url.js diff --git a/node_modules/pacote/node_modules/hosted-git-info/package.json 
b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/package.json similarity index 100% rename from node_modules/pacote/node_modules/hosted-git-info/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/package.json diff --git a/node_modules/pacote/node_modules/make-fetch-happen/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/LICENSE similarity index 100% rename from node_modules/pacote/node_modules/make-fetch-happen/LICENSE rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/LICENSE diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/entry.js similarity index 100% rename from node_modules/pacote/node_modules/make-fetch-happen/lib/cache/entry.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/entry.js diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/errors.js similarity index 100% rename from node_modules/pacote/node_modules/make-fetch-happen/lib/cache/errors.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/errors.js diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/index.js similarity index 100% rename from node_modules/pacote/node_modules/make-fetch-happen/lib/cache/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/index.js diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/key.js similarity index 100% rename from node_modules/pacote/node_modules/make-fetch-happen/lib/cache/key.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/key.js diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/policy.js similarity index 100% rename from node_modules/pacote/node_modules/make-fetch-happen/lib/cache/policy.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/policy.js diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/fetch.js similarity index 100% rename from node_modules/pacote/node_modules/make-fetch-happen/lib/fetch.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/fetch.js diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/index.js similarity index 100% rename from node_modules/pacote/node_modules/make-fetch-happen/lib/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/index.js diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/options.js similarity index 100% rename from node_modules/pacote/node_modules/make-fetch-happen/lib/options.js rename to 
node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/options.js diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/pipeline.js similarity index 100% rename from node_modules/pacote/node_modules/make-fetch-happen/lib/pipeline.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/pipeline.js diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/remote.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/remote.js similarity index 100% rename from node_modules/pacote/node_modules/make-fetch-happen/lib/remote.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/remote.js diff --git a/node_modules/pacote/node_modules/make-fetch-happen/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/package.json similarity index 100% rename from node_modules/pacote/node_modules/make-fetch-happen/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/package.json diff --git a/node_modules/pacote/node_modules/normalize-package-data/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/LICENSE similarity index 100% rename from node_modules/pacote/node_modules/normalize-package-data/LICENSE rename to node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/LICENSE diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/extract_description.js b/node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/lib/extract_description.js similarity index 100% rename from node_modules/pacote/node_modules/normalize-package-data/lib/extract_description.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/lib/extract_description.js diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/fixer.js b/node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/lib/fixer.js similarity index 100% rename from node_modules/pacote/node_modules/normalize-package-data/lib/fixer.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/lib/fixer.js diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/make_warning.js b/node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/lib/make_warning.js similarity index 100% rename from node_modules/pacote/node_modules/normalize-package-data/lib/make_warning.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/lib/make_warning.js diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/normalize.js b/node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/lib/normalize.js similarity index 100% rename from node_modules/pacote/node_modules/normalize-package-data/lib/normalize.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/lib/normalize.js diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/safe_format.js b/node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/lib/safe_format.js similarity index 100% rename from node_modules/pacote/node_modules/normalize-package-data/lib/safe_format.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/lib/safe_format.js diff --git 
a/node_modules/pacote/node_modules/normalize-package-data/lib/typos.json b/node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/lib/typos.json similarity index 100% rename from node_modules/pacote/node_modules/normalize-package-data/lib/typos.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/lib/typos.json diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/warning_messages.json b/node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/lib/warning_messages.json similarity index 100% rename from node_modules/pacote/node_modules/normalize-package-data/lib/warning_messages.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/lib/warning_messages.json diff --git a/node_modules/pacote/node_modules/normalize-package-data/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/package.json similarity index 100% rename from node_modules/pacote/node_modules/normalize-package-data/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data/package.json diff --git a/node_modules/pacote/node_modules/npm-install-checks/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-install-checks/LICENSE similarity index 100% rename from node_modules/pacote/node_modules/npm-install-checks/LICENSE rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-install-checks/LICENSE diff --git a/node_modules/pacote/node_modules/npm-install-checks/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-install-checks/lib/index.js similarity index 100% rename from node_modules/pacote/node_modules/npm-install-checks/lib/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-install-checks/lib/index.js diff --git a/node_modules/pacote/node_modules/npm-install-checks/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-install-checks/package.json similarity index 100% rename from node_modules/pacote/node_modules/npm-install-checks/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-install-checks/package.json diff --git a/node_modules/pacote/node_modules/npm-package-arg/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/LICENSE similarity index 100% rename from node_modules/pacote/node_modules/npm-package-arg/LICENSE rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/LICENSE diff --git a/node_modules/pacote/node_modules/npm-package-arg/lib/npa.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/lib/npa.js similarity index 100% rename from node_modules/pacote/node_modules/npm-package-arg/lib/npa.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/lib/npa.js diff --git a/node_modules/pacote/node_modules/npm-package-arg/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/package.json similarity index 100% rename from node_modules/pacote/node_modules/npm-package-arg/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/package.json diff --git a/node_modules/pacote/node_modules/cacache/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/LICENSE.md similarity index 100% rename from node_modules/pacote/node_modules/cacache/LICENSE.md rename to 
node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/LICENSE.md diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/lib/index.js similarity index 100% rename from node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/lib/index.js diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/package.json similarity index 100% rename from node_modules/pacote/node_modules/npm-pick-manifest/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/package.json diff --git a/node_modules/pacote/node_modules/@npmcli/fs/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/LICENSE.md similarity index 100% rename from node_modules/pacote/node_modules/@npmcli/fs/LICENSE.md rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/LICENSE.md diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/auth.js similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/auth.js diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/check-response.js similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/check-response.js diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/default-opts.js similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/default-opts.js diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/errors.js similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/errors.js diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/index.js similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/index.js diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/json-stream.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/json-stream.js similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/json-stream.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/json-stream.js diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/package.json 
similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/package.json diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE new file mode 100644 index 0000000000000..a03cd0ed0b338 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter, Kat Marchán, npm, Inc., and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/bin/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/bin/index.js new file mode 100755 index 0000000000000..f35b62ca71a53 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/bin/index.js @@ -0,0 +1,158 @@ +#!/usr/bin/env node + +const run = conf => { + const pacote = require('../') + switch (conf._[0]) { + case 'resolve': + case 'manifest': + case 'packument': + if (conf._[0] === 'resolve' && conf.long) { + return pacote.manifest(conf._[1], conf).then(mani => ({ + resolved: mani._resolved, + integrity: mani._integrity, + from: mani._from, + })) + } + return pacote[conf._[0]](conf._[1], conf) + + case 'tarball': + if (!conf._[2] || conf._[2] === '-') { + return pacote.tarball.stream(conf._[1], stream => { + stream.pipe( + conf.testStdout || + /* istanbul ignore next */ + process.stdout + ) + // make sure it resolves something falsey + return stream.promise().then(() => { + return false + }) + }, conf) + } else { + return pacote.tarball.file(conf._[1], conf._[2], conf) + } + + case 'extract': + return pacote.extract(conf._[1], conf._[2], conf) + + default: /* istanbul ignore next */ { + throw new Error(`bad command: ${conf._[0]}`) + } + } +} + +const version = require('../package.json').version +const usage = () => +`Pacote - The JavaScript Package Handler, v${version} + +Usage: + + pacote resolve + Resolve a specifier and output the fully resolved target + Returns integrity and from if '--long' flag is set. + + pacote manifest + Fetch a manifest and print to stdout + + pacote packument + Fetch a full packument and print to stdout + + pacote tarball [] + Fetch a package tarball and save to + If is missing or '-', the tarball will be streamed to stdout. + + pacote extract + Extract a package to the destination folder. + +Configuration values all match the names of configs passed to npm, or +options passed to Pacote. Additional flags for this executable: + + --long Print an object from 'resolve', including integrity and spec. + --json Print result objects as JSON rather than node's default. + (This is the default if stdout is not a TTY.) + --help -h Print this helpful text. 
+ +For example '--cache=/path/to/folder' will use that folder as the cache. +` + +const shouldJSON = (conf, result) => + conf.json || + !process.stdout.isTTY && + conf.json === undefined && + result && + typeof result === 'object' + +const pretty = (conf, result) => + shouldJSON(conf, result) ? JSON.stringify(result, 0, 2) : result + +let addedLogListener = false +const main = args => { + const conf = parse(args) + if (conf.help || conf.h) { + return console.log(usage()) + } + + if (!addedLogListener) { + process.on('log', console.error) + addedLogListener = true + } + + try { + return run(conf) + .then(result => result && console.log(pretty(conf, result))) + .catch(er => { + console.error(er) + process.exit(1) + }) + } catch (er) { + console.error(er.message) + console.error(usage()) + } +} + +const parseArg = arg => { + const split = arg.slice(2).split('=') + const k = split.shift() + const v = split.join('=') + const no = /^no-/.test(k) && !v + const key = (no ? k.slice(3) : k) + .replace(/^tag$/, 'defaultTag') + .replace(/-([a-z])/g, (_, c) => c.toUpperCase()) + const value = v ? v.replace(/^~/, process.env.HOME) : !no + return { key, value } +} + +const parse = args => { + const conf = { + _: [], + cache: process.env.HOME + '/.npm/_cacache', + } + let dashdash = false + args.forEach(arg => { + if (dashdash) { + conf._.push(arg) + } else if (arg === '--') { + dashdash = true + } else if (arg === '-h') { + conf.help = true + } else if (/^--/.test(arg)) { + const { key, value } = parseArg(arg) + conf[key] = value + } else { + conf._.push(arg) + } + }) + return conf +} + +if (module === require.main) { + main(process.argv.slice(2)) +} else { + module.exports = { + main, + run, + usage, + parseArg, + parse, + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js new file mode 100644 index 0000000000000..f3229b34e463a --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js @@ -0,0 +1,100 @@ +const { resolve } = require('node:path') +const packlist = require('npm-packlist') +const runScript = require('@npmcli/run-script') +const tar = require('tar') +const { Minipass } = require('minipass') +const Fetcher = require('./fetcher.js') +const FileFetcher = require('./file.js') +const _ = require('./util/protected.js') +const tarCreateOptions = require('./util/tar-create-options.js') + +class DirFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + // just the fully resolved filename + this.resolved = this.spec.fetchSpec + + this.tree = opts.tree || null + this.Arborist = opts.Arborist || null + } + + // exposes tarCreateOptions as public API + static tarCreateOptions (manifest) { + return tarCreateOptions(manifest) + } + + get types () { + return ['directory'] + } + + #prepareDir () { + return this.manifest().then(mani => { + if (!mani.scripts || !mani.scripts.prepare) { + return + } + + // we *only* run prepare. + // pre/post-pack is run by the npm CLI for publish and pack, + // but this function is *also* run when installing git deps + const stdio = this.opts.foregroundScripts ? 
'inherit' : 'pipe' + + return runScript({ + pkg: mani, + event: 'prepare', + path: this.resolved, + stdio, + env: { + npm_package_resolved: this.resolved, + npm_package_integrity: this.integrity, + npm_package_json: resolve(this.resolved, 'package.json'), + }, + }) + }) + } + + [_.tarballFromResolved] () { + if (!this.tree && !this.Arborist) { + throw new Error('DirFetcher requires either a tree or an Arborist constructor to pack') + } + + const stream = new Minipass() + stream.resolved = this.resolved + stream.integrity = this.integrity + + const { prefix, workspaces } = this.opts + + // run the prepare script, get the list of files, and tar it up + // pipe to the stream, and proxy errors the chain. + this.#prepareDir() + .then(async () => { + if (!this.tree) { + const arb = new this.Arborist({ path: this.resolved }) + this.tree = await arb.loadActual() + } + return packlist(this.tree, { path: this.resolved, prefix, workspaces }) + }) + .then(files => tar.c(tarCreateOptions(this.package), files) + .on('error', er => stream.emit('error', er)).pipe(stream)) + .catch(er => stream.emit('error', er)) + return stream + } + + manifest () { + if (this.package) { + return Promise.resolve(this.package) + } + + return this[_.readPackageJson](this.resolved) + .then(mani => this.package = { + ...mani, + _integrity: this.integrity && String(this.integrity), + _resolved: this.resolved, + _from: this.from, + }) + } + + packument () { + return FileFetcher.prototype.packument.apply(this) + } +} +module.exports = DirFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js new file mode 100644 index 0000000000000..cc2c2db70c697 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js @@ -0,0 +1,489 @@ +// This is the base class that the other fetcher types in lib +// all descend from. +// It handles the unpacking and retry logic that is shared among +// all of the other Fetcher types. + +const { basename, dirname } = require('node:path') +const { rm, mkdir } = require('node:fs/promises') +const PackageJson = require('@npmcli/package-json') +const cacache = require('cacache') +const fsm = require('fs-minipass') +const getContents = require('@npmcli/installed-package-contents') +const npa = require('npm-package-arg') +const retry = require('promise-retry') +const ssri = require('ssri') +const tar = require('tar') +const { Minipass } = require('minipass') +const { log } = require('proc-log') +const _ = require('./util/protected.js') +const cacheDir = require('./util/cache-dir.js') +const isPackageBin = require('./util/is-package-bin.js') +const removeTrailingSlashes = require('./util/trailing-slashes.js') + +// Pacote is only concerned with the package.json contents +const packageJsonPrepare = (p) => PackageJson.prepare(p).then(pkg => pkg.content) +const packageJsonNormalize = (p) => PackageJson.normalize(p).then(pkg => pkg.content) + +class FetcherBase { + constructor (spec, opts) { + if (!opts || typeof opts !== 'object') { + throw new TypeError('options object is required') + } + this.spec = npa(spec, opts.where) + + this.allowGitIgnore = !!opts.allowGitIgnore + + // a bit redundant because presumably the caller already knows this, + // but it makes it easier to not have to keep track of the requested + // spec when we're dispatching thousands of these at once, and normalizing + // is nice. 
saveSpec is preferred if set, because it turns stuff like + // x/y#committish into github:x/y#committish. use name@rawSpec for + // registry deps so that we turn xyz and xyz@ -> xyz@ + this.from = this.spec.registry + ? `${this.spec.name}@${this.spec.rawSpec}` : this.spec.saveSpec + + this.#assertType() + // clone the opts object so that others aren't upset when we mutate it + // by adding/modifying the integrity value. + this.opts = { ...opts } + + this.cache = opts.cache || cacheDir().cacache + this.tufCache = opts.tufCache || cacheDir().tufcache + this.resolved = opts.resolved || null + + // default to caching/verifying with sha512, that's what we usually have + // need to change this default, or start overriding it, when sha512 + // is no longer strong enough. + this.defaultIntegrityAlgorithm = opts.defaultIntegrityAlgorithm || 'sha512' + + if (typeof opts.integrity === 'string') { + this.opts.integrity = ssri.parse(opts.integrity) + } + + this.package = null + this.type = this.constructor.name + this.fmode = opts.fmode || 0o666 + this.dmode = opts.dmode || 0o777 + // we don't need a default umask, because we don't chmod files coming + // out of package tarballs. they're forced to have a mode that is + // valid, regardless of what's in the tarball entry, and then we let + // the process's umask setting do its job. but if configured, we do + // respect it. + this.umask = opts.umask || 0 + + this.preferOnline = !!opts.preferOnline + this.preferOffline = !!opts.preferOffline + this.offline = !!opts.offline + + this.before = opts.before + this.fullMetadata = this.before ? true : !!opts.fullMetadata + this.fullReadJson = !!opts.fullReadJson + this[_.readPackageJson] = this.fullReadJson + ? packageJsonPrepare + : packageJsonNormalize + + // rrh is a registry hostname or 'never' or 'always' + // defaults to registry.npmjs.org + this.replaceRegistryHost = (!opts.replaceRegistryHost || opts.replaceRegistryHost === 'npmjs') ? + 'registry.npmjs.org' : opts.replaceRegistryHost + + this.defaultTag = opts.defaultTag || 'latest' + this.registry = removeTrailingSlashes(opts.registry || 'https://registry.npmjs.org') + + // command to run 'prepare' scripts on directories and git dirs + // To use pacote with yarn, for example, set npmBin to 'yarn' + // and npmCliConfig with yarn's equivalents. + this.npmBin = opts.npmBin || 'npm' + + // command to install deps for preparing + this.npmInstallCmd = opts.npmInstallCmd || ['install', '--force'] + + // XXX fill more of this in based on what we know from this.opts + // we explicitly DO NOT fill in --tag, though, since we are often + // going to be packing in the context of a publish, which may set + // a dist-tag, but certainly wants to keep defaulting to latest. + this.npmCliConfig = opts.npmCliConfig || [ + `--cache=${dirname(this.cache)}`, + `--prefer-offline=${!!this.preferOffline}`, + `--prefer-online=${!!this.preferOnline}`, + `--offline=${!!this.offline}`, + ...(this.before ? 
[`--before=${this.before.toISOString()}`] : []), + '--no-progress', + '--no-save', + '--no-audit', + // override any omit settings from the environment + '--include=dev', + '--include=peer', + '--include=optional', + // we need the actual things, not just the lockfile + '--no-package-lock-only', + '--no-dry-run', + ] + } + + get integrity () { + return this.opts.integrity || null + } + + set integrity (i) { + if (!i) { + return + } + + i = ssri.parse(i) + const current = this.opts.integrity + + // do not ever update an existing hash value, but do + // merge in NEW algos and hashes that we don't already have. + if (current) { + current.merge(i) + } else { + this.opts.integrity = i + } + } + + get notImplementedError () { + return new Error('not implemented in this fetcher type: ' + this.type) + } + + // override in child classes + // Returns a Promise that resolves to this.resolved string value + resolve () { + return this.resolved ? Promise.resolve(this.resolved) + : Promise.reject(this.notImplementedError) + } + + packument () { + return Promise.reject(this.notImplementedError) + } + + // override in child class + // returns a manifest containing: + // - name + // - version + // - _resolved + // - _integrity + // - plus whatever else was in there (corgi, full metadata, or pj file) + manifest () { + return Promise.reject(this.notImplementedError) + } + + // private, should be overridden. + // Note that they should *not* calculate or check integrity or cache, + // but *just* return the raw tarball data stream. + [_.tarballFromResolved] () { + throw this.notImplementedError + } + + // public, should not be overridden + tarball () { + return this.tarballStream(stream => stream.concat().then(data => { + data.integrity = this.integrity && String(this.integrity) + data.resolved = this.resolved + data.from = this.from + return data + })) + } + + // private + // Note: cacache will raise a EINTEGRITY error if the integrity doesn't match + #tarballFromCache () { + return cacache.get.stream.byDigest(this.cache, this.integrity, this.opts) + } + + get [_.cacheFetches] () { + return true + } + + #istream (stream) { + // if not caching this, just return it + if (!this.opts.cache || !this[_.cacheFetches]) { + // instead of creating a new integrity stream, we only piggyback on the + // provided stream's events + if (stream.hasIntegrityEmitter) { + stream.on('integrity', i => this.integrity = i) + return stream + } + + const istream = ssri.integrityStream(this.opts) + istream.on('integrity', i => this.integrity = i) + stream.on('error', err => istream.emit('error', err)) + return stream.pipe(istream) + } + + // we have to return a stream that gets ALL the data, and proxies errors, + // but then pipe from the original tarball stream into the cache as well. + // To do this without losing any data, and since the cacache put stream + // is not a passthrough, we have to pipe from the original stream into + // the cache AFTER we pipe into the middleStream. Since the cache stream + // has an asynchronous flush to write its contents to disk, we need to + // defer the middleStream end until the cache stream ends. 
+ const middleStream = new Minipass() + stream.on('error', err => middleStream.emit('error', err)) + stream.pipe(middleStream, { end: false }) + const cstream = cacache.put.stream( + this.opts.cache, + `pacote:tarball:${this.from}`, + this.opts + ) + cstream.on('integrity', i => this.integrity = i) + cstream.on('error', err => stream.emit('error', err)) + stream.pipe(cstream) + + // eslint-disable-next-line promise/catch-or-return + cstream.promise().catch(() => {}).then(() => middleStream.end()) + return middleStream + } + + pickIntegrityAlgorithm () { + return this.integrity ? this.integrity.pickAlgorithm(this.opts) + : this.defaultIntegrityAlgorithm + } + + // TODO: check error class, once those are rolled out to our deps + isDataCorruptionError (er) { + return er.code === 'EINTEGRITY' || er.code === 'Z_DATA_ERROR' + } + + // override the types getter + get types () { + return false + } + + #assertType () { + if (this.types && !this.types.includes(this.spec.type)) { + throw new TypeError(`Wrong spec type (${ + this.spec.type + }) for ${ + this.constructor.name + }. Supported types: ${this.types.join(', ')}`) + } + } + + // We allow ENOENTs from cacache, but not anywhere else. + // An ENOENT trying to read a tgz file, for example, is Right Out. + isRetriableError (er) { + // TODO: check error class, once those are rolled out to our deps + return this.isDataCorruptionError(er) || + er.code === 'ENOENT' || + er.code === 'EISDIR' + } + + // Mostly internal, but has some uses + // Pass in a function which returns a promise + // Function will be called 1 or more times with streams that may fail. + // Retries: + // Function MUST handle errors on the stream by rejecting the promise, + // so that retry logic can pick it up and either retry or fail whatever + // promise it was making (ie, failing extraction, etc.) + // + // The return value of this method is a Promise that resolves the same + // as whatever the streamHandler resolves to. + // + // This should never be overridden by child classes, but it is public. + tarballStream (streamHandler) { + // Only short-circuit via cache if we have everything else we'll need, + // and the user has not expressed a preference for checking online. + + const fromCache = ( + !this.preferOnline && + this.integrity && + this.resolved + ) ? streamHandler(this.#tarballFromCache()).catch(er => { + if (this.isDataCorruptionError(er)) { + log.warn('tarball', `cached data for ${ + this.spec + } (${this.integrity}) seems to be corrupted. Refreshing cache.`) + return this.cleanupCached().then(() => { + throw er + }) + } else { + throw er + } + }) : null + + const fromResolved = er => { + if (er) { + if (!this.isRetriableError(er)) { + throw er + } + log.silly('tarball', `no local data for ${ + this.spec + }. Extracting by manifest.`) + } + return this.resolve().then(() => retry(tryAgain => + streamHandler(this.#istream(this[_.tarballFromResolved]())) + .catch(streamErr => { + // Most likely data integrity. A cache ENOENT error is unlikely + // here, since we're definitely not reading from the cache, but it + // IS possible that the fetch subsystem accessed the cache, and the + // entry got blown away or something. Try one more time to be sure. + if (this.isRetriableError(streamErr)) { + log.warn('tarball', `tarball data for ${ + this.spec + } (${this.integrity}) seems to be corrupted. Trying again.`) + return this.cleanupCached().then(() => tryAgain(streamErr)) + } + throw streamErr + }), { retries: 1, minTimeout: 0, maxTimeout: 0 })) + } + + return fromCache ? 
fromCache.catch(fromResolved) : fromResolved() + } + + cleanupCached () { + return cacache.rm.content(this.cache, this.integrity, this.opts) + } + + #empty (path) { + return getContents({ path, depth: 1 }).then(contents => Promise.all( + contents.map(entry => rm(entry, { recursive: true, force: true })))) + } + + async #mkdir (dest) { + await this.#empty(dest) + return await mkdir(dest, { recursive: true }) + } + + // extraction is always the same. the only difference is where + // the tarball comes from. + async extract (dest) { + await this.#mkdir(dest) + return this.tarballStream((tarball) => this.#extract(dest, tarball)) + } + + #toFile (dest) { + return this.tarballStream(str => new Promise((res, rej) => { + const writer = new fsm.WriteStream(dest) + str.on('error', er => writer.emit('error', er)) + writer.on('error', er => rej(er)) + writer.on('close', () => res({ + integrity: this.integrity && String(this.integrity), + resolved: this.resolved, + from: this.from, + })) + str.pipe(writer) + })) + } + + // don't use this.#mkdir because we don't want to rimraf anything + async tarballFile (dest) { + const dir = dirname(dest) + await mkdir(dir, { recursive: true }) + return this.#toFile(dest) + } + + #extract (dest, tarball) { + const extractor = tar.x(this.#tarxOptions({ cwd: dest })) + const p = new Promise((resolve, reject) => { + extractor.on('end', () => { + resolve({ + resolved: this.resolved, + integrity: this.integrity && String(this.integrity), + from: this.from, + }) + }) + + extractor.on('error', er => { + log.warn('tar', er.message) + log.silly('tar', er) + reject(er) + }) + + tarball.on('error', er => reject(er)) + }) + + tarball.pipe(extractor) + return p + } + + // always ensure that entries are at least as permissive as our configured + // dmode/fmode, but never more permissive than the umask allows. + #entryMode (path, mode, type) { + const m = /Directory|GNUDumpDir/.test(type) ? this.dmode + : /File$/.test(type) ? this.fmode + : /* istanbul ignore next - should never happen in a pkg */ 0 + + // make sure package bins are executable + const exe = isPackageBin(this.package, path) ? 0o111 : 0 + // always ensure that files are read/writable by the owner + return ((mode | m) & ~this.umask) | exe | 0o600 + } + + #tarxOptions ({ cwd }) { + const sawIgnores = new Set() + return { + cwd, + noChmod: true, + noMtime: true, + filter: (name, entry) => { + if (/Link$/.test(entry.type)) { + return false + } + entry.mode = this.#entryMode(entry.path, entry.mode, entry.type) + // this replicates the npm pack behavior where .gitignore files + // are treated like .npmignore files, but only if a .npmignore + // file is not present. 
+ if (/File$/.test(entry.type)) { + const base = basename(entry.path) + if (base === '.npmignore') { + sawIgnores.add(entry.path) + } else if (base === '.gitignore' && !this.allowGitIgnore) { + // rename, but only if there's not already a .npmignore + const ni = entry.path.replace(/\.gitignore$/, '.npmignore') + if (sawIgnores.has(ni)) { + return false + } + entry.path = ni + } + return true + } + }, + strip: 1, + onwarn: /* istanbul ignore next - we can trust that tar logs */ + (code, msg, data) => { + log.warn('tar', code, msg) + log.silly('tar', code, msg, data) + }, + umask: this.umask, + // always ignore ownership info from tarball metadata + preserveOwner: false, + } + } +} + +module.exports = FetcherBase + +// Child classes +const GitFetcher = require('./git.js') +const RegistryFetcher = require('./registry.js') +const FileFetcher = require('./file.js') +const DirFetcher = require('./dir.js') +const RemoteFetcher = require('./remote.js') + +// Get an appropriate fetcher object from a spec and options +FetcherBase.get = (rawSpec, opts = {}) => { + const spec = npa(rawSpec, opts.where) + switch (spec.type) { + case 'git': + return new GitFetcher(spec, opts) + + case 'remote': + return new RemoteFetcher(spec, opts) + + case 'version': + case 'range': + case 'tag': + case 'alias': + return new RegistryFetcher(spec.subSpec || spec, opts) + + case 'file': + return new FileFetcher(spec, opts) + + case 'directory': + return new DirFetcher(spec, opts) + + default: + throw new TypeError('Unknown spec type: ' + spec.type) + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js new file mode 100644 index 0000000000000..2021325085e4f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js @@ -0,0 +1,94 @@ +const { resolve } = require('node:path') +const { stat, chmod } = require('node:fs/promises') +const cacache = require('cacache') +const fsm = require('fs-minipass') +const Fetcher = require('./fetcher.js') +const _ = require('./util/protected.js') + +class FileFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + // just the fully resolved filename + this.resolved = this.spec.fetchSpec + } + + get types () { + return ['file'] + } + + manifest () { + if (this.package) { + return Promise.resolve(this.package) + } + + // have to unpack the tarball for this. + return cacache.tmp.withTmp(this.cache, this.opts, dir => + this.extract(dir) + .then(() => this[_.readPackageJson](dir)) + .then(mani => this.package = { + ...mani, + _integrity: this.integrity && String(this.integrity), + _resolved: this.resolved, + _from: this.from, + })) + } + + #exeBins (pkg, dest) { + if (!pkg.bin) { + return Promise.resolve() + } + + return Promise.all(Object.keys(pkg.bin).map(async k => { + const script = resolve(dest, pkg.bin[k]) + // Best effort. Ignore errors here, the only result is that + // a bin script is not executable. But if it's missing or + // something, we just leave it for a later stage to trip over + // when we can provide a more useful contextual error. + try { + const st = await stat(script) + const mode = st.mode | 0o111 + if (mode === st.mode) { + return + } + await chmod(script, mode) + } catch { + // Ignore errors here + } + })) + } + + extract (dest) { + // if we've already loaded the manifest, then the super got it. + // but if not, read the unpacked manifest and chmod properly. 
+ return super.extract(dest) + .then(result => this.package ? result + : this[_.readPackageJson](dest).then(pkg => + this.#exeBins(pkg, dest)).then(() => result)) + } + + [_.tarballFromResolved] () { + // create a read stream and return it + return new fsm.ReadStream(this.resolved) + } + + packument () { + // simulate based on manifest + return this.manifest().then(mani => ({ + name: mani.name, + 'dist-tags': { + [this.defaultTag]: mani.version, + }, + versions: { + [mani.version]: { + ...mani, + dist: { + tarball: `file:${this.resolved}`, + integrity: this.integrity && String(this.integrity), + }, + }, + }, + })) + } +} + +module.exports = FileFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js new file mode 100644 index 0000000000000..077193a86f026 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js @@ -0,0 +1,317 @@ +const cacache = require('cacache') +const git = require('@npmcli/git') +const npa = require('npm-package-arg') +const pickManifest = require('npm-pick-manifest') +const { Minipass } = require('minipass') +const { log } = require('proc-log') +const DirFetcher = require('./dir.js') +const Fetcher = require('./fetcher.js') +const FileFetcher = require('./file.js') +const RemoteFetcher = require('./remote.js') +const _ = require('./util/protected.js') +const addGitSha = require('./util/add-git-sha.js') +const npm = require('./util/npm.js') + +const hashre = /^[a-f0-9]{40}$/ + +// get the repository url. +// prefer https if there's auth, since ssh will drop that. +// otherwise, prefer ssh if available (more secure). +// We have to add the git+ back because npa suppresses it. +const repoUrl = (h, opts) => + h.sshurl && !(h.https && h.auth) && addGitPlus(h.sshurl(opts)) || + h.https && addGitPlus(h.https(opts)) + +// add git+ to the url, but only one time. +const addGitPlus = url => url && `git+${url}`.replace(/^(git\+)+/, 'git+') + +class GitFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + + // we never want to compare integrity for git dependencies: npm/rfcs#525 + if (this.opts.integrity) { + delete this.opts.integrity + log.warn(`skipping integrity check for git dependency ${this.spec.fetchSpec}`) + } + + this.resolvedRef = null + if (this.spec.hosted) { + this.from = this.spec.hosted.shortcut({ noCommittish: false }) + } + + // shortcut: avoid full clone when we can go straight to the tgz + // if we have the full sha and it's a hosted git platform + if (this.spec.gitCommittish && hashre.test(this.spec.gitCommittish)) { + this.resolvedSha = this.spec.gitCommittish + // use hosted.tarball() when we shell to RemoteFetcher later + this.resolved = this.spec.hosted + ? repoUrl(this.spec.hosted, { noCommittish: false }) + : this.spec.rawSpec + } else { + this.resolvedSha = '' + } + + this.Arborist = opts.Arborist || null + } + + // just exposed to make it easier to test all the combinations + static repoUrl (hosted, opts) { + return repoUrl(hosted, opts) + } + + get types () { + return ['git'] + } + + resolve () { + // likely a hosted git repo with a sha, so get the tarball url + // but in general, no reason to resolve() more than necessary! + if (this.resolved) { + return super.resolve() + } + + // fetch the git repo and then look at the current hash + const h = this.spec.hosted + // try to use ssh, fall back to git. + return h + ? 
this.#resolvedFromHosted(h) + : this.#resolvedFromRepo(this.spec.fetchSpec) + } + + // first try https, since that's faster and passphrase-less for + // public repos, and supports private repos when auth is provided. + // Fall back to SSH to support private repos + // NB: we always store the https url in resolved field if auth + // is present, otherwise ssh if the hosted type provides it + #resolvedFromHosted (hosted) { + return this.#resolvedFromRepo(hosted.https && hosted.https()).catch(er => { + // Throw early since we know pathspec errors will fail again if retried + if (er instanceof git.errors.GitPathspecError) { + throw er + } + const ssh = hosted.sshurl && hosted.sshurl() + // no fallthrough if we can't fall through or have https auth + if (!ssh || hosted.auth) { + throw er + } + return this.#resolvedFromRepo(ssh) + }) + } + + #resolvedFromRepo (gitRemote) { + // XXX make this a custom error class + if (!gitRemote) { + return Promise.reject(new Error(`No git url for ${this.spec}`)) + } + const gitRange = this.spec.gitRange + const name = this.spec.name + return git.revs(gitRemote, this.opts).then(remoteRefs => { + return gitRange ? pickManifest({ + versions: remoteRefs.versions, + 'dist-tags': remoteRefs['dist-tags'], + name, + }, gitRange, this.opts) + : this.spec.gitCommittish ? + remoteRefs.refs[this.spec.gitCommittish] || + remoteRefs.refs[remoteRefs.shas[this.spec.gitCommittish]] + : remoteRefs.refs.HEAD // no git committish, get default head + }).then(revDoc => { + // the committish provided isn't in the rev list + // things like HEAD~3 or @yesterday can land here. + if (!revDoc || !revDoc.sha) { + return this.#resolvedFromClone() + } + + this.resolvedRef = revDoc + this.resolvedSha = revDoc.sha + this.#addGitSha(revDoc.sha) + return this.resolved + }) + } + + #setResolvedWithSha (withSha) { + // we haven't cloned, so a tgz download is still faster + // of course, if it's not a known host, we can't do that. + this.resolved = !this.spec.hosted ? withSha + : repoUrl(npa(withSha).hosted, { noCommittish: false }) + } + + // when we get the git sha, we affix it to our spec to build up + // either a git url with a hash, or a tarball download URL + #addGitSha (sha) { + this.#setResolvedWithSha(addGitSha(this.spec, sha)) + } + + #resolvedFromClone () { + // do a full or shallow clone, then look at the HEAD + // kind of wasteful, but no other option, really + return this.#clone(() => this.resolved) + } + + #prepareDir (dir) { + return this[_.readPackageJson](dir).then(mani => { + // no need if we aren't going to do any preparation. + const scripts = mani.scripts + if (!mani.workspaces && (!scripts || !( + scripts.postinstall || + scripts.build || + scripts.preinstall || + scripts.install || + scripts.prepack || + scripts.prepare))) { + return + } + + // to avoid cases where we have an cycle of git deps that depend + // on one another, we only ever do preparation for one instance + // of a given git dep along the chain of installations. + // Note that this does mean that a dependency MAY in theory end up + // trying to run its prepare script using a dependency that has not + // been properly prepared itself, but that edge case is smaller + // and less hazardous than a fork bomb of npm and git commands. + const noPrepare = !process.env._PACOTE_NO_PREPARE_ ? 
[] + : process.env._PACOTE_NO_PREPARE_.split('\n') + if (noPrepare.includes(this.resolved)) { + log.info('prepare', 'skip prepare, already seen', this.resolved) + return + } + noPrepare.push(this.resolved) + + // the DirFetcher will do its own preparation to run the prepare scripts + // All we have to do is put the deps in place so that it can succeed. + return npm( + this.npmBin, + [].concat(this.npmInstallCmd).concat(this.npmCliConfig), + dir, + { ...process.env, _PACOTE_NO_PREPARE_: noPrepare.join('\n') }, + { message: 'git dep preparation failed' } + ) + }) + } + + [_.tarballFromResolved] () { + const stream = new Minipass() + stream.resolved = this.resolved + stream.from = this.from + + // check it out and then shell out to the DirFetcher tarball packer + this.#clone(dir => this.#prepareDir(dir) + .then(() => new Promise((res, rej) => { + if (!this.Arborist) { + throw new Error('GitFetcher requires an Arborist constructor to pack a tarball') + } + const df = new DirFetcher(`file:${dir}`, { + ...this.opts, + Arborist: this.Arborist, + resolved: null, + integrity: null, + }) + const dirStream = df[_.tarballFromResolved]() + dirStream.on('error', rej) + dirStream.on('end', res) + dirStream.pipe(stream) + }))).catch( + /* istanbul ignore next: very unlikely and hard to test */ + er => stream.emit('error', er) + ) + return stream + } + + // clone a git repo into a temp folder (or fetch and unpack if possible) + // handler accepts a directory, and returns a promise that resolves + // when we're done with it, at which point, cacache deletes it + // + // TODO: after cloning, create a tarball of the folder, and add to the cache + // with cacache.put.stream(), using a key that's deterministic based on the + // spec and repo, so that we don't ever clone the same thing multiple times. + #clone (handler, tarballOk = true) { + const o = { tmpPrefix: 'git-clone' } + const ref = this.resolvedSha || this.spec.gitCommittish + const h = this.spec.hosted + const resolved = this.resolved + + // can be set manually to false to fall back to actual git clone + tarballOk = tarballOk && + h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball + + return cacache.tmp.withTmp(this.cache, o, async tmp => { + // if we're resolved, and have a tarball url, shell out to RemoteFetcher + if (tarballOk) { + const nameat = this.spec.name ? `${this.spec.name}@` : '' + return new RemoteFetcher(h.tarball({ noCommittish: false }), { + ...this.opts, + allowGitIgnore: true, + pkgid: `git:${nameat}${this.resolved}`, + resolved: this.resolved, + integrity: null, // it'll always be different, if we have one + }).extract(tmp).then(() => handler(tmp), er => { + // fall back to ssh download if tarball fails + if (er.constructor.name.match(/^Http/)) { + return this.#clone(handler, false) + } else { + throw er + } + }) + } + + const sha = await ( + h ? this.#cloneHosted(ref, tmp) + : this.#cloneRepo(this.spec.fetchSpec, ref, tmp) + ) + this.resolvedSha = sha + if (!this.resolved) { + await this.#addGitSha(sha) + } + return handler(tmp) + }) + } + + // first try https, since that's faster and passphrase-less for + // public repos, and supports private repos when auth is provided. 
+ // Fall back to SSH to support private repos + // NB: we always store the https url in resolved field if auth + // is present, otherwise ssh if the hosted type provides it + #cloneHosted (ref, tmp) { + const hosted = this.spec.hosted + return this.#cloneRepo(hosted.https({ noCommittish: true }), ref, tmp) + .catch(er => { + // Throw early since we know pathspec errors will fail again if retried + if (er instanceof git.errors.GitPathspecError) { + throw er + } + const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true }) + // no fallthrough if we can't fall through or have https auth + if (!ssh || hosted.auth) { + throw er + } + return this.#cloneRepo(ssh, ref, tmp) + }) + } + + #cloneRepo (repo, ref, tmp) { + const { opts, spec } = this + return git.clone(repo, ref, tmp, { ...opts, spec }) + } + + manifest () { + if (this.package) { + return Promise.resolve(this.package) + } + + return this.spec.hosted && this.resolved + ? FileFetcher.prototype.manifest.apply(this) + : this.#clone(dir => + this[_.readPackageJson](dir) + .then(mani => this.package = { + ...mani, + _resolved: this.resolved, + _from: this.from, + })) + } + + packument () { + return FileFetcher.prototype.packument.apply(this) + } +} +module.exports = GitFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js new file mode 100644 index 0000000000000..f35314d275d5f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js @@ -0,0 +1,23 @@ +const { get } = require('./fetcher.js') +const GitFetcher = require('./git.js') +const RegistryFetcher = require('./registry.js') +const FileFetcher = require('./file.js') +const DirFetcher = require('./dir.js') +const RemoteFetcher = require('./remote.js') + +const tarball = (spec, opts) => get(spec, opts).tarball() +tarball.stream = (spec, handler, opts) => get(spec, opts).tarballStream(handler) +tarball.file = (spec, dest, opts) => get(spec, opts).tarballFile(dest) + +module.exports = { + GitFetcher, + RegistryFetcher, + FileFetcher, + DirFetcher, + RemoteFetcher, + resolve: (spec, opts) => get(spec, opts).resolve(), + extract: (spec, dest, opts) => get(spec, opts).extract(dest), + manifest: (spec, opts) => get(spec, opts).manifest(), + packument: (spec, opts) => get(spec, opts).packument(), + tarball, +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js new file mode 100644 index 0000000000000..1ecf4ee177349 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js @@ -0,0 +1,369 @@ +const crypto = require('node:crypto') +const PackageJson = require('@npmcli/package-json') +const pickManifest = require('npm-pick-manifest') +const ssri = require('ssri') +const npa = require('npm-package-arg') +const sigstore = require('sigstore') +const fetch = require('npm-registry-fetch') +const Fetcher = require('./fetcher.js') +const RemoteFetcher = require('./remote.js') +const pacoteVersion = require('../package.json').version +const removeTrailingSlashes = require('./util/trailing-slashes.js') +const _ = require('./util/protected.js') + +// Corgis are cute. 🐕🐶 +const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*' +const fullDoc = 'application/json' + +// Some really old packages have no time field in their packument so we need a +// cutoff date. 
+const MISSING_TIME_CUTOFF = '2015-01-01T00:00:00.000Z' + +class RegistryFetcher extends Fetcher { + #cacheKey + constructor (spec, opts) { + super(spec, opts) + + // you usually don't want to fetch the same packument multiple times in + // the span of a given script or command, no matter how many pacote calls + // are made, so this lets us avoid doing that. It's only relevant for + // registry fetchers, because other types simulate their packument from + // the manifest, which they memoize on this.package, so it's very cheap + // already. + this.packumentCache = this.opts.packumentCache || null + + this.registry = fetch.pickRegistry(spec, opts) + this.packumentUrl = `${removeTrailingSlashes(this.registry)}/${this.spec.escapedName}` + this.#cacheKey = `${this.fullMetadata ? 'full' : 'corgi'}:${this.packumentUrl}` + + const parsed = new URL(this.registry) + const regKey = `//${parsed.host}${parsed.pathname}` + // unlike the nerf-darted auth keys, this one does *not* allow a mismatch + // of trailing slashes. It must match exactly. + if (this.opts[`${regKey}:_keys`]) { + this.registryKeys = this.opts[`${regKey}:_keys`] + } + + // XXX pacote <=9 has some logic to ignore opts.resolved if + // the resolved URL doesn't go to the same registry. + // Consider reproducing that here, to throw away this.resolved + // in that case. + } + + async resolve () { + // fetching the manifest sets resolved and (if present) integrity + await this.manifest() + if (!this.resolved) { + throw Object.assign( + new Error('Invalid package manifest: no `dist.tarball` field'), + { package: this.spec.toString() } + ) + } + return this.resolved + } + + #headers () { + return { + // npm will override UA, but ensure that we always send *something* + 'user-agent': this.opts.userAgent || + `pacote/${pacoteVersion} node/${process.version}`, + ...(this.opts.headers || {}), + 'pacote-version': pacoteVersion, + 'pacote-req-type': 'packument', + 'pacote-pkg-id': `registry:${this.spec.name}`, + accept: this.fullMetadata ? fullDoc : corgiDoc, + } + } + + async packument () { + // note this might be either an in-flight promise for a request, + // or the actual packument, but we never want to make more than + // one request at a time for the same thing regardless. + if (this.packumentCache?.has(this.#cacheKey)) { + return this.packumentCache.get(this.#cacheKey) + } + + // npm-registry-fetch the packument + // set the appropriate header for corgis if fullMetadata isn't set + // return the res.json() promise + try { + const res = await fetch(this.packumentUrl, { + ...this.opts, + headers: this.#headers(), + spec: this.spec, + + // never check integrity for packuments themselves + integrity: null, + }) + const packument = await res.json() + const contentLength = res.headers.get('content-length') + if (contentLength) { + packument._contentLength = Number(contentLength) + } + this.packumentCache?.set(this.#cacheKey, packument) + return packument + } catch (err) { + this.packumentCache?.delete(this.#cacheKey) + if (err.code !== 'E404' || this.fullMetadata) { + throw err + } + // possible that corgis are not supported by this registry + this.fullMetadata = true + return this.packument() + } + } + + async manifest () { + if (this.package) { + return this.package + } + + // When verifying signatures, we need to fetch the full/uncompressed + // packument to get publish time as this is not included in the + // corgi/compressed packument. 
+ if (this.opts.verifySignatures) { + this.fullMetadata = true + } + + const packument = await this.packument() + const steps = PackageJson.normalizeSteps.filter(s => s !== '_attributes') + const mani = await new PackageJson().fromContent(pickManifest(packument, this.spec.fetchSpec, { + ...this.opts, + defaultTag: this.defaultTag, + before: this.before, + })).normalize({ steps }).then(p => p.content) + + /* XXX add ETARGET and E403 revalidation of cached packuments here */ + + // add _time from packument if fetched with fullMetadata + const time = packument.time?.[mani.version] + if (time) { + mani._time = time + } + + // add _resolved and _integrity from dist object + const { dist } = mani + if (dist) { + this.resolved = mani._resolved = dist.tarball + mani._from = this.from + const distIntegrity = dist.integrity ? ssri.parse(dist.integrity) + : dist.shasum ? ssri.fromHex(dist.shasum, 'sha1', { ...this.opts }) + : null + if (distIntegrity) { + if (this.integrity && !this.integrity.match(distIntegrity)) { + // only bork if they have algos in common. + // otherwise we end up breaking if we have saved a sha512 + // previously for the tarball, but the manifest only + // provides a sha1, which is possible for older publishes. + // Otherwise, this is almost certainly a case of holding it + // wrong, and will result in weird or insecure behavior + // later on when building package tree. + for (const algo of Object.keys(this.integrity)) { + if (distIntegrity[algo]) { + throw Object.assign(new Error( + `Integrity checksum failed when using ${algo}: ` + + `wanted ${this.integrity} but got ${distIntegrity}.` + ), { code: 'EINTEGRITY' }) + } + } + } + // made it this far, the integrity is worthwhile. accept it. + // the setter here will take care of merging it into what we already + // had. 
+ this.integrity = distIntegrity + } + } + if (this.integrity) { + mani._integrity = String(this.integrity) + if (dist.signatures) { + if (this.opts.verifySignatures) { + // validate and throw on error, then set _signatures + const message = `${mani._id}:${mani._integrity}` + for (const signature of dist.signatures) { + const publicKey = this.registryKeys && + this.registryKeys.filter(key => (key.keyid === signature.keyid))[0] + if (!publicKey) { + throw Object.assign(new Error( + `${mani._id} has a registry signature with keyid: ${signature.keyid} ` + + 'but no corresponding public key can be found' + ), { code: 'EMISSINGSIGNATUREKEY' }) + } + + const publishedTime = Date.parse(mani._time || MISSING_TIME_CUTOFF) + const validPublicKey = !publicKey.expires || + publishedTime < Date.parse(publicKey.expires) + if (!validPublicKey) { + throw Object.assign(new Error( + `${mani._id} has a registry signature with keyid: ${signature.keyid} ` + + `but the corresponding public key has expired ${publicKey.expires}` + ), { code: 'EEXPIREDSIGNATUREKEY' }) + } + const verifier = crypto.createVerify('SHA256') + verifier.write(message) + verifier.end() + const valid = verifier.verify( + publicKey.pemkey, + signature.sig, + 'base64' + ) + if (!valid) { + throw Object.assign(new Error( + `${mani._id} has an invalid registry signature with ` + + `keyid: ${publicKey.keyid} and signature: ${signature.sig}` + ), { + code: 'EINTEGRITYSIGNATURE', + keyid: publicKey.keyid, + signature: signature.sig, + resolved: mani._resolved, + integrity: mani._integrity, + }) + } + } + mani._signatures = dist.signatures + } else { + mani._signatures = dist.signatures + } + } + + if (dist.attestations) { + if (this.opts.verifyAttestations) { + // Always fetch attestations from the current registry host + const attestationsPath = new URL(dist.attestations.url).pathname + const attestationsUrl = removeTrailingSlashes(this.registry) + attestationsPath + const res = await fetch(attestationsUrl, { + ...this.opts, + // disable integrity check for attestations json payload, we check the + // integrity in the verification steps below + integrity: null, + }) + const { attestations } = await res.json() + const bundles = attestations.map(({ predicateType, bundle }) => { + const statement = JSON.parse( + Buffer.from(bundle.dsseEnvelope.payload, 'base64').toString('utf8') + ) + const keyid = bundle.dsseEnvelope.signatures[0].keyid + const signature = bundle.dsseEnvelope.signatures[0].sig + + return { + predicateType, + bundle, + statement, + keyid, + signature, + } + }) + + const attestationKeyIds = bundles.map((b) => b.keyid).filter((k) => !!k) + const attestationRegistryKeys = (this.registryKeys || []) + .filter(key => attestationKeyIds.includes(key.keyid)) + if (!attestationRegistryKeys.length) { + throw Object.assign(new Error( + `${mani._id} has attestations but no corresponding public key(s) can be found` + ), { code: 'EMISSINGSIGNATUREKEY' }) + } + + for (const { predicateType, bundle, keyid, signature, statement } of bundles) { + const publicKey = attestationRegistryKeys.find(key => key.keyid === keyid) + // Publish attestations have a keyid set and a valid public key must be found + if (keyid) { + if (!publicKey) { + throw Object.assign(new Error( + `${mani._id} has attestations with keyid: ${keyid} ` + + 'but no corresponding public key can be found' + ), { code: 'EMISSINGSIGNATUREKEY' }) + } + + const integratedTime = new Date( + Number( + bundle.verificationMaterial.tlogEntries[0].integratedTime + ) * 1000 + ) + const 
validPublicKey = !publicKey.expires || + (integratedTime < Date.parse(publicKey.expires)) + if (!validPublicKey) { + throw Object.assign(new Error( + `${mani._id} has attestations with keyid: ${keyid} ` + + `but the corresponding public key has expired ${publicKey.expires}` + ), { code: 'EEXPIREDSIGNATUREKEY' }) + } + } + + const subject = { + name: statement.subject[0].name, + sha512: statement.subject[0].digest.sha512, + } + + // Only type 'version' can be turned into a PURL + const purl = this.spec.type === 'version' ? npa.toPurl(this.spec) : this.spec + // Verify the statement subject matches the package, version + if (subject.name !== purl) { + throw Object.assign(new Error( + `${mani._id} package name and version (PURL): ${purl} ` + + `doesn't match what was signed: ${subject.name}` + ), { code: 'EATTESTATIONSUBJECT' }) + } + + // Verify the statement subject matches the tarball integrity + const integrityHexDigest = ssri.parse(this.integrity).hexDigest() + if (subject.sha512 !== integrityHexDigest) { + throw Object.assign(new Error( + `${mani._id} package integrity (hex digest): ` + + `${integrityHexDigest} ` + + `doesn't match what was signed: ${subject.sha512}` + ), { code: 'EATTESTATIONSUBJECT' }) + } + + try { + // Provenance attestations are signed with a signing certificate + // (including the key) so we don't need to return a public key. + // + // Publish attestations are signed with a keyid so we need to + // specify a public key from the keys endpoint: `registry-host.tld/-/npm/v1/keys` + const options = { + tufCachePath: this.tufCache, + tufForceCache: true, + keySelector: publicKey ? () => publicKey.pemkey : undefined, + } + await sigstore.verify(bundle, options) + } catch (e) { + throw Object.assign(new Error( + `${mani._id} failed to verify attestation: ${e.message}` + ), { + code: 'EATTESTATIONVERIFY', + predicateType, + keyid, + signature, + resolved: mani._resolved, + integrity: mani._integrity, + }) + } + } + mani._attestations = dist.attestations + } else { + mani._attestations = dist.attestations + } + } + } + + this.package = mani + return this.package + } + + [_.tarballFromResolved] () { + // we use a RemoteFetcher to get the actual tarball stream + return new RemoteFetcher(this.resolved, { + ...this.opts, + resolved: this.resolved, + pkgid: `registry:${this.spec.name}@${this.resolved}`, + })[_.tarballFromResolved]() + } + + get types () { + return [ + 'tag', + 'version', + 'range', + ] + } +} +module.exports = RegistryFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js new file mode 100644 index 0000000000000..bd321e65a1f18 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js @@ -0,0 +1,89 @@ +const fetch = require('npm-registry-fetch') +const { Minipass } = require('minipass') +const Fetcher = require('./fetcher.js') +const FileFetcher = require('./file.js') +const _ = require('./util/protected.js') +const pacoteVersion = require('../package.json').version + +class RemoteFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + this.resolved = this.spec.fetchSpec + const resolvedURL = new URL(this.resolved) + if (this.replaceRegistryHost !== 'never' + && (this.replaceRegistryHost === 'always' + || this.replaceRegistryHost === resolvedURL.host)) { + this.resolved = new URL(resolvedURL.pathname, this.registry).href + } + + // nam is a fermented pork sausage that is good to eat + const 
nameat = this.spec.name ? `${this.spec.name}@` : '' + this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}` + } + + // Don't need to cache tarball fetches in pacote, because make-fetch-happen + // will write into cacache anyway. + get [_.cacheFetches] () { + return false + } + + [_.tarballFromResolved] () { + const stream = new Minipass() + stream.hasIntegrityEmitter = true + + const fetchOpts = { + ...this.opts, + headers: this.#headers(), + spec: this.spec, + integrity: this.integrity, + algorithms: [this.pickIntegrityAlgorithm()], + } + + // eslint-disable-next-line promise/always-return + fetch(this.resolved, fetchOpts).then(res => { + res.body.on('error', + /* istanbul ignore next - exceedingly rare and hard to simulate */ + er => stream.emit('error', er) + ) + + res.body.on('integrity', i => { + this.integrity = i + stream.emit('integrity', i) + }) + + res.body.pipe(stream) + }).catch(er => stream.emit('error', er)) + + return stream + } + + #headers () { + return { + // npm will override this, but ensure that we always send *something* + 'user-agent': this.opts.userAgent || + `pacote/${pacoteVersion} node/${process.version}`, + ...(this.opts.headers || {}), + 'pacote-version': pacoteVersion, + 'pacote-req-type': 'tarball', + 'pacote-pkg-id': this.pkgid, + ...(this.integrity ? { 'pacote-integrity': String(this.integrity) } + : {}), + ...(this.opts.headers || {}), + } + } + + get types () { + return ['remote'] + } + + // getting a packument and/or manifest is the same as with a file: spec. + // unpack the tarball stream, and then read from the package.json file. + packument () { + return FileFetcher.prototype.packument.apply(this) + } + + manifest () { + return FileFetcher.prototype.manifest.apply(this) + } +} +module.exports = RemoteFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js new file mode 100644 index 0000000000000..843fe5b600caf --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js @@ -0,0 +1,15 @@ +// add a sha to a git remote url spec +const addGitSha = (spec, sha) => { + if (spec.hosted) { + const h = spec.hosted + const opt = { noCommittish: true } + const base = h.https && h.auth ? h.https(opt) : h.shortcut(opt) + + return `${base}#${sha}` + } else { + // don't use new URL for this, because it doesn't handle scp urls + return spec.rawSpec.replace(/#.*$/, '') + `#${sha}` + } +} + +module.exports = addGitSha diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js new file mode 100644 index 0000000000000..ba5683a7bb5bf --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js @@ -0,0 +1,15 @@ +const { resolve } = require('node:path') +const { tmpdir, homedir } = require('node:os') + +module.exports = (fakePlatform = false) => { + const temp = tmpdir() + const uidOrPid = process.getuid ? process.getuid() : process.pid + const home = homedir() || resolve(temp, 'npm-' + uidOrPid) + const platform = fakePlatform || process.platform + const cacheExtra = platform === 'win32' ? 
'npm-cache' : '.npm' + const cacheRoot = (platform === 'win32' && process.env.LOCALAPPDATA) || home + return { + cacache: resolve(cacheRoot, cacheExtra, '_cacache'), + tufcache: resolve(cacheRoot, cacheExtra, '_tuf'), + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js new file mode 100644 index 0000000000000..49a3f73f537ce --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js @@ -0,0 +1,25 @@ +// Function to determine whether a path is in the package.bin set. +// Used to prevent issues when people publish a package from a +// windows machine, and then install with --no-bin-links. +// +// Note: this is not possible in remote or file fetchers, since +// we don't have the manifest until AFTER we've unpacked. But the +// main use case is registry fetching with git a distant second, +// so that's an acceptable edge case to not handle. + +const binObj = (name, bin) => + typeof bin === 'string' ? { [name]: bin } : bin + +const hasBin = (pkg, path) => { + const bin = binObj(pkg.name, pkg.bin) + const p = path.replace(/^[^\\/]*\//, '') + for (const kv of Object.entries(bin)) { + if (kv[1] === p) { + return true + } + } + return false +} + +module.exports = (pkg, path) => + pkg && pkg.bin ? hasBin(pkg, path) : false diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js new file mode 100644 index 0000000000000..a3005c255565f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js @@ -0,0 +1,14 @@ +// run an npm command +const spawn = require('@npmcli/promise-spawn') + +module.exports = (npmBin, npmCommand, cwd, env, extra) => { + const isJS = npmBin.endsWith('.js') + const cmd = isJS ? process.execPath : npmBin + const args = (isJS ? [npmBin] : []).concat(npmCommand) + // when installing to run the `prepare` script for a git dep, we need + // to ensure that we don't run into a cycle of checking out packages + // in temp directories. this lets us link previously-seen repos that + // are also being prepared. 
+ + return spawn(cmd, args, { cwd, env }, extra) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/protected.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/protected.js new file mode 100644 index 0000000000000..e05203b481e6a --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/protected.js @@ -0,0 +1,5 @@ +module.exports = { + cacheFetches: Symbol.for('pacote.Fetcher._cacheFetches'), + readPackageJson: Symbol.for('package.Fetcher._readPackageJson'), + tarballFromResolved: Symbol.for('pacote.Fetcher._tarballFromResolved'), +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js new file mode 100644 index 0000000000000..d070f0f7ba2d4 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js @@ -0,0 +1,31 @@ +const isPackageBin = require('./is-package-bin.js') + +const tarCreateOptions = manifest => ({ + cwd: manifest._resolved, + prefix: 'package/', + portable: true, + gzip: { + // forcing the level to 9 seems to avoid some + // platform specific optimizations that cause + // integrity mismatch errors due to differing + // end results after compression + level: 9, + }, + + // ensure that package bins are always executable + // Note that npm-packlist is already filtering out + // anything that is not a regular file, ignored by + // .npmignore or package.json "files", etc. + filter: (path, stat) => { + if (isPackageBin(manifest, path)) { + stat.mode |= 0o111 + } + return true + }, + + // Provide a specific date in the 1980s for the benefit of zip, + // which is confounded by files dated at the Unix epoch 0. 
+ mtime: new Date('1985-10-26T08:15:00.000Z'), +}) + +module.exports = tarCreateOptions diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js new file mode 100644 index 0000000000000..c50cb6173b92e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js @@ -0,0 +1,10 @@ +const removeTrailingSlashes = (input) => { + // in order to avoid regexp redos detection + let output = input + while (output.endsWith('/')) { + output = output.slice(0, -1) + } + return output +} + +module.exports = removeTrailingSlashes diff --git a/node_modules/pacote/node_modules/cacache/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json similarity index 50% rename from node_modules/pacote/node_modules/cacache/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json index 6e6219158ed75..caadaf2db50c8 100644 --- a/node_modules/pacote/node_modules/cacache/package.json +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json @@ -1,82 +1,78 @@ { - "name": "cacache", - "version": "18.0.4", - "cache-version": { - "content": "2", - "index": "5" + "name": "pacote", + "version": "18.0.6", + "description": "JavaScript package downloader", + "author": "GitHub Inc.", + "bin": { + "pacote": "bin/index.js" }, - "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "license": "ISC", "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], "scripts": { "test": "tap", "snap": "tap", - "coverage": "tap", - "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "npmclilint": "npmcli-lint", - "lintfix": "npm run lint -- --fix", - "postsnap": "npm run lintfix --", "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", "posttest": "npm run lint", "template-oss-apply": "template-oss-apply --force" }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/cacache.git" + "tap": { + "timeout": 300, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] }, + "devDependencies": { + "@npmcli/arborist": "^7.1.0", + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "hosted-git-info": "^7.0.0", + "mutate-fs": "^2.1.1", + "nock": "^13.2.4", + "npm-registry-mock": "^1.3.2", + "tap": "^16.0.1" + }, + "files": [ + "bin/", + "lib/" + ], "keywords": [ - "cache", - "caching", - "content-addressable", - "sri", - "sri hash", - "subresource integrity", - "cache", - "storage", - "store", - "file store", - "filesystem", - "disk cache", - "disk storage" + "packages", + "npm", + "git" ], - "license": "ISC", "dependencies": { - "@npmcli/fs": "^3.1.0", + "@npmcli/git": "^5.0.0", + "@npmcli/installed-package-contents": "^2.0.1", + "@npmcli/package-json": "^5.1.0", + "@npmcli/promise-spawn": "^7.0.0", + "@npmcli/run-script": "^8.0.0", + "cacache": "^18.0.0", "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^10.0.1", - "minipass": "^7.0.3", - "minipass-collect": "^2.0.1", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", + "minipass": "^7.0.2", + "npm-package-arg": "^11.0.0", + "npm-packlist": "^8.0.0", + "npm-pick-manifest": "^9.0.0", + "npm-registry-fetch": "^17.0.0", + "proc-log": "^4.0.0", + "promise-retry": 
"^2.0.1", + "sigstore": "^2.2.0", "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "tap": "^16.0.0" + "tar": "^6.1.11" }, "engines": { "node": "^16.14.0 || >=18.0.0" }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/pacote.git" + }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "windowsCI": false, "version": "4.22.0", + "windowsCI": false, "publish": "true" - }, - "author": "GitHub Inc.", - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] } } diff --git a/node_modules/pacote/node_modules/@npmcli/fs/lib/common/get-options.js b/node_modules/pacote/node_modules/@npmcli/fs/lib/common/get-options.js deleted file mode 100644 index cb5982f79077a..0000000000000 --- a/node_modules/pacote/node_modules/@npmcli/fs/lib/common/get-options.js +++ /dev/null @@ -1,20 +0,0 @@ -// given an input that may or may not be an object, return an object that has -// a copy of every defined property listed in 'copy'. if the input is not an -// object, assign it to the property named by 'wrap' -const getOptions = (input, { copy, wrap }) => { - const result = {} - - if (input && typeof input === 'object') { - for (const prop of copy) { - if (input[prop] !== undefined) { - result[prop] = input[prop] - } - } - } else { - result[wrap] = input - } - - return result -} - -module.exports = getOptions diff --git a/node_modules/pacote/node_modules/@npmcli/fs/lib/common/node.js b/node_modules/pacote/node_modules/@npmcli/fs/lib/common/node.js deleted file mode 100644 index 4d13bc037359d..0000000000000 --- a/node_modules/pacote/node_modules/@npmcli/fs/lib/common/node.js +++ /dev/null @@ -1,9 +0,0 @@ -const semver = require('semver') - -const satisfies = (range) => { - return semver.satisfies(process.version, range, { includePrerelease: true }) -} - -module.exports = { - satisfies, -} diff --git a/node_modules/pacote/node_modules/@npmcli/fs/lib/cp/LICENSE b/node_modules/pacote/node_modules/@npmcli/fs/lib/cp/LICENSE deleted file mode 100644 index 93546dfb7655b..0000000000000 --- a/node_modules/pacote/node_modules/@npmcli/fs/lib/cp/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -(The MIT License) - -Copyright (c) 2011-2017 JP Richardson - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/pacote/node_modules/@npmcli/fs/lib/cp/errors.js b/node_modules/pacote/node_modules/@npmcli/fs/lib/cp/errors.js deleted file mode 100644 index 1cd1e05d0c533..0000000000000 --- a/node_modules/pacote/node_modules/@npmcli/fs/lib/cp/errors.js +++ /dev/null @@ -1,129 +0,0 @@ -'use strict' -const { inspect } = require('util') - -// adapted from node's internal/errors -// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js - -// close copy of node's internal SystemError class. -class SystemError { - constructor (code, prefix, context) { - // XXX context.code is undefined in all constructors used in cp/polyfill - // that may be a bug copied from node, maybe the constructor should use - // `code` not `errno`? nodejs/node#41104 - let message = `${prefix}: ${context.syscall} returned ` + - `${context.code} (${context.message})` - - if (context.path !== undefined) { - message += ` ${context.path}` - } - if (context.dest !== undefined) { - message += ` => ${context.dest}` - } - - this.code = code - Object.defineProperties(this, { - name: { - value: 'SystemError', - enumerable: false, - writable: true, - configurable: true, - }, - message: { - value: message, - enumerable: false, - writable: true, - configurable: true, - }, - info: { - value: context, - enumerable: true, - configurable: true, - writable: false, - }, - errno: { - get () { - return context.errno - }, - set (value) { - context.errno = value - }, - enumerable: true, - configurable: true, - }, - syscall: { - get () { - return context.syscall - }, - set (value) { - context.syscall = value - }, - enumerable: true, - configurable: true, - }, - }) - - if (context.path !== undefined) { - Object.defineProperty(this, 'path', { - get () { - return context.path - }, - set (value) { - context.path = value - }, - enumerable: true, - configurable: true, - }) - } - - if (context.dest !== undefined) { - Object.defineProperty(this, 'dest', { - get () { - return context.dest - }, - set (value) { - context.dest = value - }, - enumerable: true, - configurable: true, - }) - } - } - - toString () { - return `${this.name} [${this.code}]: ${this.message}` - } - - [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { - return inspect(this, { - ...ctx, - getters: true, - customInspect: false, - }) - } -} - -function E (code, message) { - module.exports[code] = class NodeError extends SystemError { - constructor (ctx) { - super(code, message, ctx) - } - } -} - -E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') -E('ERR_FS_CP_EEXIST', 'Target already exists') -E('ERR_FS_CP_EINVAL', 'Invalid src or dest') -E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') -E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') -E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') -E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') -E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') -E('ERR_FS_EISDIR', 'Path is a directory') - -module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { - constructor (name, expected, actual) { - super() - this.code = 'ERR_INVALID_ARG_TYPE' - this.message = `The ${name} argument must be ${expected}. 
Received ${typeof actual}` - } -} diff --git a/node_modules/pacote/node_modules/@npmcli/fs/lib/cp/index.js b/node_modules/pacote/node_modules/@npmcli/fs/lib/cp/index.js deleted file mode 100644 index 972ce7aa12abe..0000000000000 --- a/node_modules/pacote/node_modules/@npmcli/fs/lib/cp/index.js +++ /dev/null @@ -1,22 +0,0 @@ -const fs = require('fs/promises') -const getOptions = require('../common/get-options.js') -const node = require('../common/node.js') -const polyfill = require('./polyfill.js') - -// node 16.7.0 added fs.cp -const useNative = node.satisfies('>=16.7.0') - -const cp = async (src, dest, opts) => { - const options = getOptions(opts, { - copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], - }) - - // the polyfill is tested separately from this module, no need to hack - // process.version to try to trigger it just for coverage - // istanbul ignore next - return useNative - ? fs.cp(src, dest, options) - : polyfill(src, dest, options) -} - -module.exports = cp diff --git a/node_modules/pacote/node_modules/@npmcli/fs/lib/cp/polyfill.js b/node_modules/pacote/node_modules/@npmcli/fs/lib/cp/polyfill.js deleted file mode 100644 index 80eb10de97191..0000000000000 --- a/node_modules/pacote/node_modules/@npmcli/fs/lib/cp/polyfill.js +++ /dev/null @@ -1,428 +0,0 @@ -// this file is a modified version of the code in node 17.2.0 -// which is, in turn, a modified version of the fs-extra module on npm -// node core changes: -// - Use of the assert module has been replaced with core's error system. -// - All code related to the glob dependency has been removed. -// - Bring your own custom fs module is not currently supported. -// - Some basic code cleanup. -// changes here: -// - remove all callback related code -// - drop sync support -// - change assertions back to non-internal methods (see options.js) -// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows -'use strict' - -const { - ERR_FS_CP_DIR_TO_NON_DIR, - ERR_FS_CP_EEXIST, - ERR_FS_CP_EINVAL, - ERR_FS_CP_FIFO_PIPE, - ERR_FS_CP_NON_DIR_TO_DIR, - ERR_FS_CP_SOCKET, - ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, - ERR_FS_CP_UNKNOWN, - ERR_FS_EISDIR, - ERR_INVALID_ARG_TYPE, -} = require('./errors.js') -const { - constants: { - errno: { - EEXIST, - EISDIR, - EINVAL, - ENOTDIR, - }, - }, -} = require('os') -const { - chmod, - copyFile, - lstat, - mkdir, - readdir, - readlink, - stat, - symlink, - unlink, - utimes, -} = require('fs/promises') -const { - dirname, - isAbsolute, - join, - parse, - resolve, - sep, - toNamespacedPath, -} = require('path') -const { fileURLToPath } = require('url') - -const defaultOptions = { - dereference: false, - errorOnExist: false, - filter: undefined, - force: true, - preserveTimestamps: false, - recursive: false, -} - -async function cp (src, dest, opts) { - if (opts != null && typeof opts !== 'object') { - throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) - } - return cpFn( - toNamespacedPath(getValidatedPath(src)), - toNamespacedPath(getValidatedPath(dest)), - { ...defaultOptions, ...opts }) -} - -function getValidatedPath (fileURLOrPath) { - const path = fileURLOrPath != null && fileURLOrPath.href - && fileURLOrPath.origin - ? 
fileURLToPath(fileURLOrPath) - : fileURLOrPath - return path -} - -async function cpFn (src, dest, opts) { - // Warn about using preserveTimestamps on 32-bit node - // istanbul ignore next - if (opts.preserveTimestamps && process.arch === 'ia32') { - const warning = 'Using the preserveTimestamps option in 32-bit ' + - 'node is not recommended' - process.emitWarning(warning, 'TimestampPrecisionWarning') - } - const stats = await checkPaths(src, dest, opts) - const { srcStat, destStat } = stats - await checkParentPaths(src, srcStat, dest) - if (opts.filter) { - return handleFilter(checkParentDir, destStat, src, dest, opts) - } - return checkParentDir(destStat, src, dest, opts) -} - -async function checkPaths (src, dest, opts) { - const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) - if (destStat) { - if (areIdentical(srcStat, destStat)) { - throw new ERR_FS_CP_EINVAL({ - message: 'src and dest cannot be the same', - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - throw new ERR_FS_CP_DIR_TO_NON_DIR({ - message: `cannot overwrite directory ${src} ` + - `with non-directory ${dest}`, - path: dest, - syscall: 'cp', - errno: EISDIR, - }) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - throw new ERR_FS_CP_NON_DIR_TO_DIR({ - message: `cannot overwrite non-directory ${src} ` + - `with directory ${dest}`, - path: dest, - syscall: 'cp', - errno: ENOTDIR, - }) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${src} to a subdirectory of self ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return { srcStat, destStat } -} - -function areIdentical (srcStat, destStat) { - return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && - destStat.dev === srcStat.dev -} - -function getStats (src, dest, opts) { - const statFunc = opts.dereference ? - (file) => stat(file, { bigint: true }) : - (file) => lstat(file, { bigint: true }) - return Promise.all([ - statFunc(src), - statFunc(dest).catch((err) => { - // istanbul ignore next: unsure how to cover. - if (err.code === 'ENOENT') { - return null - } - // istanbul ignore next: unsure how to cover. - throw err - }), - ]) -} - -async function checkParentDir (destStat, src, dest, opts) { - const destParent = dirname(dest) - const dirExists = await pathExists(destParent) - if (dirExists) { - return getStatsForCopy(destStat, src, dest, opts) - } - await mkdir(destParent, { recursive: true }) - return getStatsForCopy(destStat, src, dest, opts) -} - -function pathExists (dest) { - return stat(dest).then( - () => true, - // istanbul ignore next: not sure when this would occur - (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) -} - -// Recursively check if dest parent is a subdirectory of src. -// It works for all file types including symlinks since it -// checks the src and dest inodes. It starts from the deepest -// parent and stops once it reaches the src parent or the root path. 
-async function checkParentPaths (src, srcStat, dest) { - const srcParent = resolve(dirname(src)) - const destParent = resolve(dirname(dest)) - if (destParent === srcParent || destParent === parse(destParent).root) { - return - } - let destStat - try { - destStat = await stat(destParent, { bigint: true }) - } catch (err) { - // istanbul ignore else: not sure when this would occur - if (err.code === 'ENOENT') { - return - } - // istanbul ignore next: not sure when this would occur - throw err - } - if (areIdentical(srcStat, destStat)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${src} to a subdirectory of self ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return checkParentPaths(src, srcStat, destParent) -} - -const normalizePathToArray = (path) => - resolve(path).split(sep).filter(Boolean) - -// Return true if dest is a subdir of src, otherwise false. -// It only checks the path strings. -function isSrcSubdir (src, dest) { - const srcArr = normalizePathToArray(src) - const destArr = normalizePathToArray(dest) - return srcArr.every((cur, i) => destArr[i] === cur) -} - -async function handleFilter (onInclude, destStat, src, dest, opts, cb) { - const include = await opts.filter(src, dest) - if (include) { - return onInclude(destStat, src, dest, opts, cb) - } -} - -function startCopy (destStat, src, dest, opts) { - if (opts.filter) { - return handleFilter(getStatsForCopy, destStat, src, dest, opts) - } - return getStatsForCopy(destStat, src, dest, opts) -} - -async function getStatsForCopy (destStat, src, dest, opts) { - const statFn = opts.dereference ? stat : lstat - const srcStat = await statFn(src) - // istanbul ignore else: can't portably test FIFO - if (srcStat.isDirectory() && opts.recursive) { - return onDir(srcStat, destStat, src, dest, opts) - } else if (srcStat.isDirectory()) { - throw new ERR_FS_EISDIR({ - message: `${src} is a directory (not copied)`, - path: src, - syscall: 'cp', - errno: EINVAL, - }) - } else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) { - return onFile(srcStat, destStat, src, dest, opts) - } else if (srcStat.isSymbolicLink()) { - return onLink(destStat, src, dest) - } else if (srcStat.isSocket()) { - throw new ERR_FS_CP_SOCKET({ - message: `cannot copy a socket file: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } else if (srcStat.isFIFO()) { - throw new ERR_FS_CP_FIFO_PIPE({ - message: `cannot copy a FIFO pipe: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - // istanbul ignore next: should be unreachable - throw new ERR_FS_CP_UNKNOWN({ - message: `cannot copy an unknown file type: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) -} - -function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) { - return _copyFile(srcStat, src, dest, opts) - } - return mayCopyFile(srcStat, src, dest, opts) -} - -async function mayCopyFile (srcStat, src, dest, opts) { - if (opts.force) { - await unlink(dest) - return _copyFile(srcStat, src, dest, opts) - } else if (opts.errorOnExist) { - throw new ERR_FS_CP_EEXIST({ - message: `${dest} already exists`, - path: dest, - syscall: 'cp', - errno: EEXIST, - }) - } -} - -async function _copyFile (srcStat, src, dest, opts) { - await copyFile(src, dest) - if (opts.preserveTimestamps) { - return handleTimestampsAndMode(srcStat.mode, src, dest) - } - return setDestMode(dest, srcStat.mode) -} - -async function handleTimestampsAndMode (srcMode, src, dest) { - // Make sure the file is writable before 
setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) { - await makeFileWritable(dest, srcMode) - return setDestTimestampsAndMode(srcMode, src, dest) - } - return setDestTimestampsAndMode(srcMode, src, dest) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return setDestMode(dest, srcMode | 0o200) -} - -async function setDestTimestampsAndMode (srcMode, src, dest) { - await setDestTimestamps(src, dest) - return setDestMode(dest, srcMode) -} - -function setDestMode (dest, srcMode) { - return chmod(dest, srcMode) -} - -async function setDestTimestamps (src, dest) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = await stat(src) - return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) -} - -function onDir (srcStat, destStat, src, dest, opts) { - if (!destStat) { - return mkDirAndCopy(srcStat.mode, src, dest, opts) - } - return copyDir(src, dest, opts) -} - -async function mkDirAndCopy (srcMode, src, dest, opts) { - await mkdir(dest) - await copyDir(src, dest, opts) - return setDestMode(dest, srcMode) -} - -async function copyDir (src, dest, opts) { - const dir = await readdir(src) - for (let i = 0; i < dir.length; i++) { - const item = dir[i] - const srcItem = join(src, item) - const destItem = join(dest, item) - const { destStat } = await checkPaths(srcItem, destItem, opts) - await startCopy(destStat, srcItem, destItem, opts) - } -} - -async function onLink (destStat, src, dest) { - let resolvedSrc = await readlink(src) - if (!isAbsolute(resolvedSrc)) { - resolvedSrc = resolve(dirname(src), resolvedSrc) - } - if (!destStat) { - return symlink(resolvedSrc, dest) - } - let resolvedDest - try { - resolvedDest = await readlink(dest) - } catch (err) { - // Dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. - // istanbul ignore next: can only test on windows - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { - return symlink(resolvedSrc, dest) - } - // istanbul ignore next: should not be possible - throw err - } - if (!isAbsolute(resolvedDest)) { - resolvedDest = resolve(dirname(dest), resolvedDest) - } - if (isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + - `${resolvedDest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - // Do not copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. 
- const srcStat = await stat(src) - if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ - message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return copyLink(resolvedSrc, dest) -} - -async function copyLink (resolvedSrc, dest) { - await unlink(dest) - return symlink(resolvedSrc, dest) -} - -module.exports = cp diff --git a/node_modules/pacote/node_modules/@npmcli/fs/lib/index.js b/node_modules/pacote/node_modules/@npmcli/fs/lib/index.js deleted file mode 100644 index 81c746304cc42..0000000000000 --- a/node_modules/pacote/node_modules/@npmcli/fs/lib/index.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -const cp = require('./cp/index.js') -const withTempDir = require('./with-temp-dir.js') -const readdirScoped = require('./readdir-scoped.js') -const moveFile = require('./move-file.js') - -module.exports = { - cp, - withTempDir, - readdirScoped, - moveFile, -} diff --git a/node_modules/pacote/node_modules/@npmcli/fs/lib/move-file.js b/node_modules/pacote/node_modules/@npmcli/fs/lib/move-file.js deleted file mode 100644 index d56e06d384659..0000000000000 --- a/node_modules/pacote/node_modules/@npmcli/fs/lib/move-file.js +++ /dev/null @@ -1,78 +0,0 @@ -const { dirname, join, resolve, relative, isAbsolute } = require('path') -const fs = require('fs/promises') - -const pathExists = async path => { - try { - await fs.access(path) - return true - } catch (er) { - return er.code !== 'ENOENT' - } -} - -const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { - if (!source || !destination) { - throw new TypeError('`source` and `destination` file required') - } - - options = { - overwrite: true, - ...options, - } - - if (!options.overwrite && await pathExists(destination)) { - throw new Error(`The destination file exists: ${destination}`) - } - - await fs.mkdir(dirname(destination), { recursive: true }) - - try { - await fs.rename(source, destination) - } catch (error) { - if (error.code === 'EXDEV' || error.code === 'EPERM') { - const sourceStat = await fs.lstat(source) - if (sourceStat.isDirectory()) { - const files = await fs.readdir(source) - await Promise.all(files.map((file) => - moveFile(join(source, file), join(destination, file), options, false, symlinks) - )) - } else if (sourceStat.isSymbolicLink()) { - symlinks.push({ source, destination }) - } else { - await fs.copyFile(source, destination) - } - } else { - throw error - } - } - - if (root) { - await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { - let target = await fs.readlink(symSource) - // junction symlinks in windows will be absolute paths, so we need to - // make sure they point to the symlink destination - if (isAbsolute(target)) { - target = resolve(symDestination, relative(symSource, target)) - } - // try to determine what the actual file is so we can create the correct - // type of symlink in windows - let targetStat = 'file' - try { - targetStat = await fs.stat(resolve(dirname(symSource), target)) - if (targetStat.isDirectory()) { - targetStat = 'junction' - } - } catch { - // targetStat remains 'file' - } - await fs.symlink( - target, - symDestination, - targetStat - ) - })) - await fs.rm(source, { recursive: true, force: true }) - } -} - -module.exports = moveFile diff --git a/node_modules/pacote/node_modules/@npmcli/fs/lib/readdir-scoped.js b/node_modules/pacote/node_modules/@npmcli/fs/lib/readdir-scoped.js 
deleted file mode 100644 index cd601dfbe7486..0000000000000 --- a/node_modules/pacote/node_modules/@npmcli/fs/lib/readdir-scoped.js +++ /dev/null @@ -1,20 +0,0 @@ -const { readdir } = require('fs/promises') -const { join } = require('path') - -const readdirScoped = async (dir) => { - const results = [] - - for (const item of await readdir(dir)) { - if (item.startsWith('@')) { - for (const scopedItem of await readdir(join(dir, item))) { - results.push(join(item, scopedItem)) - } - } else { - results.push(item) - } - } - - return results -} - -module.exports = readdirScoped diff --git a/node_modules/pacote/node_modules/@npmcli/fs/lib/with-temp-dir.js b/node_modules/pacote/node_modules/@npmcli/fs/lib/with-temp-dir.js deleted file mode 100644 index 0738ac4f29e1b..0000000000000 --- a/node_modules/pacote/node_modules/@npmcli/fs/lib/with-temp-dir.js +++ /dev/null @@ -1,39 +0,0 @@ -const { join, sep } = require('path') - -const getOptions = require('./common/get-options.js') -const { mkdir, mkdtemp, rm } = require('fs/promises') - -// create a temp directory, ensure its permissions match its parent, then call -// the supplied function passing it the path to the directory. clean up after -// the function finishes, whether it throws or not -const withTempDir = async (root, fn, opts) => { - const options = getOptions(opts, { - copy: ['tmpPrefix'], - }) - // create the directory - await mkdir(root, { recursive: true }) - - const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || '')) - let err - let result - - try { - result = await fn(target) - } catch (_err) { - err = _err - } - - try { - await rm(target, { force: true, recursive: true }) - } catch { - // ignore errors - } - - if (err) { - throw err - } - - return result -} - -module.exports = withTempDir diff --git a/node_modules/pacote/node_modules/@npmcli/git/LICENSE b/node_modules/pacote/node_modules/@npmcli/git/LICENSE new file mode 100644 index 0000000000000..8f90f96f4c6c5 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, +OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/clone.js b/node_modules/pacote/node_modules/@npmcli/git/lib/clone.js new file mode 100644 index 0000000000000..e25a4d1426821 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/clone.js @@ -0,0 +1,172 @@ +// The goal here is to minimize both git workload and +// the number of refs we download over the network. +// +// Every method ends up with the checked out working dir +// at the specified ref, and resolves with the git sha. + +// Only certain whitelisted hosts get shallow cloning. +// Many hosts (including GHE) don't always support it. +// A failed shallow fetch takes a LOT longer than a full +// fetch in most cases, so we skip it entirely. 
+// Set opts.gitShallow = true/false to force this behavior +// one way or the other. +const shallowHosts = new Set([ + 'github.com', + 'gist.github.com', + 'gitlab.com', + 'bitbucket.com', + 'bitbucket.org', +]) +// we have to use url.parse until we add the same shim that hosted-git-info has +// to handle scp:// urls +const { parse } = require('url') // eslint-disable-line node/no-deprecated-api +const path = require('path') + +const getRevs = require('./revs.js') +const spawn = require('./spawn.js') +const { isWindows } = require('./utils.js') + +const pickManifest = require('npm-pick-manifest') +const fs = require('fs/promises') + +module.exports = (repo, ref = 'HEAD', target = null, opts = {}) => + getRevs(repo, opts).then(revs => clone( + repo, + revs, + ref, + resolveRef(revs, ref, opts), + target || defaultTarget(repo, opts.cwd), + opts + )) + +const maybeShallow = (repo, opts) => { + if (opts.gitShallow === false || opts.gitShallow) { + return opts.gitShallow + } + return shallowHosts.has(parse(repo).host) +} + +const defaultTarget = (repo, /* istanbul ignore next */ cwd = process.cwd()) => + path.resolve(cwd, path.basename(repo.replace(/[/\\]?\.git$/, ''))) + +const clone = (repo, revs, ref, revDoc, target, opts) => { + if (!revDoc) { + return unresolved(repo, ref, target, opts) + } + if (revDoc.sha === revs.refs.HEAD.sha) { + return plain(repo, revDoc, target, opts) + } + if (revDoc.type === 'tag' || revDoc.type === 'branch') { + return branch(repo, revDoc, target, opts) + } + return other(repo, revDoc, target, opts) +} + +const resolveRef = (revs, ref, opts) => { + const { spec = {} } = opts + ref = spec.gitCommittish || ref + /* istanbul ignore next - will fail anyway, can't pull */ + if (!revs) { + return null + } + if (spec.gitRange) { + return pickManifest(revs, spec.gitRange, opts) + } + if (!ref) { + return revs.refs.HEAD + } + if (revs.refs[ref]) { + return revs.refs[ref] + } + if (revs.shas[ref]) { + return revs.refs[revs.shas[ref][0]] + } + return null +} + +// pull request or some other kind of advertised ref +const other = (repo, revDoc, target, opts) => { + const shallow = maybeShallow(repo, opts) + + const fetchOrigin = ['fetch', 'origin', revDoc.rawRef] + .concat(shallow ? ['--depth=1'] : []) + + const git = (args) => spawn(args, { ...opts, cwd: target }) + return fs.mkdir(target, { recursive: true }) + .then(() => git(['init'])) + .then(() => isWindows(opts) + ? git(['config', '--local', '--add', 'core.longpaths', 'true']) + : null) + .then(() => git(['remote', 'add', 'origin', repo])) + .then(() => git(fetchOrigin)) + .then(() => git(['checkout', revDoc.sha])) + .then(() => updateSubmodules(target, opts)) + .then(() => revDoc.sha) +} + +// tag or branches. use -b +const branch = (repo, revDoc, target, opts) => { + const args = [ + 'clone', + '-b', + revDoc.ref, + repo, + target, + '--recurse-submodules', + ] + if (maybeShallow(repo, opts)) { + args.push('--depth=1') + } + if (isWindows(opts)) { + args.push('--config', 'core.longpaths=true') + } + return spawn(args, opts).then(() => revDoc.sha) +} + +// just the head. 
clone it +const plain = (repo, revDoc, target, opts) => { + const args = [ + 'clone', + repo, + target, + '--recurse-submodules', + ] + if (maybeShallow(repo, opts)) { + args.push('--depth=1') + } + if (isWindows(opts)) { + args.push('--config', 'core.longpaths=true') + } + return spawn(args, opts).then(() => revDoc.sha) +} + +const updateSubmodules = async (target, opts) => { + const hasSubmodules = await fs.stat(`${target}/.gitmodules`) + .then(() => true) + .catch(() => false) + if (!hasSubmodules) { + return null + } + return spawn([ + 'submodule', + 'update', + '-q', + '--init', + '--recursive', + ], { ...opts, cwd: target }) +} + +const unresolved = (repo, ref, target, opts) => { + // can't do this one shallowly, because the ref isn't advertised + // but we can avoid checking out the working dir twice, at least + const lp = isWindows(opts) ? ['--config', 'core.longpaths=true'] : [] + const cloneArgs = ['clone', '--mirror', '-q', repo, target + '/.git'] + const git = (args) => spawn(args, { ...opts, cwd: target }) + return fs.mkdir(target, { recursive: true }) + .then(() => git(cloneArgs.concat(lp))) + .then(() => git(['init'])) + .then(() => git(['checkout', ref])) + .then(() => updateSubmodules(target, opts)) + .then(() => git(['rev-parse', '--revs-only', 'HEAD'])) + .then(({ stdout }) => stdout.trim()) +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/errors.js b/node_modules/pacote/node_modules/@npmcli/git/lib/errors.js new file mode 100644 index 0000000000000..3ceaa45811669 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/errors.js @@ -0,0 +1,36 @@ + +const maxRetry = 3 + +class GitError extends Error { + shouldRetry () { + return false + } +} + +class GitConnectionError extends GitError { + constructor () { + super('A git connection error occurred') + } + + shouldRetry (number) { + return number < maxRetry + } +} + +class GitPathspecError extends GitError { + constructor () { + super('The git reference could not be found') + } +} + +class GitUnknownError extends GitError { + constructor () { + super('An unknown git error occurred') + } +} + +module.exports = { + GitConnectionError, + GitPathspecError, + GitUnknownError, +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/find.js b/node_modules/pacote/node_modules/@npmcli/git/lib/find.js new file mode 100644 index 0000000000000..34bd310b88e5d --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/find.js @@ -0,0 +1,15 @@ +const is = require('./is.js') +const { dirname } = require('path') + +module.exports = async ({ cwd = process.cwd(), root } = {}) => { + while (true) { + if (await is({ cwd })) { + return cwd + } + const next = dirname(cwd) + if (cwd === root || cwd === next) { + return null + } + cwd = next + } +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/index.js b/node_modules/pacote/node_modules/@npmcli/git/lib/index.js new file mode 100644 index 0000000000000..10a65f782e6da --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/index.js @@ -0,0 +1,9 @@ +module.exports = { + clone: require('./clone.js'), + revs: require('./revs.js'), + spawn: require('./spawn.js'), + is: require('./is.js'), + find: require('./find.js'), + isClean: require('./is-clean.js'), + errors: require('./errors.js'), +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js b/node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js new file mode 100644 index 0000000000000..182373be94193 --- /dev/null +++ 
b/node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js @@ -0,0 +1,6 @@ +const spawn = require('./spawn.js') + +module.exports = (opts = {}) => + spawn(['status', '--porcelain=v1', '-uno'], opts) + .then(res => !res.stdout.trim().split(/\r?\n+/) + .map(l => l.trim()).filter(l => l).length) diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/is.js b/node_modules/pacote/node_modules/@npmcli/git/lib/is.js new file mode 100644 index 0000000000000..f5a0e8754f10d --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/is.js @@ -0,0 +1,4 @@ +// not an airtight indicator, but a good gut-check to even bother trying +const { stat } = require('fs/promises') +module.exports = ({ cwd = process.cwd() } = {}) => + stat(cwd + '/.git').then(() => true, () => false) diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js b/node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js new file mode 100644 index 0000000000000..6bd7e7a4c1531 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js @@ -0,0 +1,147 @@ +// turn an array of lines from `git ls-remote` into a thing +// vaguely resembling a packument, where docs are a resolved ref + +const semver = require('semver') + +module.exports = lines => finish(lines.reduce(linesToRevsReducer, { + versions: {}, + 'dist-tags': {}, + refs: {}, + shas: {}, +})) + +const finish = revs => distTags(shaList(peelTags(revs))) + +// We can check out shallow clones on specific SHAs if we have a ref +const shaList = revs => { + Object.keys(revs.refs).forEach(ref => { + const doc = revs.refs[ref] + if (!revs.shas[doc.sha]) { + revs.shas[doc.sha] = [ref] + } else { + revs.shas[doc.sha].push(ref) + } + }) + return revs +} + +// Replace any tags with their ^{} counterparts, if those exist +const peelTags = revs => { + Object.keys(revs.refs).filter(ref => ref.endsWith('^{}')).forEach(ref => { + const peeled = revs.refs[ref] + const unpeeled = revs.refs[ref.replace(/\^\{\}$/, '')] + if (unpeeled) { + unpeeled.sha = peeled.sha + delete revs.refs[ref] + } + }) + return revs +} + +const distTags = revs => { + // not entirely sure what situations would result in an + // ichabod repo, but best to be careful in Sleepy Hollow anyway + const HEAD = revs.refs.HEAD || /* istanbul ignore next */ {} + const versions = Object.keys(revs.versions) + versions.forEach(v => { + // simulate a dist-tags with latest pointing at the + // 'latest' branch if one exists and is a version, + // or HEAD if not. + const ver = revs.versions[v] + if (revs.refs.latest && ver.sha === revs.refs.latest.sha) { + revs['dist-tags'].latest = v + } else if (ver.sha === HEAD.sha) { + revs['dist-tags'].HEAD = v + if (!revs.refs.latest) { + revs['dist-tags'].latest = v + } + } + }) + return revs +} + +const refType = ref => { + if (ref.startsWith('refs/tags/')) { + return 'tag' + } + if (ref.startsWith('refs/heads/')) { + return 'branch' + } + if (ref.startsWith('refs/pull/')) { + return 'pull' + } + if (ref === 'HEAD') { + return 'head' + } + // Could be anything, ignore for now + /* istanbul ignore next */ + return 'other' +} + +// return the doc, or null if we should ignore it. 
+const lineToRevDoc = line => { + const split = line.trim().split(/\s+/, 2) + if (split.length < 2) { + return null + } + + const sha = split[0].trim() + const rawRef = split[1].trim() + const type = refType(rawRef) + + if (type === 'tag') { + // refs/tags/foo^{} is the 'peeled tag', ie the commit + // that is tagged by refs/tags/foo they resolve to the same + // content, just different objects in git's data structure. + // But, we care about the thing the tag POINTS to, not the tag + // object itself, so we only look at the peeled tag refs, and + // ignore the pointer. + // For now, though, we have to save both, because some tags + // don't have peels, if they were not annotated. + const ref = rawRef.slice('refs/tags/'.length) + return { sha, ref, rawRef, type } + } + + if (type === 'branch') { + const ref = rawRef.slice('refs/heads/'.length) + return { sha, ref, rawRef, type } + } + + if (type === 'pull') { + // NB: merged pull requests installable with #pull/123/merge + // for the merged pr, or #pull/123 for the PR head + const ref = rawRef.slice('refs/'.length).replace(/\/head$/, '') + return { sha, ref, rawRef, type } + } + + if (type === 'head') { + const ref = 'HEAD' + return { sha, ref, rawRef, type } + } + + // at this point, all we can do is leave the ref un-munged + return { sha, ref: rawRef, rawRef, type } +} + +const linesToRevsReducer = (revs, line) => { + const doc = lineToRevDoc(line) + + if (!doc) { + return revs + } + + revs.refs[doc.ref] = doc + revs.refs[doc.rawRef] = doc + + if (doc.type === 'tag') { + // try to pull a semver value out of tags like `release-v1.2.3` + // which is a pretty common pattern. + const match = !doc.ref.endsWith('^{}') && + doc.ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/) + if (match && semver.valid(match[1], true)) { + revs.versions[semver.clean(match[1], true)] = doc + } + } + + return revs +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js b/node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js new file mode 100644 index 0000000000000..7540ec7c8b9f7 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js @@ -0,0 +1,33 @@ +const { + GitConnectionError, + GitPathspecError, + GitUnknownError, +} = require('./errors.js') + +const connectionErrorRe = new RegExp([ + 'remote error: Internal Server Error', + 'The remote end hung up unexpectedly', + 'Connection timed out', + 'Operation timed out', + 'Failed to connect to .* Timed out', + 'Connection reset by peer', + 'SSL_ERROR_SYSCALL', + 'The requested URL returned error: 503', +].join('|')) + +const missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/ + +function makeError (er) { + const message = er.stderr + let gitEr + if (connectionErrorRe.test(message)) { + gitEr = new GitConnectionError(message) + } else if (missingPathspecRe.test(message)) { + gitEr = new GitPathspecError(message) + } else { + gitEr = new GitUnknownError(message) + } + return Object.assign(gitEr, er) +} + +module.exports = makeError diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/opts.js b/node_modules/pacote/node_modules/@npmcli/git/lib/opts.js new file mode 100644 index 0000000000000..1e80e9efe4989 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/opts.js @@ -0,0 +1,57 @@ +const fs = require('node:fs') +const os = require('node:os') +const path = require('node:path') +const ini = require('ini') + +const gitConfigPath = path.join(os.homedir(), '.gitconfig') + +let cachedConfig = null + +// Function to load 
and cache the git config +const loadGitConfig = () => { + if (cachedConfig === null) { + try { + cachedConfig = {} + if (fs.existsSync(gitConfigPath)) { + const configContent = fs.readFileSync(gitConfigPath, 'utf-8') + cachedConfig = ini.parse(configContent) + } + } catch (error) { + cachedConfig = {} + } + } + return cachedConfig +} + +const checkGitConfigs = () => { + const config = loadGitConfig() + return { + sshCommandSetInConfig: config?.core?.sshCommand !== undefined, + askPassSetInConfig: config?.core?.askpass !== undefined, + } +} + +const sshCommandSetInEnv = process.env.GIT_SSH_COMMAND !== undefined +const askPassSetInEnv = process.env.GIT_ASKPASS !== undefined +const { sshCommandSetInConfig, askPassSetInConfig } = checkGitConfigs() + +// Values we want to set if they're not already defined by the end user +// This defaults to accepting new ssh host key fingerprints +const finalGitEnv = { + ...(askPassSetInEnv || askPassSetInConfig ? {} : { + GIT_ASKPASS: 'echo', + }), + ...(sshCommandSetInEnv || sshCommandSetInConfig ? {} : { + GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new', + }), +} + +module.exports = (opts = {}) => ({ + stdioString: true, + ...opts, + shell: false, + env: opts.env || { ...finalGitEnv, ...process.env }, +}) + +// Export the loadGitConfig function for testing +module.exports.loadGitConfig = loadGitConfig diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/revs.js b/node_modules/pacote/node_modules/@npmcli/git/lib/revs.js new file mode 100644 index 0000000000000..ca14837de1b87 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/revs.js @@ -0,0 +1,28 @@ +const pinflight = require('promise-inflight') +const spawn = require('./spawn.js') +const { LRUCache } = require('lru-cache') + +const revsCache = new LRUCache({ + max: 100, + ttl: 5 * 60 * 1000, +}) + +const linesToRevs = require('./lines-to-revs.js') + +module.exports = async (repo, opts = {}) => { + if (!opts.noGitRevCache) { + const cached = revsCache.get(repo) + if (cached) { + return cached + } + } + + return pinflight(`ls-remote:${repo}`, () => + spawn(['ls-remote', repo], opts) + .then(({ stdout }) => linesToRevs(stdout.trim().split('\n'))) + .then(revs => { + revsCache.set(repo, revs) + return revs + }) + ) +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js b/node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js new file mode 100644 index 0000000000000..03c1cbde21547 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js @@ -0,0 +1,44 @@ +const spawn = require('@npmcli/promise-spawn') +const promiseRetry = require('promise-retry') +const { log } = require('proc-log') +const makeError = require('./make-error.js') +const makeOpts = require('./opts.js') + +module.exports = (gitArgs, opts = {}) => { + const whichGit = require('./which.js') + const gitPath = whichGit(opts) + + if (gitPath instanceof Error) { + return Promise.reject(gitPath) + } + + // undocumented option, mostly only here for tests + const args = opts.allowReplace || gitArgs[0] === '--no-replace-objects' + ? 
gitArgs + : ['--no-replace-objects', ...gitArgs] + + let retryOpts = opts.retry + if (retryOpts === null || retryOpts === undefined) { + retryOpts = { + retries: opts.fetchRetries || 2, + factor: opts.fetchRetryFactor || 10, + maxTimeout: opts.fetchRetryMaxtimeout || 60000, + minTimeout: opts.fetchRetryMintimeout || 1000, + } + } + return promiseRetry((retryFn, number) => { + if (number !== 1) { + log.silly('git', `Retrying git command: ${ + args.join(' ')} attempt # ${number}`) + } + + return spawn(gitPath, args, makeOpts(opts)) + .catch(er => { + const gitError = makeError(er) + if (!gitError.shouldRetry(number)) { + throw gitError + } + retryFn(gitError) + }) + }, retryOpts) +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/utils.js b/node_modules/pacote/node_modules/@npmcli/git/lib/utils.js new file mode 100644 index 0000000000000..fcd9578a19597 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/utils.js @@ -0,0 +1,3 @@ +const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32' + +exports.isWindows = isWindows diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/which.js b/node_modules/pacote/node_modules/@npmcli/git/lib/which.js new file mode 100644 index 0000000000000..dc2a1ad212166 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/which.js @@ -0,0 +1,18 @@ +const which = require('which') + +let gitPath +try { + gitPath = which.sync('git') +} catch { + // ignore errors +} + +module.exports = (opts = {}) => { + if (opts.git) { + return opts.git + } + if (!gitPath || opts.git === false) { + return Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' }) + } + return gitPath +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/package.json b/node_modules/pacote/node_modules/@npmcli/git/package.json new file mode 100644 index 0000000000000..2bc6730ba2151 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/package.json @@ -0,0 +1,59 @@ +{ + "name": "@npmcli/git", + "version": "6.0.1", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "description": "a util for spawning git from npm CLI contexts", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/git.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "lint": "npm run eslint", + "snap": "tap", + "test": "tap", + "posttest": "npm run lint", + "postlint": "template-oss-check", + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "tap": { + "timeout": 600, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "npm-package-arg": "^11.0.0", + "slash": "^3.0.0", + "tap": "^16.0.1" + }, + "dependencies": { + "@npmcli/promise-spawn": "^8.0.0", + "ini": "^5.0.0", + "lru-cache": "^10.0.1", + "npm-pick-manifest": "^10.0.0", + "proc-log": "^5.0.0", + "promise-inflight": "^1.0.1", + "promise-retry": "^2.0.1", + "semver": "^7.3.5", + "which": "^5.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.23.3", + "publish": true + } +} diff --git a/node_modules/pacote/node_modules/unique-slug/LICENSE b/node_modules/pacote/node_modules/@npmcli/installed-package-contents/LICENSE similarity index 96% rename from node_modules/pacote/node_modules/unique-slug/LICENSE rename to node_modules/pacote/node_modules/@npmcli/installed-package-contents/LICENSE index 7953647e7760b..19cec97b18468 100644 --- a/node_modules/pacote/node_modules/unique-slug/LICENSE +++ b/node_modules/pacote/node_modules/@npmcli/installed-package-contents/LICENSE @@ -1,6 +1,6 @@ The ISC License -Copyright npm, Inc +Copyright (c) npm, Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above diff --git a/node_modules/pacote/node_modules/@npmcli/installed-package-contents/bin/index.js b/node_modules/pacote/node_modules/@npmcli/installed-package-contents/bin/index.js new file mode 100755 index 0000000000000..7b83b23bf168c --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/installed-package-contents/bin/index.js @@ -0,0 +1,44 @@ +#! /usr/bin/env node + +const { relative } = require('path') +const pkgContents = require('../') + +const usage = `Usage: + installed-package-contents [-d --depth=] + +Lists the files installed for a package specified by . + +Options: + -d --depth= Provide a numeric value ("Infinity" is allowed) + to specify how deep in the file tree to traverse. + Default=1 + -h --help Show this usage information` + +const options = {} + +process.argv.slice(2).forEach(arg => { + let match + if ((match = arg.match(/^(?:--depth=|-d)([0-9]+|Infinity)/))) { + options.depth = +match[1] + } else if (arg === '-h' || arg === '--help') { + console.log(usage) + process.exit(0) + } else { + options.path = arg + } +}) + +if (!options.path) { + console.error('ERROR: no path provided') + console.error(usage) + process.exit(1) +} + +const cwd = process.cwd() + +pkgContents(options) + .then(list => list.sort().forEach(p => console.log(relative(cwd, p)))) + .catch(/* istanbul ignore next - pretty unusual */ er => { + console.error(er) + process.exit(1) + }) diff --git a/node_modules/pacote/node_modules/@npmcli/installed-package-contents/lib/index.js b/node_modules/pacote/node_modules/@npmcli/installed-package-contents/lib/index.js new file mode 100644 index 0000000000000..ab1486cd01d00 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/installed-package-contents/lib/index.js @@ -0,0 +1,181 @@ +// to GET CONTENTS for folder at PATH (which may be a PACKAGE): +// - if PACKAGE, read path/package.json +// - if bins in ../node_modules/.bin, add those to result +// - if depth >= maxDepth, add PATH to result, and finish +// - readdir(PATH, with file types) +// - add all FILEs in PATH to result +// - if PARENT: +// - if depth < maxDepth, add GET CONTENTS of all DIRs in PATH +// - else, add all DIRs in PATH +// - if no parent +// - if no bundled deps, +// - if depth < maxDepth, add GET CONTENTS of DIRs in path except +// node_modules +// - else, add all DIRs in path other than node_modules +// - if has bundled deps, +// - get list of bundled deps +// - add GET CONTENTS of bundled deps, PACKAGE=true, depth + 1 + +const bundled = require('npm-bundled') +const { readFile, readdir, stat } = require('fs/promises') +const { resolve, basename, dirname } = require('path') +const normalizePackageBin = require('npm-normalize-package-bin') + +const readPackage = ({ path, packageJsonCache }) => 
packageJsonCache.has(path) + ? Promise.resolve(packageJsonCache.get(path)) + : readFile(path).then(json => { + const pkg = normalizePackageBin(JSON.parse(json)) + packageJsonCache.set(path, pkg) + return pkg + }).catch(() => null) + +// just normalize bundle deps and bin, that's all we care about here. +const normalized = Symbol('package data has been normalized') +const rpj = ({ path, packageJsonCache }) => readPackage({ path, packageJsonCache }) + .then(pkg => { + if (!pkg || pkg[normalized]) { + return pkg + } + if (pkg.bundledDependencies && !pkg.bundleDependencies) { + pkg.bundleDependencies = pkg.bundledDependencies + delete pkg.bundledDependencies + } + const bd = pkg.bundleDependencies + if (bd === true) { + pkg.bundleDependencies = [ + ...Object.keys(pkg.dependencies || {}), + ...Object.keys(pkg.optionalDependencies || {}), + ] + } + if (typeof bd === 'object' && !Array.isArray(bd)) { + pkg.bundleDependencies = Object.keys(bd) + } + pkg[normalized] = true + return pkg + }) + +const pkgContents = async ({ + path, + depth = 1, + currentDepth = 0, + pkg = null, + result = null, + packageJsonCache = null, +}) => { + if (!result) { + result = new Set() + } + + if (!packageJsonCache) { + packageJsonCache = new Map() + } + + if (pkg === true) { + return rpj({ path: path + '/package.json', packageJsonCache }) + .then(p => pkgContents({ + path, + depth, + currentDepth, + pkg: p, + result, + packageJsonCache, + })) + } + + if (pkg) { + // add all bins to result if they exist + if (pkg.bin) { + const dir = dirname(path) + const scope = basename(dir) + const nm = /^@.+/.test(scope) ? dirname(dir) : dir + + const binFiles = [] + Object.keys(pkg.bin).forEach(b => { + const base = resolve(nm, '.bin', b) + binFiles.push(base, base + '.cmd', base + '.ps1') + }) + + const bins = await Promise.all( + binFiles.map(b => stat(b).then(() => b).catch(() => null)) + ) + bins.filter(b => b).forEach(b => result.add(b)) + } + } + + if (currentDepth >= depth) { + result.add(path) + return result + } + + // we'll need bundle list later, so get that now in parallel + const [dirEntries, bundleDeps] = await Promise.all([ + readdir(path, { withFileTypes: true }), + currentDepth === 0 && pkg && pkg.bundleDependencies + ? bundled({ path, packageJsonCache }) : null, + ]).catch(() => []) + + // not a thing, probably a missing folder + if (!dirEntries) { + return result + } + + // empty folder, just add the folder itself to the result + if (!dirEntries.length && !bundleDeps && currentDepth !== 0) { + result.add(path) + return result + } + + const recursePromises = [] + + for (const entry of dirEntries) { + const p = resolve(path, entry.name) + if (entry.isDirectory() === false) { + result.add(p) + continue + } + + if (currentDepth !== 0 || entry.name !== 'node_modules') { + if (currentDepth < depth - 1) { + recursePromises.push(pkgContents({ + path: p, + packageJsonCache, + depth, + currentDepth: currentDepth + 1, + result, + })) + } else { + result.add(p) + } + continue + } + } + + if (bundleDeps) { + // bundle deps are all folders + // we always recurse to get pkg bins, but if currentDepth is too high, + // it'll return early before walking their contents. 
+ recursePromises.push(...bundleDeps.map(dep => { + const p = resolve(path, 'node_modules', dep) + return pkgContents({ + path: p, + packageJsonCache, + pkg: true, + depth, + currentDepth: currentDepth + 1, + result, + }) + })) + } + + if (recursePromises.length) { + await Promise.all(recursePromises) + } + + return result +} + +module.exports = ({ path, ...opts }) => pkgContents({ + path: resolve(path), + ...opts, + pkg: true, +}).then(results => [...results]) diff --git a/node_modules/pacote/node_modules/@npmcli/installed-package-contents/package.json b/node_modules/pacote/node_modules/@npmcli/installed-package-contents/package.json new file mode 100644 index 0000000000000..d5b68a737daf4 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/installed-package-contents/package.json @@ -0,0 +1,52 @@ +{ + "name": "@npmcli/installed-package-contents", + "version": "3.0.0", + "description": "Get the list of files installed in a package in node_modules, including bundled dependencies", + "author": "GitHub Inc.", + "main": "lib/index.js", + "bin": { + "installed-package-contents": "bin/index.js" + }, + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "tap": "^16.3.0" + }, + "dependencies": { + "npm-bundled": "^4.0.0", + "npm-normalize-package-bin": "^4.0.0" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/installed-package-contents.git" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.23.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/node_modules/pacote/node_modules/cacache/lib/content/path.js b/node_modules/pacote/node_modules/cacache/lib/content/path.js deleted file mode 100644 index ad5a76a4f73f2..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/content/path.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const contentVer = require('../../package.json')['cache-version'].content -const hashToSegments = require('../util/hash-to-segments') -const path = require('path') -const ssri = require('ssri') - -// Current format of content file path: -// -// sha512-BaSE64Hex= -> -// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee -// -module.exports = contentPath - -function contentPath (cache, integrity) { - const sri = ssri.parse(integrity, { single: true }) - // contentPath is the *strongest* algo given - return path.join( - contentDir(cache), - sri.algorithm, - ...hashToSegments(sri.hexDigest()) - ) -} - -module.exports.contentDir = contentDir - -function contentDir (cache) { - return path.join(cache, `content-v${contentVer}`) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/content/read.js b/node_modules/pacote/node_modules/cacache/lib/content/read.js deleted file mode 100644 index 5f6192c3cec56..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/content/read.js +++ /dev/null @@ -1,165 +0,0 @@ -'use strict' - -const fs = require('fs/promises') -const fsm = require('fs-minipass') -const ssri = require('ssri') -const contentPath = require('./path') -const Pipeline = require('minipass-pipeline') - -module.exports = read - -const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 -async function read (cache, integrity, opts = {}) { - const { size } = opts - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // get size - const stat = size ? { size } : await fs.stat(cpath) - return { stat, cpath, sri } - }) - - if (stat.size > MAX_SINGLE_READ_SIZE) { - return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } - - const data = await fs.readFile(cpath, { encoding: null }) - - if (stat.size !== data.length) { - throw sizeError(stat.size, data.length) - } - - if (!ssri.checkData(data, sri)) { - throw integrityError(sri, cpath) - } - - return data -} - -const readPipeline = (cpath, size, sri, stream) => { - stream.push( - new fsm.ReadStream(cpath, { - size, - readSize: MAX_SINGLE_READ_SIZE, - }), - ssri.integrityStream({ - integrity: sri, - size, - }) - ) - return stream -} - -module.exports.stream = readStream -module.exports.readStream = readStream - -function readStream (cache, integrity, opts = {}) { - const { size } = opts - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // get size - const stat = size ? 
{ size } : await fs.stat(cpath) - return { stat, cpath, sri } - }) - - return readPipeline(cpath, stat.size, sri, stream) - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.copy = copy - -function copy (cache, integrity, dest) { - return withContentSri(cache, integrity, (cpath) => { - return fs.copyFile(cpath, dest) - }) -} - -module.exports.hasContent = hasContent - -async function hasContent (cache, integrity) { - if (!integrity) { - return false - } - - try { - return await withContentSri(cache, integrity, async (cpath, sri) => { - const stat = await fs.stat(cpath) - return { size: stat.size, sri, stat } - }) - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - } -} - -async function withContentSri (cache, integrity, fn) { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. - const algo = sri.pickAlgorithm() - const digests = sri[algo] - - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - // Can't use race here because a generic error can happen before - // a ENOENT error, and can happen before a valid result - const results = await Promise.all(digests.map(async (meta) => { - try { - return await withContentSri(cache, meta, fn) - } catch (err) { - if (err.code === 'ENOENT') { - return Object.assign( - new Error('No matching content found for ' + sri.toString()), - { code: 'ENOENT' } - ) - } - return err - } - })) - // Return the first non error if it is found - const result = results.find((r) => !(r instanceof Error)) - if (result) { - return result - } - - // Throw the No matching content found error - const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { - throw enoentError - } - - // Throw generic error - throw results.find((r) => r instanceof Error) - } -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function integrityError (sri, path) { - const err = new Error(`Integrity verification failed for ${sri} (${path})`) - err.code = 'EINTEGRITY' - err.sri = sri - err.path = path - return err -} diff --git a/node_modules/pacote/node_modules/cacache/lib/content/rm.js b/node_modules/pacote/node_modules/cacache/lib/content/rm.js deleted file mode 100644 index ce58d679e4cb2..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/content/rm.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict' - -const fs = require('fs/promises') -const contentPath = require('./path') -const { hasContent } = require('./read') - -module.exports = rm - -async function rm (cache, integrity) { - const content = await hasContent(cache, integrity) - // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { - await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) - return true - } else { - return false - } -} diff --git a/node_modules/pacote/node_modules/cacache/lib/content/write.js b/node_modules/pacote/node_modules/cacache/lib/content/write.js deleted file mode 100644 index e7187abca8788..0000000000000 --- 
a/node_modules/pacote/node_modules/cacache/lib/content/write.js +++ /dev/null @@ -1,206 +0,0 @@ -'use strict' - -const events = require('events') - -const contentPath = require('./path') -const fs = require('fs/promises') -const { moveFile } = require('@npmcli/fs') -const { Minipass } = require('minipass') -const Pipeline = require('minipass-pipeline') -const Flush = require('minipass-flush') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') -const fsm = require('fs-minipass') - -module.exports = write - -// Cache of move operations in process so we don't duplicate -const moveOperations = new Map() - -async function write (cache, data, opts = {}) { - const { algorithms, size, integrity } = opts - - if (typeof size === 'number' && data.length !== size) { - throw sizeError(size, data.length) - } - - const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) - if (integrity && !ssri.checkData(data, integrity, opts)) { - throw checksumError(integrity, sri) - } - - for (const algo in sri) { - const tmp = await makeTmp(cache, opts) - const hash = sri[algo].toString() - try { - await fs.writeFile(tmp.target, data, { flag: 'wx' }) - await moveToDestination(tmp, cache, hash, opts) - } finally { - if (!tmp.moved) { - await fs.rm(tmp.target, { recursive: true, force: true }) - } - } - } - return { integrity: sri, size: data.length } -} - -module.exports.stream = writeStream - -// writes proxied to the 'inputStream' that is passed to the Promise -// 'end' is deferred until content is handled. -class CacacheWriteStream extends Flush { - constructor (cache, opts) { - super() - this.opts = opts - this.cache = cache - this.inputStream = new Minipass() - this.inputStream.on('error', er => this.emit('error', er)) - this.inputStream.on('drain', () => this.emit('drain')) - this.handleContentP = null - } - - write (chunk, encoding, cb) { - if (!this.handleContentP) { - this.handleContentP = handleContent( - this.inputStream, - this.cache, - this.opts - ) - this.handleContentP.catch(error => this.emit('error', error)) - } - return this.inputStream.write(chunk, encoding, cb) - } - - flush (cb) { - this.inputStream.end(() => { - if (!this.handleContentP) { - const e = new Error('Cache input stream was empty') - e.code = 'ENODATA' - // empty streams are probably emitting end right away. - // defer this one tick by rejecting a promise on it. 
- return Promise.reject(e).catch(cb) - } - // eslint-disable-next-line promise/catch-or-return - this.handleContentP.then( - (res) => { - res.integrity && this.emit('integrity', res.integrity) - // eslint-disable-next-line promise/always-return - res.size !== null && this.emit('size', res.size) - cb() - }, - (er) => cb(er) - ) - }) - } -} - -function writeStream (cache, opts = {}) { - return new CacacheWriteStream(cache, opts) -} - -async function handleContent (inputStream, cache, opts) { - const tmp = await makeTmp(cache, opts) - try { - const res = await pipeToTmp(inputStream, cache, tmp.target, opts) - await moveToDestination( - tmp, - cache, - res.integrity, - opts - ) - return res - } finally { - if (!tmp.moved) { - await fs.rm(tmp.target, { recursive: true, force: true }) - } - } -} - -async function pipeToTmp (inputStream, cache, tmpTarget, opts) { - const outStream = new fsm.WriteStream(tmpTarget, { - flags: 'wx', - }) - - if (opts.integrityEmitter) { - // we need to create these all simultaneously since they can fire in any order - const [integrity, size] = await Promise.all([ - events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), - events.once(opts.integrityEmitter, 'size').then(res => res[0]), - new Pipeline(inputStream, outStream).promise(), - ]) - return { integrity, size } - } - - let integrity - let size - const hashStream = ssri.integrityStream({ - integrity: opts.integrity, - algorithms: opts.algorithms, - size: opts.size, - }) - hashStream.on('integrity', i => { - integrity = i - }) - hashStream.on('size', s => { - size = s - }) - - const pipeline = new Pipeline(inputStream, hashStream, outStream) - await pipeline.promise() - return { integrity, size } -} - -async function makeTmp (cache, opts) { - const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) - return { - target: tmpTarget, - moved: false, - } -} - -async function moveToDestination (tmp, cache, sri) { - const destination = contentPath(cache, sri) - const destDir = path.dirname(destination) - if (moveOperations.has(destination)) { - return moveOperations.get(destination) - } - moveOperations.set( - destination, - fs.mkdir(destDir, { recursive: true }) - .then(async () => { - await moveFile(tmp.target, destination, { overwrite: false }) - tmp.moved = true - return tmp.moved - }) - .catch(err => { - if (!err.message.startsWith('The destination file exists')) { - throw Object.assign(err, { code: 'EEXIST' }) - } - }).finally(() => { - moveOperations.delete(destination) - }) - - ) - return moveOperations.get(destination) -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function checksumError (expected, found) { - const err = new Error(`Integrity check failed: - Wanted: ${expected} - Found: ${found}`) - err.code = 'EINTEGRITY' - err.expected = expected - err.found = found - return err -} diff --git a/node_modules/pacote/node_modules/cacache/lib/entry-index.js b/node_modules/pacote/node_modules/cacache/lib/entry-index.js deleted file mode 100644 index 89c28f2f257d4..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/entry-index.js +++ /dev/null @@ -1,336 +0,0 @@ -'use strict' - -const crypto = require('crypto') -const { - appendFile, - mkdir, - readFile, - readdir, - rm, - 
writeFile, -} = require('fs/promises') -const { Minipass } = require('minipass') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') - -const contentPath = require('./content/path') -const hashToSegments = require('./util/hash-to-segments') -const indexV = require('../package.json')['cache-version'].index -const { moveFile } = require('@npmcli/fs') - -const pMap = require('p-map') -const lsStreamConcurrency = 5 - -module.exports.NotFoundError = class NotFoundError extends Error { - constructor (cache, key) { - super(`No cache entry for ${key} found in ${cache}`) - this.code = 'ENOENT' - this.cache = cache - this.key = key - } -} - -module.exports.compact = compact - -async function compact (cache, key, matchFn, opts = {}) { - const bucket = bucketPath(cache, key) - const entries = await bucketEntries(bucket) - const newEntries = [] - // we loop backwards because the bottom-most result is the newest - // since we add new entries with appendFile - for (let i = entries.length - 1; i >= 0; --i) { - const entry = entries[i] - // a null integrity could mean either a delete was appended - // or the user has simply stored an index that does not map - // to any content. we determine if the user wants to keep the - // null integrity based on the validateEntry function passed in options. - // if the integrity is null and no validateEntry is provided, we break - // as we consider the null integrity to be a deletion of everything - // that came before it. - if (entry.integrity === null && !opts.validateEntry) { - break - } - - // if this entry is valid, and it is either the first entry or - // the newEntries array doesn't already include an entry that - // matches this one based on the provided matchFn, then we add - // it to the beginning of our list - if ((!opts.validateEntry || opts.validateEntry(entry) === true) && - (newEntries.length === 0 || - !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { - newEntries.unshift(entry) - } - } - - const newIndex = '\n' + newEntries.map((entry) => { - const stringified = JSON.stringify(entry) - const hash = hashEntry(stringified) - return `${hash}\t${stringified}` - }).join('\n') - - const setup = async () => { - const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await mkdir(path.dirname(target), { recursive: true }) - return { - target, - moved: false, - } - } - - const teardown = async (tmp) => { - if (!tmp.moved) { - return rm(tmp.target, { recursive: true, force: true }) - } - } - - const write = async (tmp) => { - await writeFile(tmp.target, newIndex, { flag: 'wx' }) - await mkdir(path.dirname(bucket), { recursive: true }) - // we use @npmcli/move-file directly here because we - // want to overwrite the existing file - await moveFile(tmp.target, bucket) - tmp.moved = true - } - - // write the file atomically - const tmp = await setup() - try { - await write(tmp) - } finally { - await teardown(tmp) - } - - // we reverse the list we generated such that the newest - // entries come first in order to make looping through them easier - // the true passed to formatEntry tells it to keep null - // integrity values, if they made it this far it's because - // validateEntry returned true, and as such we should return it - return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) -} - -module.exports.insert = insert - -async function insert (cache, key, integrity, opts = {}) { - const { metadata, size, time } = opts - const bucket = bucketPath(cache, key) - 
const entry = { - key, - integrity: integrity && ssri.stringify(integrity), - time: time || Date.now(), - size, - metadata, - } - try { - await mkdir(path.dirname(bucket), { recursive: true }) - const stringified = JSON.stringify(entry) - // NOTE - Cleverness ahoy! - // - // This works because it's tremendously unlikely for an entry to corrupt - // another while still preserving the string length of the JSON in - // question. So, we just slap the length in there and verify it on read. - // - // Thanks to @isaacs for the whiteboarding session that ended up with - // this. - await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - - throw err - } - return formatEntry(cache, entry) -} - -module.exports.find = find - -async function find (cache, key) { - const bucket = bucketPath(cache, key) - try { - const entries = await bucketEntries(bucket) - return entries.reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - } catch (err) { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - } -} - -module.exports.delete = del - -function del (cache, key, opts = {}) { - if (!opts.removeFully) { - return insert(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rm(bucket, { recursive: true, force: true }) -} - -module.exports.lsStream = lsStream - -function lsStream (cache) { - const indexDir = bucketDir(cache) - const stream = new Minipass({ objectMode: true }) - - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const buckets = await readdirOrEmpty(indexDir) - await pMap(buckets, async (bucket) => { - const bucketPath = path.join(indexDir, bucket) - const subbuckets = await readdirOrEmpty(bucketPath) - await pMap(subbuckets, async (subbucket) => { - const subbucketPath = path.join(bucketPath, subbucket) - - // "/cachename//./*" - const subbucketEntries = await readdirOrEmpty(subbucketPath) - await pMap(subbucketEntries, async (entry) => { - const entryPath = path.join(subbucketPath, entry) - try { - const entries = await bucketEntries(entryPath) - // using a Map here prevents duplicate keys from showing up - // twice, I guess? 
- const reduced = entries.reduce((acc, entry) => { - acc.set(entry.key, entry) - return acc - }, new Map()) - // reduced is a map of key => entry - for (const entry of reduced.values()) { - const formatted = formatEntry(cache, entry) - if (formatted) { - stream.write(formatted) - } - } - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - throw err - } - }, - { concurrency: lsStreamConcurrency }) - }, - { concurrency: lsStreamConcurrency }) - }, - { concurrency: lsStreamConcurrency }) - stream.end() - return stream - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.ls = ls - -async function ls (cache) { - const entries = await lsStream(cache).collect() - return entries.reduce((acc, xs) => { - acc[xs.key] = xs - return acc - }, {}) -} - -module.exports.bucketEntries = bucketEntries - -async function bucketEntries (bucket, filter) { - const data = await readFile(bucket, 'utf8') - return _bucketEntries(data, filter) -} - -function _bucketEntries (data) { - const entries = [] - data.split('\n').forEach((entry) => { - if (!entry) { - return - } - - const pieces = entry.split('\t') - if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { - // Hash is no good! Corruption or malice? Doesn't matter! - // EJECT EJECT - return - } - let obj - try { - obj = JSON.parse(pieces[1]) - } catch (_) { - // eslint-ignore-next-line no-empty-block - } - // coverage disabled here, no need to test with an entry that parses to something falsey - // istanbul ignore else - if (obj) { - entries.push(obj) - } - }) - return entries -} - -module.exports.bucketDir = bucketDir - -function bucketDir (cache) { - return path.join(cache, `index-v${indexV}`) -} - -module.exports.bucketPath = bucketPath - -function bucketPath (cache, key) { - const hashed = hashKey(key) - return path.join.apply( - path, - [bucketDir(cache)].concat(hashToSegments(hashed)) - ) -} - -module.exports.hashKey = hashKey - -function hashKey (key) { - return hash(key, 'sha256') -} - -module.exports.hashEntry = hashEntry - -function hashEntry (str) { - return hash(str, 'sha1') -} - -function hash (str, digest) { - return crypto - .createHash(digest) - .update(str) - .digest('hex') -} - -function formatEntry (cache, entry, keepAll) { - // Treat null digests as deletions. They'll shadow any previous entries. - if (!entry.integrity && !keepAll) { - return null - } - - return { - key: entry.key, - integrity: entry.integrity, - path: entry.integrity ? 
contentPath(cache, entry.integrity) : undefined, - size: entry.size, - time: entry.time, - metadata: entry.metadata, - } -} - -function readdirOrEmpty (dir) { - return readdir(dir).catch((err) => { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { - return [] - } - - throw err - }) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/get.js b/node_modules/pacote/node_modules/cacache/lib/get.js deleted file mode 100644 index 80ec206c7ecaa..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/get.js +++ /dev/null @@ -1,170 +0,0 @@ -'use strict' - -const Collect = require('minipass-collect') -const { Minipass } = require('minipass') -const Pipeline = require('minipass-pipeline') - -const index = require('./entry-index') -const memo = require('./memoization') -const read = require('./content/read') - -async function getData (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - } - - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - const data = await read(cache, entry.integrity, { integrity, size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} -module.exports = getData - -async function getDataByDigest (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, key, opts) - if (memoized && memoize !== false) { - return memoized - } - - const res = await read(cache, key, { integrity, size }) - if (memoize) { - memo.put.byDigest(cache, key, res, opts) - } - return res -} -module.exports.byDigest = getDataByDigest - -const getMemoizedStream = (memoized) => { - const stream = new Minipass() - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(memoized.entry.metadata) - ev === 'integrity' && cb(memoized.entry.integrity) - ev === 'size' && cb(memoized.entry.size) - }) - stream.end(memoized.data) - return stream -} - -function getStream (cache, key, opts = {}) { - const { memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return getMemoizedStream(memoized) - } - - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const entry = await index.find(cache, key) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - - stream.emit('metadata', entry.metadata) - stream.emit('integrity', entry.integrity) - stream.emit('size', entry.size) - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(entry.metadata) - ev === 'integrity' && cb(entry.integrity) - ev === 'size' && cb(entry.size) - }) - - const src = read.readStream( - cache, - entry.integrity, - { ...opts, size: typeof size !== 'number' ? 
entry.size : size } - ) - - if (memoize) { - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put(cache, entry, data, opts)) - stream.unshift(memoStream) - } - stream.unshift(src) - return stream - }).catch((err) => stream.emit('error', err)) - - return stream -} - -module.exports.stream = getStream - -function getStreamDigest (cache, integrity, opts = {}) { - const { memoize } = opts - const memoized = memo.get.byDigest(cache, integrity, opts) - if (memoized && memoize !== false) { - const stream = new Minipass() - stream.end(memoized) - return stream - } else { - const stream = read.readStream(cache, integrity, opts) - if (!memoize) { - return stream - } - - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put.byDigest( - cache, - integrity, - data, - opts - )) - return new Pipeline(stream, memoStream) - } -} - -module.exports.stream.byDigest = getStreamDigest - -function info (cache, key, opts = {}) { - const { memoize } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve(memoized.entry) - } else { - return index.find(cache, key) - } -} -module.exports.info = info - -async function copy (cache, key, dest, opts = {}) { - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - await read.copy(cache, entry.integrity, dest, opts) - return { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} - -module.exports.copy = copy - -async function copyByDigest (cache, key, dest, opts = {}) { - await read.copy(cache, key, dest, opts) - return key -} - -module.exports.copy.byDigest = copyByDigest - -module.exports.hasContent = read.hasContent diff --git a/node_modules/pacote/node_modules/cacache/lib/index.js b/node_modules/pacote/node_modules/cacache/lib/index.js deleted file mode 100644 index c9b0da5f3a271..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/index.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -const get = require('./get.js') -const put = require('./put.js') -const rm = require('./rm.js') -const verify = require('./verify.js') -const { clearMemoized } = require('./memoization.js') -const tmp = require('./util/tmp.js') -const index = require('./entry-index.js') - -module.exports.index = {} -module.exports.index.compact = index.compact -module.exports.index.insert = index.insert - -module.exports.ls = index.ls -module.exports.ls.stream = index.lsStream - -module.exports.get = get -module.exports.get.byDigest = get.byDigest -module.exports.get.stream = get.stream -module.exports.get.stream.byDigest = get.stream.byDigest -module.exports.get.copy = get.copy -module.exports.get.copy.byDigest = get.copy.byDigest -module.exports.get.info = get.info -module.exports.get.hasContent = get.hasContent - -module.exports.put = put -module.exports.put.stream = put.stream - -module.exports.rm = rm.entry -module.exports.rm.all = rm.all -module.exports.rm.entry = module.exports.rm -module.exports.rm.content = rm.content - -module.exports.clearMemoized = clearMemoized - -module.exports.tmp = {} -module.exports.tmp.mkdir = tmp.mkdir -module.exports.tmp.withTmp = tmp.withTmp - -module.exports.verify = verify -module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/pacote/node_modules/cacache/lib/memoization.js b/node_modules/pacote/node_modules/cacache/lib/memoization.js deleted file mode 100644 index 2ecc60912e456..0000000000000 --- 
a/node_modules/pacote/node_modules/cacache/lib/memoization.js +++ /dev/null @@ -1,72 +0,0 @@ -'use strict' - -const { LRUCache } = require('lru-cache') - -const MEMOIZED = new LRUCache({ - max: 500, - maxSize: 50 * 1024 * 1024, // 50MB - ttl: 3 * 60 * 1000, // 3 minutes - sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, -}) - -module.exports.clearMemoized = clearMemoized - -function clearMemoized () { - const old = {} - MEMOIZED.forEach((v, k) => { - old[k] = v - }) - MEMOIZED.clear() - return old -} - -module.exports.put = put - -function put (cache, entry, data, opts) { - pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) - putDigest(cache, entry.integrity, data, opts) -} - -module.exports.put.byDigest = putDigest - -function putDigest (cache, integrity, data, opts) { - pickMem(opts).set(`digest:${cache}:${integrity}`, data) -} - -module.exports.get = get - -function get (cache, key, opts) { - return pickMem(opts).get(`key:${cache}:${key}`) -} - -module.exports.get.byDigest = getDigest - -function getDigest (cache, integrity, opts) { - return pickMem(opts).get(`digest:${cache}:${integrity}`) -} - -class ObjProxy { - constructor (obj) { - this.obj = obj - } - - get (key) { - return this.obj[key] - } - - set (key, val) { - this.obj[key] = val - } -} - -function pickMem (opts) { - if (!opts || !opts.memoize) { - return MEMOIZED - } else if (opts.memoize.get && opts.memoize.set) { - return opts.memoize - } else if (typeof opts.memoize === 'object') { - return new ObjProxy(opts.memoize) - } else { - return MEMOIZED - } -} diff --git a/node_modules/pacote/node_modules/cacache/lib/put.js b/node_modules/pacote/node_modules/cacache/lib/put.js deleted file mode 100644 index 9fc932d5f6dec..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/put.js +++ /dev/null @@ -1,80 +0,0 @@ -'use strict' - -const index = require('./entry-index') -const memo = require('./memoization') -const write = require('./content/write') -const Flush = require('minipass-flush') -const { PassThrough } = require('minipass-collect') -const Pipeline = require('minipass-pipeline') - -const putOpts = (opts) => ({ - algorithms: ['sha512'], - ...opts, -}) - -module.exports = putData - -async function putData (cache, key, data, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - const res = await write(cache, data, opts) - const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return res.integrity -} - -module.exports.stream = putStream - -function putStream (cache, key, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - let integrity - let size - let error - - let memoData - const pipeline = new Pipeline() - // first item in the pipeline is the memoizer, because we need - // that to end first and get the collected data. - if (memoize) { - const memoizer = new PassThrough().on('collect', data => { - memoData = data - }) - pipeline.push(memoizer) - } - - // contentStream is a write-only, not a passthrough - // no data comes out of it. 
- const contentStream = write.stream(cache, opts) - .on('integrity', (int) => { - integrity = int - }) - .on('size', (s) => { - size = s - }) - .on('error', (err) => { - error = err - }) - - pipeline.push(contentStream) - - // last but not least, we write the index and emit hash and size, - // and memoize if we're doing that - pipeline.push(new Flush({ - async flush () { - if (!error) { - const entry = await index.insert(cache, key, integrity, { ...opts, size }) - if (memoize && memoData) { - memo.put(cache, entry, memoData, opts) - } - pipeline.emit('integrity', integrity) - pipeline.emit('size', size) - } - }, - })) - - return pipeline -} diff --git a/node_modules/pacote/node_modules/cacache/lib/rm.js b/node_modules/pacote/node_modules/cacache/lib/rm.js deleted file mode 100644 index a94760c7cf243..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/rm.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -const { rm } = require('fs/promises') -const glob = require('./util/glob.js') -const index = require('./entry-index') -const memo = require('./memoization') -const path = require('path') -const rmContent = require('./content/rm') - -module.exports = entry -module.exports.entry = entry - -function entry (cache, key, opts) { - memo.clearMemoized() - return index.delete(cache, key, opts) -} - -module.exports.content = content - -function content (cache, integrity) { - memo.clearMemoized() - return rmContent(cache, integrity) -} - -module.exports.all = all - -async function all (cache) { - memo.clearMemoized() - const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) - return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/util/glob.js b/node_modules/pacote/node_modules/cacache/lib/util/glob.js deleted file mode 100644 index 8500c1c16a429..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/util/glob.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -const { glob } = require('glob') -const path = require('path') - -const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) -module.exports = (path, options) => glob(globify(path), options) diff --git a/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js deleted file mode 100644 index 445599b503808..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -module.exports = hashToSegments - -function hashToSegments (hash) { - return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] -} diff --git a/node_modules/pacote/node_modules/cacache/lib/util/tmp.js b/node_modules/pacote/node_modules/cacache/lib/util/tmp.js deleted file mode 100644 index 0bf5302136ebe..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/util/tmp.js +++ /dev/null @@ -1,26 +0,0 @@ -'use strict' - -const { withTempDir } = require('@npmcli/fs') -const fs = require('fs/promises') -const path = require('path') - -module.exports.mkdir = mktmpdir - -async function mktmpdir (cache, opts = {}) { - const { tmpPrefix } = opts - const tmpDir = path.join(cache, 'tmp') - await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) - // do not use path.join(), it drops the trailing / if tmpPrefix is unset - const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` - return fs.mkdtemp(target, { owner: 'inherit' }) -} - -module.exports.withTmp = 
withTmp - -function withTmp (cache, opts, cb) { - if (!cb) { - cb = opts - opts = {} - } - return withTempDir(path.join(cache, 'tmp'), cb, opts) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/verify.js b/node_modules/pacote/node_modules/cacache/lib/verify.js deleted file mode 100644 index d7423da1295b6..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/verify.js +++ /dev/null @@ -1,257 +0,0 @@ -'use strict' - -const { - mkdir, - readFile, - rm, - stat, - truncate, - writeFile, -} = require('fs/promises') -const pMap = require('p-map') -const contentPath = require('./content/path') -const fsm = require('fs-minipass') -const glob = require('./util/glob.js') -const index = require('./entry-index') -const path = require('path') -const ssri = require('ssri') - -const hasOwnProperty = (obj, key) => - Object.prototype.hasOwnProperty.call(obj, key) - -const verifyOpts = (opts) => ({ - concurrency: 20, - log: { silly () {} }, - ...opts, -}) - -module.exports = verify - -async function verify (cache, opts) { - opts = verifyOpts(opts) - opts.log.silly('verify', 'verifying cache at', cache) - - const steps = [ - markStartTime, - fixPerms, - garbageCollect, - rebuildIndex, - cleanTmp, - writeVerifile, - markEndTime, - ] - - const stats = {} - for (const step of steps) { - const label = step.name - const start = new Date() - const s = await step(cache, opts) - if (s) { - Object.keys(s).forEach((k) => { - stats[k] = s[k] - }) - } - const end = new Date() - if (!stats.runTime) { - stats.runTime = {} - } - stats.runTime[label] = end - start - } - stats.runTime.total = stats.endTime - stats.startTime - opts.log.silly( - 'verify', - 'verification finished for', - cache, - 'in', - `${stats.runTime.total}ms` - ) - return stats -} - -async function markStartTime () { - return { startTime: new Date() } -} - -async function markEndTime () { - return { endTime: new Date() } -} - -async function fixPerms (cache, opts) { - opts.log.silly('verify', 'fixing cache permissions') - await mkdir(cache, { recursive: true }) - return null -} - -// Implements a naive mark-and-sweep tracing garbage collector. -// -// The algorithm is basically as follows: -// 1. Read (and filter) all index entries ("pointers") -// 2. Mark each integrity value as "live" -// 3. Read entire filesystem tree in `content-vX/` dir -// 4. If content is live, verify its checksum and delete it if it fails -// 5. If content is not marked as live, rm it. 
-// -async function garbageCollect (cache, opts) { - opts.log.silly('verify', 'garbage collecting content') - const indexStream = index.lsStream(cache) - const liveContent = new Set() - indexStream.on('data', (entry) => { - if (opts.filter && !opts.filter(entry)) { - return - } - - // integrity is stringified, re-parse it so we can get each hash - const integrity = ssri.parse(entry.integrity) - for (const algo in integrity) { - liveContent.add(integrity[algo].toString()) - } - }) - await new Promise((resolve, reject) => { - indexStream.on('end', resolve).on('error', reject) - }) - const contentDir = contentPath.contentDir(cache) - const files = await glob(path.join(contentDir, '**'), { - follow: false, - nodir: true, - nosort: true, - }) - const stats = { - verifiedContent: 0, - reclaimedCount: 0, - reclaimedSize: 0, - badContentCount: 0, - keptSize: 0, - } - await pMap( - files, - async (f) => { - const split = f.split(/[/\\]/) - const digest = split.slice(split.length - 3).join('') - const algo = split[split.length - 4] - const integrity = ssri.fromHex(digest, algo) - if (liveContent.has(integrity.toString())) { - const info = await verifyContent(f, integrity) - if (!info.valid) { - stats.reclaimedCount++ - stats.badContentCount++ - stats.reclaimedSize += info.size - } else { - stats.verifiedContent++ - stats.keptSize += info.size - } - } else { - // No entries refer to this content. We can delete. - stats.reclaimedCount++ - const s = await stat(f) - await rm(f, { recursive: true, force: true }) - stats.reclaimedSize += s.size - } - return stats - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function verifyContent (filepath, sri) { - const contentInfo = {} - try { - const { size } = await stat(filepath) - contentInfo.size = size - contentInfo.valid = true - await ssri.checkStream(new fsm.ReadStream(filepath), sri) - } catch (err) { - if (err.code === 'ENOENT') { - return { size: 0, valid: false } - } - if (err.code !== 'EINTEGRITY') { - throw err - } - - await rm(filepath, { recursive: true, force: true }) - contentInfo.valid = false - } - return contentInfo -} - -async function rebuildIndex (cache, opts) { - opts.log.silly('verify', 'rebuilding index') - const entries = await index.ls(cache) - const stats = { - missingContent: 0, - rejectedEntries: 0, - totalEntries: 0, - } - const buckets = {} - for (const k in entries) { - /* istanbul ignore else */ - if (hasOwnProperty(entries, k)) { - const hashed = index.hashKey(k) - const entry = entries[k] - const excluded = opts.filter && !opts.filter(entry) - excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) { - buckets[hashed].push(entry) - } else if (buckets[hashed] && excluded) { - // skip - } else if (excluded) { - buckets[hashed] = [] - buckets[hashed]._path = index.bucketPath(cache, k) - } else { - buckets[hashed] = [entry] - buckets[hashed]._path = index.bucketPath(cache, k) - } - } - } - await pMap( - Object.keys(buckets), - (key) => { - return rebuildBucket(cache, buckets[key], stats, opts) - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function rebuildBucket (cache, bucket, stats) { - await truncate(bucket._path) - // This needs to be serialized because cacache explicitly - // lets very racy bucket conflicts clobber each other. 
- for (const entry of bucket) { - const content = contentPath(cache, entry.integrity) - try { - await stat(content) - await index.insert(cache, entry.key, entry.integrity, { - metadata: entry.metadata, - size: entry.size, - time: entry.time, - }) - stats.totalEntries++ - } catch (err) { - if (err.code === 'ENOENT') { - stats.rejectedEntries++ - stats.missingContent++ - } else { - throw err - } - } - } -} - -function cleanTmp (cache, opts) { - opts.log.silly('verify', 'cleaning tmp directory') - return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) -} - -async function writeVerifile (cache, opts) { - const verifile = path.join(cache, '_lastverified') - opts.log.silly('verify', 'writing verifile to ' + verifile) - return writeFile(verifile, `${Date.now()}`) -} - -module.exports.lastRun = lastRun - -async function lastRun (cache) { - const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) - return new Date(+data) -} diff --git a/node_modules/pacote/node_modules/ignore-walk/LICENSE b/node_modules/pacote/node_modules/ignore-walk/LICENSE new file mode 100644 index 0000000000000..19129e315fe59 --- /dev/null +++ b/node_modules/pacote/node_modules/ignore-walk/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pacote/node_modules/ignore-walk/lib/index.js b/node_modules/pacote/node_modules/ignore-walk/lib/index.js new file mode 100644 index 0000000000000..366d95e2d516c --- /dev/null +++ b/node_modules/pacote/node_modules/ignore-walk/lib/index.js @@ -0,0 +1,310 @@ +'use strict' + +const fs = require('fs') +const path = require('path') +const EE = require('events').EventEmitter +const Minimatch = require('minimatch').Minimatch + +class Walker extends EE { + constructor (opts) { + opts = opts || {} + super(opts) + // set to true if this.path is a symlink, whether follow is true or not + this.isSymbolicLink = opts.isSymbolicLink + this.path = opts.path || process.cwd() + this.basename = path.basename(this.path) + this.ignoreFiles = opts.ignoreFiles || ['.ignore'] + this.ignoreRules = {} + this.parent = opts.parent || null + this.includeEmpty = !!opts.includeEmpty + this.root = this.parent ? this.parent.root : this.path + this.follow = !!opts.follow + this.result = this.parent ? this.parent.result : new Set() + this.entries = null + this.sawError = false + this.exact = opts.exact + } + + sort (a, b) { + return a.localeCompare(b, 'en') + } + + emit (ev, data) { + let ret = false + if (!(this.sawError && ev === 'error')) { + if (ev === 'error') { + this.sawError = true + } else if (ev === 'done' && !this.parent) { + data = Array.from(data) + .map(e => /^@/.test(e) ? 
`./${e}` : e).sort(this.sort) + this.result = data + } + + if (ev === 'error' && this.parent) { + ret = this.parent.emit('error', data) + } else { + ret = super.emit(ev, data) + } + } + return ret + } + + start () { + fs.readdir(this.path, (er, entries) => + er ? this.emit('error', er) : this.onReaddir(entries)) + return this + } + + isIgnoreFile (e) { + return e !== '.' && + e !== '..' && + this.ignoreFiles.indexOf(e) !== -1 + } + + onReaddir (entries) { + this.entries = entries + if (entries.length === 0) { + if (this.includeEmpty) { + this.result.add(this.path.slice(this.root.length + 1)) + } + this.emit('done', this.result) + } else { + const hasIg = this.entries.some(e => + this.isIgnoreFile(e)) + + if (hasIg) { + this.addIgnoreFiles() + } else { + this.filterEntries() + } + } + } + + addIgnoreFiles () { + const newIg = this.entries + .filter(e => this.isIgnoreFile(e)) + + let igCount = newIg.length + const then = () => { + if (--igCount === 0) { + this.filterEntries() + } + } + + newIg.forEach(e => this.addIgnoreFile(e, then)) + } + + addIgnoreFile (file, then) { + const ig = path.resolve(this.path, file) + fs.readFile(ig, 'utf8', (er, data) => + er ? this.emit('error', er) : this.onReadIgnoreFile(file, data, then)) + } + + onReadIgnoreFile (file, data, then) { + const mmopt = { + matchBase: true, + dot: true, + flipNegate: true, + nocase: true, + } + const rules = data.split(/\r?\n/) + .filter(line => !/^#|^$/.test(line.trim())) + .map(rule => { + return new Minimatch(rule.trim(), mmopt) + }) + + this.ignoreRules[file] = rules + + then() + } + + filterEntries () { + // at this point we either have ignore rules, or just inheriting + // this exclusion is at the point where we know the list of + // entries in the dir, but don't know what they are. since + // some of them *might* be directories, we have to run the + // match in dir-mode as well, so that we'll pick up partials + // of files that will be included later. Anything included + // at this point will be checked again later once we know + // what it is. + const filtered = this.entries.map(entry => { + // at this point, we don't know if it's a dir or not. + const passFile = this.filterEntry(entry) + const passDir = this.filterEntry(entry, true) + return (passFile || passDir) ? 
[entry, passFile, passDir] : false + }).filter(e => e) + + // now we stat them all + // if it's a dir, and passes as a dir, then recurse + // if it's not a dir, but passes as a file, add to set + let entryCount = filtered.length + if (entryCount === 0) { + this.emit('done', this.result) + } else { + const then = () => { + if (--entryCount === 0) { + this.emit('done', this.result) + } + } + filtered.forEach(filt => { + const entry = filt[0] + const file = filt[1] + const dir = filt[2] + this.stat({ entry, file, dir }, then) + }) + } + } + + onstat ({ st, entry, file, dir, isSymbolicLink }, then) { + const abs = this.path + '/' + entry + if (!st.isDirectory()) { + if (file) { + this.result.add(abs.slice(this.root.length + 1)) + } + then() + } else { + // is a directory + if (dir) { + this.walker(entry, { isSymbolicLink, exact: file || this.filterEntry(entry + '/') }, then) + } else { + then() + } + } + } + + stat ({ entry, file, dir }, then) { + const abs = this.path + '/' + entry + fs.lstat(abs, (lstatErr, lstatResult) => { + if (lstatErr) { + this.emit('error', lstatErr) + } else { + const isSymbolicLink = lstatResult.isSymbolicLink() + if (this.follow && isSymbolicLink) { + fs.stat(abs, (statErr, statResult) => { + if (statErr) { + this.emit('error', statErr) + } else { + this.onstat({ st: statResult, entry, file, dir, isSymbolicLink }, then) + } + }) + } else { + this.onstat({ st: lstatResult, entry, file, dir, isSymbolicLink }, then) + } + } + }) + } + + walkerOpt (entry, opts) { + return { + path: this.path + '/' + entry, + parent: this, + ignoreFiles: this.ignoreFiles, + follow: this.follow, + includeEmpty: this.includeEmpty, + ...opts, + } + } + + walker (entry, opts, then) { + new Walker(this.walkerOpt(entry, opts)).on('done', then).start() + } + + filterEntry (entry, partial, entryBasename) { + let included = true + + // this = /a/b/c + // entry = d + // parent /a/b sees c/d + if (this.parent && this.parent.filterEntry) { + const parentEntry = this.basename + '/' + entry + const parentBasename = entryBasename || entry + included = this.parent.filterEntry(parentEntry, partial, parentBasename) + if (!included && !this.exact) { + return false + } + } + + this.ignoreFiles.forEach(f => { + if (this.ignoreRules[f]) { + this.ignoreRules[f].forEach(rule => { + // negation means inclusion + // so if it's negated, and already included, no need to check + // likewise if it's neither negated nor included + if (rule.negate !== included) { + const isRelativeRule = entryBasename && rule.globParts.some(part => + part.length <= (part.slice(-1)[0] ? 
1 : 2) + ) + + // first, match against /foo/bar + // then, against foo/bar + // then, in the case of partials, match with a / + // then, if also the rule is relative, match against basename + const match = rule.match('/' + entry) || + rule.match(entry) || + !!partial && ( + rule.match('/' + entry + '/') || + rule.match(entry + '/') || + rule.negate && ( + rule.match('/' + entry, true) || + rule.match(entry, true)) || + isRelativeRule && ( + rule.match('/' + entryBasename + '/') || + rule.match(entryBasename + '/') || + rule.negate && ( + rule.match('/' + entryBasename, true) || + rule.match(entryBasename, true)))) + + if (match) { + included = rule.negate + } + } + }) + } + }) + + return included + } +} + +class WalkerSync extends Walker { + start () { + this.onReaddir(fs.readdirSync(this.path)) + return this + } + + addIgnoreFile (file, then) { + const ig = path.resolve(this.path, file) + this.onReadIgnoreFile(file, fs.readFileSync(ig, 'utf8'), then) + } + + stat ({ entry, file, dir }, then) { + const abs = this.path + '/' + entry + let st = fs.lstatSync(abs) + const isSymbolicLink = st.isSymbolicLink() + if (this.follow && isSymbolicLink) { + st = fs.statSync(abs) + } + + // console.error('STAT SYNC', {st, entry, file, dir, isSymbolicLink, then}) + this.onstat({ st, entry, file, dir, isSymbolicLink }, then) + } + + walker (entry, opts, then) { + new WalkerSync(this.walkerOpt(entry, opts)).start() + then() + } +} + +const walk = (opts, callback) => { + const p = new Promise((resolve, reject) => { + new Walker(opts).on('done', resolve).on('error', reject).start() + }) + return callback ? p.then(res => callback(null, res), callback) : p +} + +const walkSync = opts => new WalkerSync(opts).start().result + +module.exports = walk +walk.sync = walkSync +walk.Walker = Walker +walk.WalkerSync = WalkerSync diff --git a/node_modules/pacote/node_modules/ignore-walk/package.json b/node_modules/pacote/node_modules/ignore-walk/package.json new file mode 100644 index 0000000000000..125fc071939db --- /dev/null +++ b/node_modules/pacote/node_modules/ignore-walk/package.json @@ -0,0 +1,64 @@ +{ + "name": "ignore-walk", + "version": "7.0.0", + "description": "Nested/recursive `.gitignore`/`.npmignore` parsing and filtering.", + "main": "lib/index.js", + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "mutate-fs": "^2.1.1", + "tap": "^16.0.1" + }, + "scripts": { + "test": "tap", + "posttest": "npm run lint", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "test:windows-coverage": "npm pkg set tap.statements=99 --json && npm pkg set tap.branches=98 --json && npm pkg set tap.lines=99 --json", + "snap": "tap", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "keywords": [ + "ignorefile", + "ignore", + "file", + ".gitignore", + ".npmignore", + "glob" + ], + "author": "GitHub Inc.", + "license": "ISC", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/ignore-walk.git" + }, + "files": [ + "bin/", + "lib/" + ], + "dependencies": { + "minimatch": "^9.0.0" + }, + "tap": { + "test-env": "LC_ALL=sk", + "before": "test/00-setup.js", + "after": "test/zz-cleanup.js", + "timeout": 600, + "jobs": 1, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.23.3", + "content": "scripts/template-oss", + "publish": "true" + } +} diff --git a/node_modules/pacote/node_modules/isexe/LICENSE b/node_modules/pacote/node_modules/isexe/LICENSE new file mode 100644 index 0000000000000..c925dbe826b67 --- /dev/null +++ b/node_modules/pacote/node_modules/isexe/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2016-2022 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pacote/node_modules/isexe/dist/cjs/index.js b/node_modules/pacote/node_modules/isexe/dist/cjs/index.js new file mode 100644 index 0000000000000..cefcb66b5c543 --- /dev/null +++ b/node_modules/pacote/node_modules/isexe/dist/cjs/index.js @@ -0,0 +1,46 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sync = exports.isexe = exports.posix = exports.win32 = void 0; +const posix = __importStar(require("./posix.js")); +exports.posix = posix; +const win32 = __importStar(require("./win32.js")); +exports.win32 = win32; +__exportStar(require("./options.js"), exports); +const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform; +const impl = platform === 'win32' ? win32 : posix; +/** + * Determine whether a path is executable on the current platform. + */ +exports.isexe = impl.isexe; +/** + * Synchronously determine whether a path is executable on the + * current platform. 
+ */ +exports.sync = impl.sync; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/pacote/node_modules/isexe/dist/cjs/options.js b/node_modules/pacote/node_modules/isexe/dist/cjs/options.js new file mode 100644 index 0000000000000..0dfad0762cc32 --- /dev/null +++ b/node_modules/pacote/node_modules/isexe/dist/cjs/options.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/node_modules/pacote/node_modules/isexe/dist/cjs/package.json b/node_modules/pacote/node_modules/isexe/dist/cjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/pacote/node_modules/isexe/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/pacote/node_modules/isexe/dist/cjs/posix.js b/node_modules/pacote/node_modules/isexe/dist/cjs/posix.js new file mode 100644 index 0000000000000..3bc5e79d7007e --- /dev/null +++ b/node_modules/pacote/node_modules/isexe/dist/cjs/posix.js @@ -0,0 +1,67 @@ +"use strict"; +/** + * This is the Posix implementation of isexe, which uses the file + * mode and uid/gid values. + * + * @module + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sync = exports.isexe = void 0; +const fs_1 = require("fs"); +const promises_1 = require("fs/promises"); +/** + * Determine whether a path is executable according to the mode and + * current (or specified) user and group IDs. + */ +const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await (0, promises_1.stat)(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.isexe = isexe; +/** + * Synchronously determine whether a path is executable according to + * the mode and current (or specified) user and group IDs. + */ +const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat((0, fs_1.statSync)(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.sync = sync; +const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options); +const checkMode = (stat, options) => { + const myUid = options.uid ?? process.getuid?.(); + const myGroups = options.groups ?? process.getgroups?.() ?? []; + const myGid = options.gid ?? process.getgid?.() ?? myGroups[0]; + if (myUid === undefined || myGid === undefined) { + throw new Error('cannot get uid or gid'); + } + const groups = new Set([myGid, ...myGroups]); + const mod = stat.mode; + const uid = stat.uid; + const gid = stat.gid; + const u = parseInt('100', 8); + const g = parseInt('010', 8); + const o = parseInt('001', 8); + const ug = u | g; + return !!(mod & o || + (mod & g && groups.has(gid)) || + (mod & u && uid === myUid) || + (mod & ug && myUid === 0)); +}; +//# sourceMappingURL=posix.js.map \ No newline at end of file diff --git a/node_modules/pacote/node_modules/isexe/dist/cjs/win32.js b/node_modules/pacote/node_modules/isexe/dist/cjs/win32.js new file mode 100644 index 0000000000000..fa7a4d2f7d240 --- /dev/null +++ b/node_modules/pacote/node_modules/isexe/dist/cjs/win32.js @@ -0,0 +1,62 @@ +"use strict"; +/** + * This is the Windows implementation of isexe, which uses the file + * extension and PATHEXT setting. 
+ * + * @module + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sync = exports.isexe = void 0; +const fs_1 = require("fs"); +const promises_1 = require("fs/promises"); +/** + * Determine whether a path is executable based on the file extension + * and PATHEXT environment variable (or specified pathExt option) + */ +const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await (0, promises_1.stat)(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.isexe = isexe; +/** + * Synchronously determine whether a path is executable based on the file + * extension and PATHEXT environment variable (or specified pathExt option) + */ +const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat((0, fs_1.statSync)(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.sync = sync; +const checkPathExt = (path, options) => { + const { pathExt = process.env.PATHEXT || '' } = options; + const peSplit = pathExt.split(';'); + if (peSplit.indexOf('') !== -1) { + return true; + } + for (let i = 0; i < peSplit.length; i++) { + const p = peSplit[i].toLowerCase(); + const ext = path.substring(path.length - p.length).toLowerCase(); + if (p && ext === p) { + return true; + } + } + return false; +}; +const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options); +//# sourceMappingURL=win32.js.map \ No newline at end of file diff --git a/node_modules/pacote/node_modules/isexe/dist/mjs/index.js b/node_modules/pacote/node_modules/isexe/dist/mjs/index.js new file mode 100644 index 0000000000000..1e309acd7355e --- /dev/null +++ b/node_modules/pacote/node_modules/isexe/dist/mjs/index.js @@ -0,0 +1,16 @@ +import * as posix from './posix.js'; +import * as win32 from './win32.js'; +export * from './options.js'; +export { win32, posix }; +const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform; +const impl = platform === 'win32' ? win32 : posix; +/** + * Determine whether a path is executable on the current platform. + */ +export const isexe = impl.isexe; +/** + * Synchronously determine whether a path is executable on the + * current platform. 
+ */ +export const sync = impl.sync; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/pacote/node_modules/isexe/dist/mjs/options.js b/node_modules/pacote/node_modules/isexe/dist/mjs/options.js new file mode 100644 index 0000000000000..e9ded40bd5b2c --- /dev/null +++ b/node_modules/pacote/node_modules/isexe/dist/mjs/options.js @@ -0,0 +1,2 @@ +export {}; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/node_modules/pacote/node_modules/isexe/dist/mjs/package.json b/node_modules/pacote/node_modules/isexe/dist/mjs/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/pacote/node_modules/isexe/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/pacote/node_modules/isexe/dist/mjs/posix.js b/node_modules/pacote/node_modules/isexe/dist/mjs/posix.js new file mode 100644 index 0000000000000..c453776c0452f --- /dev/null +++ b/node_modules/pacote/node_modules/isexe/dist/mjs/posix.js @@ -0,0 +1,62 @@ +/** + * This is the Posix implementation of isexe, which uses the file + * mode and uid/gid values. + * + * @module + */ +import { statSync } from 'fs'; +import { stat } from 'fs/promises'; +/** + * Determine whether a path is executable according to the mode and + * current (or specified) user and group IDs. + */ +export const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await stat(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +/** + * Synchronously determine whether a path is executable according to + * the mode and current (or specified) user and group IDs. + */ +export const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(statSync(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options); +const checkMode = (stat, options) => { + const myUid = options.uid ?? process.getuid?.(); + const myGroups = options.groups ?? process.getgroups?.() ?? []; + const myGid = options.gid ?? process.getgid?.() ?? myGroups[0]; + if (myUid === undefined || myGid === undefined) { + throw new Error('cannot get uid or gid'); + } + const groups = new Set([myGid, ...myGroups]); + const mod = stat.mode; + const uid = stat.uid; + const gid = stat.gid; + const u = parseInt('100', 8); + const g = parseInt('010', 8); + const o = parseInt('001', 8); + const ug = u | g; + return !!(mod & o || + (mod & g && groups.has(gid)) || + (mod & u && uid === myUid) || + (mod & ug && myUid === 0)); +}; +//# sourceMappingURL=posix.js.map \ No newline at end of file diff --git a/node_modules/pacote/node_modules/isexe/dist/mjs/win32.js b/node_modules/pacote/node_modules/isexe/dist/mjs/win32.js new file mode 100644 index 0000000000000..a354ee2a5115c --- /dev/null +++ b/node_modules/pacote/node_modules/isexe/dist/mjs/win32.js @@ -0,0 +1,57 @@ +/** + * This is the Windows implementation of isexe, which uses the file + * extension and PATHEXT setting. 
+ * + * @module + */ +import { statSync } from 'fs'; +import { stat } from 'fs/promises'; +/** + * Determine whether a path is executable based on the file extension + * and PATHEXT environment variable (or specified pathExt option) + */ +export const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await stat(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +/** + * Synchronously determine whether a path is executable based on the file + * extension and PATHEXT environment variable (or specified pathExt option) + */ +export const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(statSync(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +const checkPathExt = (path, options) => { + const { pathExt = process.env.PATHEXT || '' } = options; + const peSplit = pathExt.split(';'); + if (peSplit.indexOf('') !== -1) { + return true; + } + for (let i = 0; i < peSplit.length; i++) { + const p = peSplit[i].toLowerCase(); + const ext = path.substring(path.length - p.length).toLowerCase(); + if (p && ext === p) { + return true; + } + } + return false; +}; +const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options); +//# sourceMappingURL=win32.js.map \ No newline at end of file diff --git a/node_modules/pacote/node_modules/isexe/package.json b/node_modules/pacote/node_modules/isexe/package.json new file mode 100644 index 0000000000000..a0e2cd04bfdbf --- /dev/null +++ b/node_modules/pacote/node_modules/isexe/package.json @@ -0,0 +1,96 @@ +{ + "name": "isexe", + "version": "3.1.1", + "description": "Minimal module to check if a file is executable.", + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/cjs/index.js", + "files": [ + "dist" + ], + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./posix": { + "import": { + "types": "./dist/mjs/posix.d.ts", + "default": "./dist/mjs/posix.js" + }, + "require": { + "types": "./dist/cjs/posix.d.ts", + "default": "./dist/cjs/posix.js" + } + }, + "./win32": { + "import": { + "types": "./dist/mjs/win32.d.ts", + "default": "./dist/mjs/win32.js" + }, + "require": { + "types": "./dist/cjs/win32.d.ts", + "default": "./dist/cjs/win32.js" + } + }, + "./package.json": "./package.json" + }, + "devDependencies": { + "@types/node": "^20.4.5", + "@types/tap": "^15.0.8", + "c8": "^8.0.1", + "mkdirp": "^0.5.1", + "prettier": "^2.8.8", + "rimraf": "^2.5.0", + "sync-content": "^1.0.2", + "tap": "^16.3.8", + "ts-node": "^10.9.1", + "typedoc": "^0.24.8", + "typescript": "^5.1.6" + }, + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tsc -p tsconfig/cjs.json && tsc -p tsconfig/esm.json && bash ./scripts/fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache", + "typedoc": "typedoc --tsconfig tsconfig/esm.json ./src/*.ts" + }, + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "tap": { + "coverage": false, + "node-arg": [ + "--enable-source-maps", + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "repository": "https://github.com/isaacs/isexe", + "engines": { + "node": ">=16" + } +} diff --git a/node_modules/pacote/node_modules/json-parse-even-better-errors/LICENSE.md b/node_modules/pacote/node_modules/json-parse-even-better-errors/LICENSE.md deleted file mode 100644 index 6991b7cbb89db..0000000000000 --- a/node_modules/pacote/node_modules/json-parse-even-better-errors/LICENSE.md +++ /dev/null @@ -1,25 +0,0 @@ -Copyright 2017 Kat Marchán -Copyright npm, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. - ---- - -This library is a fork of 'better-json-errors' by Kat Marchán, extended and -distributed under the terms of the MIT license above. diff --git a/node_modules/pacote/node_modules/json-parse-even-better-errors/lib/index.js b/node_modules/pacote/node_modules/json-parse-even-better-errors/lib/index.js deleted file mode 100644 index 3ffdaac96d2dc..0000000000000 --- a/node_modules/pacote/node_modules/json-parse-even-better-errors/lib/index.js +++ /dev/null @@ -1,137 +0,0 @@ -'use strict' - -const INDENT = Symbol.for('indent') -const NEWLINE = Symbol.for('newline') - -const DEFAULT_NEWLINE = '\n' -const DEFAULT_INDENT = ' ' -const BOM = /^\uFEFF/ - -// only respect indentation if we got a line break, otherwise squash it -// things other than objects and arrays aren't indented, so ignore those -// Important: in both of these regexps, the $1 capture group is the newline -// or undefined, and the $2 capture group is the indent, or undefined. -const FORMAT = /^\s*[{[]((?:\r?\n)+)([\s\t]*)/ -const EMPTY = /^(?:\{\}|\[\])((?:\r?\n)+)?$/ - -// Node 20 puts single quotes around the token and a comma after it -const UNEXPECTED_TOKEN = /^Unexpected token '?(.)'?(,)? /i - -const hexify = (char) => { - const h = char.charCodeAt(0).toString(16).toUpperCase() - return `0x${h.length % 2 ? '0' : ''}${h}` -} - -// Remove byte order marker. This catches EF BB BF (the UTF-8 BOM) -// because the buffer-to-string conversion in `fs.readFileSync()` -// translates it to FEFF, the UTF-16 BOM. 
-const stripBOM = (txt) => String(txt).replace(BOM, '') - -const makeParsedError = (msg, parsing, position = 0) => ({ - message: `${msg} while parsing ${parsing}`, - position, -}) - -const parseError = (e, txt, context = 20) => { - let msg = e.message - - if (!txt) { - return makeParsedError(msg, 'empty string') - } - - const badTokenMatch = msg.match(UNEXPECTED_TOKEN) - const badIndexMatch = msg.match(/ position\s+(\d+)/i) - - if (badTokenMatch) { - msg = msg.replace( - UNEXPECTED_TOKEN, - `Unexpected token ${JSON.stringify(badTokenMatch[1])} (${hexify(badTokenMatch[1])})$2 ` - ) - } - - let errIdx - if (badIndexMatch) { - errIdx = +badIndexMatch[1] - } else /* istanbul ignore next - doesnt happen in Node 22 */ if ( - msg.match(/^Unexpected end of JSON.*/i) - ) { - errIdx = txt.length - 1 - } - - if (errIdx == null) { - return makeParsedError(msg, `'${txt.slice(0, context * 2)}'`) - } - - const start = errIdx <= context ? 0 : errIdx - context - const end = errIdx + context >= txt.length ? txt.length : errIdx + context - const slice = `${start ? '...' : ''}${txt.slice(start, end)}${end === txt.length ? '' : '...'}` - - return makeParsedError( - msg, - `${txt === slice ? '' : 'near '}${JSON.stringify(slice)}`, - errIdx - ) -} - -class JSONParseError extends SyntaxError { - constructor (er, txt, context, caller) { - const metadata = parseError(er, txt, context) - super(metadata.message) - Object.assign(this, metadata) - this.code = 'EJSONPARSE' - this.systemError = er - Error.captureStackTrace(this, caller || this.constructor) - } - - get name () { - return this.constructor.name - } - - set name (n) {} - - get [Symbol.toStringTag] () { - return this.constructor.name - } -} - -const parseJson = (txt, reviver) => { - const result = JSON.parse(txt, reviver) - if (result && typeof result === 'object') { - // get the indentation so that we can save it back nicely - // if the file starts with {" then we have an indent of '', ie, none - // otherwise, pick the indentation of the next line after the first \n If the - // pattern doesn't match, then it means no indentation. JSON.stringify ignores - // symbols, so this is reasonably safe. if the string is '{}' or '[]', then - // use the default 2-space indent. - const match = txt.match(EMPTY) || txt.match(FORMAT) || [null, '', ''] - result[NEWLINE] = match[1] ?? DEFAULT_NEWLINE - result[INDENT] = match[2] ?? DEFAULT_INDENT - } - return result -} - -const parseJsonError = (raw, reviver, context) => { - const txt = stripBOM(raw) - try { - return parseJson(txt, reviver) - } catch (e) { - if (typeof raw !== 'string' && !Buffer.isBuffer(raw)) { - const msg = Array.isArray(raw) && raw.length === 0 ? 
'an empty array' : String(raw) - throw Object.assign( - new TypeError(`Cannot parse ${msg}`), - { code: 'EJSONPARSE', systemError: e } - ) - } - throw new JSONParseError(e, txt, context, parseJsonError) - } -} - -module.exports = parseJsonError -parseJsonError.JSONParseError = JSONParseError -parseJsonError.noExceptions = (raw, reviver) => { - try { - return parseJson(stripBOM(raw), reviver) - } catch { - // no exceptions - } -} diff --git a/node_modules/pacote/node_modules/json-parse-even-better-errors/package.json b/node_modules/pacote/node_modules/json-parse-even-better-errors/package.json deleted file mode 100644 index c7156df325fa2..0000000000000 --- a/node_modules/pacote/node_modules/json-parse-even-better-errors/package.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "name": "json-parse-even-better-errors", - "version": "3.0.2", - "description": "JSON.parse with context information on error", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "test": "tap", - "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/json-parse-even-better-errors.git" - }, - "keywords": [ - "JSON", - "parser" - ], - "author": "GitHub Inc.", - "license": "MIT", - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "tap": "^16.3.0" - }, - "tap": { - "check-coverage": true, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", - "publish": true - } -} diff --git a/node_modules/pacote/node_modules/npm-bundled/LICENSE b/node_modules/pacote/node_modules/npm-bundled/LICENSE new file mode 100644 index 0000000000000..20a4762540923 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-bundled/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pacote/node_modules/npm-bundled/lib/index.js b/node_modules/pacote/node_modules/npm-bundled/lib/index.js new file mode 100644 index 0000000000000..f5ee0bb3ea765 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-bundled/lib/index.js @@ -0,0 +1,254 @@ +'use strict' + +// walk the tree of deps starting from the top level list of bundled deps +// Any deps at the top level that are depended on by a bundled dep that +// does not have that dep in its own node_modules folder are considered +// bundled deps as well. 
This list of names can be passed to npm-packlist +// as the "bundled" argument. Additionally, packageJsonCache is shared so +// packlist doesn't have to re-read files already consumed in this pass + +const fs = require('fs') +const path = require('path') +const EE = require('events').EventEmitter +// we don't care about the package bins, but we share a pj cache +// with other modules that DO care about it, so keep it nice. +const normalizePackageBin = require('npm-normalize-package-bin') + +class BundleWalker extends EE { + constructor (opt) { + opt = opt || {} + super(opt) + this.path = path.resolve(opt.path || process.cwd()) + + this.parent = opt.parent || null + if (this.parent) { + this.result = this.parent.result + // only collect results in node_modules folders at the top level + // since the node_modules in a bundled dep is included always + if (!this.parent.parent) { + const base = path.basename(this.path) + const scope = path.basename(path.dirname(this.path)) + this.result.add(/^@/.test(scope) ? scope + '/' + base : base) + } + this.root = this.parent.root + this.packageJsonCache = this.parent.packageJsonCache + } else { + this.result = new Set() + this.root = this.path + this.packageJsonCache = opt.packageJsonCache || new Map() + } + + this.seen = new Set() + this.didDone = false + this.children = 0 + this.node_modules = [] + this.package = null + this.bundle = null + } + + addListener (ev, fn) { + return this.on(ev, fn) + } + + on (ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'done' && this.didDone) { + this.emit('done', this.result) + } + return ret + } + + done () { + if (!this.didDone) { + this.didDone = true + if (!this.parent) { + const res = Array.from(this.result) + this.result = res + this.emit('done', res) + } else { + this.emit('done') + } + } + } + + start () { + const pj = path.resolve(this.path, 'package.json') + if (this.packageJsonCache.has(pj)) { + this.onPackage(this.packageJsonCache.get(pj)) + } else { + this.readPackageJson(pj) + } + return this + } + + readPackageJson (pj) { + fs.readFile(pj, (er, data) => + er ? this.done() : this.onPackageJson(pj, data)) + } + + onPackageJson (pj, data) { + try { + this.package = normalizePackageBin(JSON.parse(data + '')) + } catch (er) { + return this.done() + } + this.packageJsonCache.set(pj, this.package) + this.onPackage(this.package) + } + + allDepsBundled (pkg) { + return Object.keys(pkg.dependencies || {}).concat( + Object.keys(pkg.optionalDependencies || {})) + } + + onPackage (pkg) { + // all deps are bundled if we got here as a child. + // otherwise, only bundle bundledDeps + // Get a unique-ified array with a short-lived Set + const bdRaw = this.parent ? this.allDepsBundled(pkg) + : pkg.bundleDependencies || pkg.bundledDependencies || [] + + const bd = Array.from(new Set( + Array.isArray(bdRaw) ? bdRaw + : bdRaw === true ? this.allDepsBundled(pkg) + : Object.keys(bdRaw))) + + if (!bd.length) { + return this.done() + } + + this.bundle = bd + this.readModules() + } + + readModules () { + readdirNodeModules(this.path + '/node_modules', (er, nm) => + er ? 
this.onReaddir([]) : this.onReaddir(nm)) + } + + onReaddir (nm) { + // keep track of what we have, in case children need it + this.node_modules = nm + + this.bundle.forEach(dep => this.childDep(dep)) + if (this.children === 0) { + this.done() + } + } + + childDep (dep) { + if (this.node_modules.indexOf(dep) !== -1) { + if (!this.seen.has(dep)) { + this.seen.add(dep) + this.child(dep) + } + } else if (this.parent) { + this.parent.childDep(dep) + } + } + + child (dep) { + const p = this.path + '/node_modules/' + dep + this.children += 1 + const child = new BundleWalker({ + path: p, + parent: this, + }) + child.on('done', () => { + if (--this.children === 0) { + this.done() + } + }) + child.start() + } +} + +class BundleWalkerSync extends BundleWalker { + start () { + super.start() + this.done() + return this + } + + readPackageJson (pj) { + try { + this.onPackageJson(pj, fs.readFileSync(pj)) + } catch { + // empty catch + } + return this + } + + readModules () { + try { + this.onReaddir(readdirNodeModulesSync(this.path + '/node_modules')) + } catch { + this.onReaddir([]) + } + } + + child (dep) { + new BundleWalkerSync({ + path: this.path + '/node_modules/' + dep, + parent: this, + }).start() + } +} + +const readdirNodeModules = (nm, cb) => { + fs.readdir(nm, (er, set) => { + if (er) { + cb(er) + } else { + const scopes = set.filter(f => /^@/.test(f)) + if (!scopes.length) { + cb(null, set) + } else { + const unscoped = set.filter(f => !/^@/.test(f)) + let count = scopes.length + scopes.forEach(scope => { + fs.readdir(nm + '/' + scope, (readdirEr, pkgs) => { + if (readdirEr || !pkgs.length) { + unscoped.push(scope) + } else { + unscoped.push.apply(unscoped, pkgs.map(p => scope + '/' + p)) + } + if (--count === 0) { + cb(null, unscoped) + } + }) + }) + } + } + }) +} + +const readdirNodeModulesSync = nm => { + const set = fs.readdirSync(nm) + const unscoped = set.filter(f => !/^@/.test(f)) + const scopes = set.filter(f => /^@/.test(f)).map(scope => { + try { + const pkgs = fs.readdirSync(nm + '/' + scope) + return pkgs.length ? pkgs.map(p => scope + '/' + p) : [scope] + } catch (er) { + return [scope] + } + }).reduce((a, b) => a.concat(b), []) + return unscoped.concat(scopes) +} + +const walk = (options, callback) => { + const p = new Promise((resolve, reject) => { + new BundleWalker(options).on('done', resolve).on('error', reject).start() + }) + return callback ? 
p.then(res => callback(null, res), callback) : p +} + +const walkSync = options => { + return new BundleWalkerSync(options).start().result +} + +module.exports = walk +walk.sync = walkSync +walk.BundleWalker = BundleWalker +walk.BundleWalkerSync = BundleWalkerSync diff --git a/node_modules/pacote/node_modules/npm-bundled/package.json b/node_modules/pacote/node_modules/npm-bundled/package.json new file mode 100644 index 0000000000000..c5daf35dbaa84 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-bundled/package.json @@ -0,0 +1,49 @@ +{ + "name": "npm-bundled", + "version": "4.0.0", + "description": "list things in node_modules that are bundledDependencies, or transitive dependencies thereof", + "main": "lib/index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/npm-bundled.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "mutate-fs": "^2.1.1", + "tap": "^16.3.0" + }, + "scripts": { + "test": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run eslint -- --fix", + "snap": "tap", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "files": [ + "bin/", + "lib/" + ], + "dependencies": { + "npm-normalize-package-bin": "^4.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.23.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/node_modules/pacote/node_modules/npm-normalize-package-bin/LICENSE b/node_modules/pacote/node_modules/npm-normalize-package-bin/LICENSE new file mode 100644 index 0000000000000..19cec97b18468 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-normalize-package-bin/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pacote/node_modules/npm-normalize-package-bin/lib/index.js b/node_modules/pacote/node_modules/npm-normalize-package-bin/lib/index.js new file mode 100644 index 0000000000000..3cb8478cf6e2f --- /dev/null +++ b/node_modules/pacote/node_modules/npm-normalize-package-bin/lib/index.js @@ -0,0 +1,64 @@ +// pass in a manifest with a 'bin' field here, and it'll turn it +// into a properly santized bin object +const { join, basename } = require('path') + +const normalize = pkg => + !pkg.bin ? removeBin(pkg) + : typeof pkg.bin === 'string' ? normalizeString(pkg) + : Array.isArray(pkg.bin) ? normalizeArray(pkg) + : typeof pkg.bin === 'object' ? 
normalizeObject(pkg) + : removeBin(pkg) + +const normalizeString = pkg => { + if (!pkg.name) { + return removeBin(pkg) + } + pkg.bin = { [pkg.name]: pkg.bin } + return normalizeObject(pkg) +} + +const normalizeArray = pkg => { + pkg.bin = pkg.bin.reduce((acc, k) => { + acc[basename(k)] = k + return acc + }, {}) + return normalizeObject(pkg) +} + +const removeBin = pkg => { + delete pkg.bin + return pkg +} + +const normalizeObject = pkg => { + const orig = pkg.bin + const clean = {} + let hasBins = false + Object.keys(orig).forEach(binKey => { + const base = join('/', basename(binKey.replace(/\\|:/g, '/'))).slice(1) + + if (typeof orig[binKey] !== 'string' || !base) { + return + } + + const binTarget = join('/', orig[binKey].replace(/\\/g, '/')) + .replace(/\\/g, '/').slice(1) + + if (!binTarget) { + return + } + + clean[base] = binTarget + hasBins = true + }) + + if (hasBins) { + pkg.bin = clean + } else { + delete pkg.bin + } + + return pkg +} + +module.exports = normalize diff --git a/node_modules/pacote/node_modules/unique-slug/package.json b/node_modules/pacote/node_modules/npm-normalize-package-bin/package.json similarity index 54% rename from node_modules/pacote/node_modules/unique-slug/package.json rename to node_modules/pacote/node_modules/npm-normalize-package-bin/package.json index 33732cdbb4285..a1aeef0e1e751 100644 --- a/node_modules/pacote/node_modules/unique-slug/package.json +++ b/node_modules/pacote/node_modules/npm-normalize-package-bin/package.json @@ -1,42 +1,40 @@ { - "name": "unique-slug", + "name": "npm-normalize-package-bin", "version": "4.0.0", - "description": "Generate a unique character string suitible for use in files and URLs.", + "description": "Turn any flavor of allowable package.json bin into a normalized object", "main": "lib/index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/npm-normalize-package-bin.git" + }, + "author": "GitHub Inc.", + "license": "ISC", "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "snap": "tap", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, - "keywords": [], - "author": "GitHub Inc.", - "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^3.1.0", - "@npmcli/template-oss": "4.5.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, - "repository": { - "type": "git", - "url": "https://github.com/npm/unique-slug.git" - }, - "dependencies": { - "imurmurhash": "^0.1.4" - }, "files": [ "bin/", "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.5.1" + "version": "4.23.3", + "publish": "true" }, "tap": { "nyc-arg": [ diff --git a/node_modules/pacote/node_modules/npm-packlist/LICENSE b/node_modules/pacote/node_modules/npm-packlist/LICENSE new file mode 100644 index 0000000000000..19129e315fe59 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-packlist/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pacote/node_modules/npm-packlist/lib/index.js b/node_modules/pacote/node_modules/npm-packlist/lib/index.js new file mode 100644 index 0000000000000..985f11ee3f738 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-packlist/lib/index.js @@ -0,0 +1,456 @@ +'use strict' + +const { Walker: IgnoreWalker } = require('ignore-walk') +const { lstatSync: lstat, readFileSync: readFile } = require('fs') +const { basename, dirname, extname, join, relative, resolve, sep } = require('path') + +// symbols used to represent synthetic rule sets +const defaultRules = Symbol('npm-packlist.rules.default') +const strictRules = Symbol('npm-packlist.rules.strict') + +// There may be others, but :?|<> are handled by node-tar +const nameIsBadForWindows = file => /\*/.test(file) + +// these are the default rules that are applied to everything except for non-link bundled deps +const defaults = [ + '.npmignore', + '.gitignore', + '**/.git', + '**/.svn', + '**/.hg', + '**/CVS', + '**/.git/**', + '**/.svn/**', + '**/.hg/**', + '**/CVS/**', + '/.lock-wscript', + '/.wafpickle-*', + '/build/config.gypi', + 'npm-debug.log', + '**/.npmrc', + '.*.swp', + '.DS_Store', + '**/.DS_Store/**', + '._*', + '**/._*/**', + '*.orig', + '/archived-packages/**', +] + +const strictDefaults = [ + // these are forcibly excluded + '/.git', +] + +const normalizePath = (path) => path.split('\\').join('/') + +const readOutOfTreeIgnoreFiles = (root, rel, result = []) => { + for (const file of ['.npmignore', '.gitignore']) { + try { + const ignoreContent = readFile(join(root, file), { encoding: 'utf8' }) + result.push(ignoreContent) + // break the loop immediately after reading, this allows us to prioritize + // the .npmignore and discard the .gitignore if one is present + break + } catch (err) { + // we ignore ENOENT errors completely because we don't care if the file doesn't exist + // but we throw everything else because failing to read a file that does exist is + // something that the user likely wants to know about + // istanbul ignore next -- we do not need to test a thrown error + if (err.code !== 'ENOENT') { + throw err + } + } + } + + if (!rel) { + return result + } + + const firstRel = rel.split(sep, 1)[0] + const newRoot = join(root, firstRel) + const newRel = relative(newRoot, join(root, rel)) + + return readOutOfTreeIgnoreFiles(newRoot, newRel, result) +} + +class PackWalker extends IgnoreWalker { + constructor (tree, opts) { + const options = { + ...opts, + includeEmpty: false, + follow: false, + // we path.resolve() here because ignore-walk doesn't do it and we want full paths + path: resolve(opts?.path || tree.path).replace(/\\/g, '/'), + ignoreFiles: opts?.ignoreFiles || [ + defaultRules, + 'package.json', + '.npmignore', + '.gitignore', + strictRules, + ], + } + + 
super(options) + this.isPackage = options.isPackage + this.seen = options.seen || new Set() + this.tree = tree + this.requiredFiles = options.requiredFiles || [] + + const additionalDefaults = [] + if (options.prefix && options.workspaces) { + const path = normalizePath(options.path) + const prefix = normalizePath(options.prefix) + const workspaces = options.workspaces.map((ws) => normalizePath(ws)) + + // istanbul ignore else - this does nothing unless we need it to + if (path !== prefix && workspaces.includes(path)) { + // if path and prefix are not the same directory, and workspaces has path in it + // then we know path is a workspace directory. in order to not drop ignore rules + // from directories between the workspaces root (prefix) and the workspace itself + // (path) we need to find and read those now + const relpath = relative(options.prefix, dirname(options.path)) + additionalDefaults.push(...readOutOfTreeIgnoreFiles(options.prefix, relpath)) + } else if (path === prefix) { + // on the other hand, if the path and prefix are the same, then we ignore workspaces + // so that we don't pack a workspace as part of the root project. append them as + // normalized relative paths from the root + additionalDefaults.push(...workspaces.map((w) => normalizePath(relative(options.path, w)))) + } + } + + // go ahead and inject the default rules now + this.injectRules(defaultRules, [...defaults, ...additionalDefaults]) + + if (!this.isPackage) { + // if this instance is not a package, then place some strict default rules, and append + // known required files for this directory + this.injectRules(strictRules, [ + ...strictDefaults, + ...this.requiredFiles.map((file) => `!${file}`), + ]) + } + } + + // overridden method: we intercept the reading of the package.json file here so that we can + // process it into both the package.json file rules as well as the strictRules synthetic rule set + addIgnoreFile (file, callback) { + // if we're adding anything other than package.json, then let ignore-walk handle it + if (file !== 'package.json' || !this.isPackage) { + return super.addIgnoreFile(file, callback) + } + + return this.processPackage(callback) + } + + // overridden method: if we're done, but we're a package, then we also need to evaluate bundles + // before we actually emit our done event + emit (ev, data) { + if (ev !== 'done' || !this.isPackage) { + return super.emit(ev, data) + } + + // we intentionally delay the done event while keeping the function sync here + // eslint-disable-next-line promise/catch-or-return, promise/always-return + this.gatherBundles().then(() => { + super.emit('done', this.result) + }) + return true + } + + // overridden method: before actually filtering, we make sure that we've removed the rules for + // files that should no longer take effect due to our order of precedence + filterEntries () { + if (this.ignoreRules['package.json']) { + // package.json means no .npmignore or .gitignore + this.ignoreRules['.npmignore'] = null + this.ignoreRules['.gitignore'] = null + } else if (this.ignoreRules['.npmignore']) { + // .npmignore means no .gitignore + this.ignoreRules['.gitignore'] = null + } + + return super.filterEntries() + } + + // overridden method: we never want to include anything that isn't a file or directory + onstat (opts, callback) { + if (!opts.st.isFile() && !opts.st.isDirectory()) { + return callback() + } + + return super.onstat(opts, callback) + } + + // overridden method: we want to refuse to pack files that are invalid, node-tar protects us from + // a 
lot of them but not all + stat (opts, callback) { + if (nameIsBadForWindows(opts.entry)) { + return callback() + } + + return super.stat(opts, callback) + } + + // overridden method: this is called to create options for a child walker when we step + // in to a normal child directory (this will never be a bundle). the default method here + // copies the root's `ignoreFiles` value, but we don't want to respect package.json for + // subdirectories, so we override it with a list that intentionally omits package.json + walkerOpt (entry, opts) { + let ignoreFiles = null + + // however, if we have a tree, and we have workspaces, and the directory we're about + // to step into is a workspace, then we _do_ want to respect its package.json + if (this.tree.workspaces) { + const workspaceDirs = [...this.tree.workspaces.values()] + .map((dir) => dir.replace(/\\/g, '/')) + + const entryPath = join(this.path, entry).replace(/\\/g, '/') + if (workspaceDirs.includes(entryPath)) { + ignoreFiles = [ + defaultRules, + 'package.json', + '.npmignore', + '.gitignore', + strictRules, + ] + } + } else { + ignoreFiles = [ + defaultRules, + '.npmignore', + '.gitignore', + strictRules, + ] + } + + return { + ...super.walkerOpt(entry, opts), + ignoreFiles, + // we map over our own requiredFiles and pass ones that are within this entry + requiredFiles: this.requiredFiles + .map((file) => { + if (relative(file, entry) === '..') { + return relative(entry, file).replace(/\\/g, '/') + } + return false + }) + .filter(Boolean), + } + } + + // overridden method: we want child walkers to be instances of this class, not ignore-walk + walker (entry, opts, callback) { + new PackWalker(this.tree, this.walkerOpt(entry, opts)).on('done', callback).start() + } + + // overridden method: we use a custom sort method to help compressibility + sort (a, b) { + // optimize for compressibility + // extname, then basename, then locale alphabetically + // https://twitter.com/isntitvacant/status/1131094910923231232 + const exta = extname(a).toLowerCase() + const extb = extname(b).toLowerCase() + const basea = basename(a).toLowerCase() + const baseb = basename(b).toLowerCase() + + return exta.localeCompare(extb, 'en') || + basea.localeCompare(baseb, 'en') || + a.localeCompare(b, 'en') + } + + // convenience method: this joins the given rules with newlines, appends a trailing newline, + // and calls the internal onReadIgnoreFile method + injectRules (filename, rules, callback = () => {}) { + this.onReadIgnoreFile(filename, `${rules.join('\n')}\n`, callback) + } + + // custom method: this is called by addIgnoreFile when we find a package.json, it uses the + // arborist tree to pull both default rules and strict rules for the package + processPackage (callback) { + const { + bin, + browser, + files, + main, + } = this.tree.package + + // rules in these arrays are inverted since they are patterns we want to _not_ ignore + const ignores = [] + const strict = [ + ...strictDefaults, + '!/package.json', + '!/readme{,.*[^~$]}', + '!/copying{,.*[^~$]}', + '!/license{,.*[^~$]}', + '!/licence{,.*[^~$]}', + '/.git', + '/node_modules', + '.npmrc', + '/package-lock.json', + '/yarn.lock', + '/pnpm-lock.yaml', + ] + + // if we have a files array in our package, we need to pull rules from it + if (files) { + for (let file of files) { + // invert the rule because these are things we want to include + if (file.startsWith('./')) { + file = file.slice(1) + } + if (file.endsWith('/*')) { + file += '*' + } + const inverse = `!${file}` + try { + // if an entry in the 
files array is a specific file, then we need to include it as a + // strict requirement for this package. if it's a directory or a pattern, it's a default + // pattern instead. this is ugly, but we have to stat to find out if it's a file + const stat = lstat(join(this.path, file.replace(/^!+/, '')).replace(/\\/g, '/')) + // if we have a file and we know that, it's strictly required + if (stat.isFile()) { + strict.unshift(inverse) + this.requiredFiles.push(file.startsWith('/') ? file.slice(1) : file) + } else if (stat.isDirectory()) { + // otherwise, it's a default ignore, and since we got here we know it's not a pattern + // so we include the directory contents + ignores.push(inverse) + ignores.push(`${inverse}/**`) + } + // if the thing exists, but is neither a file or a directory, we don't want it at all + } catch (err) { + // if lstat throws, then we assume we're looking at a pattern and treat it as a default + ignores.push(inverse) + } + } + + // we prepend a '*' to exclude everything, followed by our inverted file rules + // which now mean to include those + this.injectRules('package.json', ['*', ...ignores]) + } + + // browser is required + if (browser) { + strict.push(`!/${browser}`) + } + + // main is required + if (main) { + strict.push(`!/${main}`) + } + + // each bin is required + if (bin) { + for (const key in bin) { + strict.push(`!/${bin[key]}`) + } + } + + // and now we add all of the strict rules to our synthetic file + this.injectRules(strictRules, strict, callback) + } + + // custom method: after we've finished gathering the files for the root package, we call this + // before emitting the 'done' event in order to gather all of the files for bundled deps + async gatherBundles () { + if (this.seen.has(this.tree)) { + return + } + + // add this node to our seen tracker + this.seen.add(this.tree) + + // if we're the project root, then we look at our bundleDependencies, otherwise we got here + // because we're a bundled dependency of the root, which means we need to include all prod + // and optional dependencies in the bundle + let toBundle + if (this.tree.isProjectRoot) { + const { bundleDependencies } = this.tree.package + toBundle = bundleDependencies || [] + } else { + const { dependencies, optionalDependencies } = this.tree.package + toBundle = Object.keys(dependencies || {}).concat(Object.keys(optionalDependencies || {})) + } + + for (const dep of toBundle) { + const edge = this.tree.edgesOut.get(dep) + // no edgeOut = missing node, so skip it. we can't pack it if it's not here + // we also refuse to pack peer dependencies and dev dependencies + if (!edge || edge.peer || edge.dev) { + continue + } + + // get a reference to the node we're bundling + const node = this.tree.edgesOut.get(dep).to + // if there's no node, this is most likely an optional dependency that hasn't been + // installed. just skip it. + if (!node) { + continue + } + // we use node.path for the path because we want the location the node was linked to, + // not where it actually lives on disk + const path = node.path + // but link nodes don't have edgesOut, so we need to pass in the target of the node + // in order to make sure we correctly traverse its dependencies + const tree = node.target + + // and start building options to be passed to the walker for this package + const walkerOpts = { + path, + isPackage: true, + ignoreFiles: [], + seen: this.seen, // pass through seen so we can prevent infinite circular loops + } + + // if our node is a link, we apply defaultRules. 
we don't do this for regular bundled + // deps because their .npmignore and .gitignore files are excluded by default and may + // override defaults + if (node.isLink) { + walkerOpts.ignoreFiles.push(defaultRules) + } + + // _all_ nodes will follow package.json rules from their package root + walkerOpts.ignoreFiles.push('package.json') + + // only link nodes will obey .npmignore or .gitignore + if (node.isLink) { + walkerOpts.ignoreFiles.push('.npmignore') + walkerOpts.ignoreFiles.push('.gitignore') + } + + // _all_ nodes follow strict rules + walkerOpts.ignoreFiles.push(strictRules) + + // create a walker for this dependency and gather its results + const walker = new PackWalker(tree, walkerOpts) + const bundled = await new Promise((pResolve, pReject) => { + walker.on('error', pReject) + walker.on('done', pResolve) + walker.start() + }) + + // now we make sure we have our paths correct from the root, and accumulate everything into + // our own result set to deduplicate + const relativeFrom = relative(this.root, walker.path) + for (const file of bundled) { + this.result.add(join(relativeFrom, file).replace(/\\/g, '/')) + } + } + } +} + +const walk = (tree, options, callback) => { + if (typeof options === 'function') { + callback = options + options = {} + } + const p = new Promise((pResolve, pReject) => { + new PackWalker(tree, { ...options, isPackage: true }) + .on('done', pResolve).on('error', pReject).start() + }) + return callback ? p.then(res => callback(null, res), callback) : p +} + +module.exports = walk +walk.Walker = PackWalker diff --git a/node_modules/pacote/node_modules/@npmcli/fs/package.json b/node_modules/pacote/node_modules/npm-packlist/package.json similarity index 54% rename from node_modules/pacote/node_modules/@npmcli/fs/package.json rename to node_modules/pacote/node_modules/npm-packlist/package.json index 5261a11b78000..d7e0a4fd5a845 100644 --- a/node_modules/pacote/node_modules/@npmcli/fs/package.json +++ b/node_modules/pacote/node_modules/npm-packlist/package.json @@ -1,52 +1,61 @@ { - "name": "@npmcli/fs", - "version": "3.1.1", - "description": "filesystem utilities for the npm cli", + "name": "npm-packlist", + "version": "9.0.0", + "description": "Get a list of the files to add from a folder into an npm package", + "directories": { + "test": "test" + }, "main": "lib/index.js", + "dependencies": { + "ignore-walk": "^7.0.0" + }, + "author": "GitHub Inc.", + "license": "ISC", "files": [ "bin/", "lib/" ], + "devDependencies": { + "@npmcli/arborist": "^7.5.4", + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.23.3", + "mutate-fs": "^2.1.1", + "tap": "^16.0.1" + }, "scripts": { - "snap": "tap", "test": "tap", - "npmclilint": "npmcli-lint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", "posttest": "npm run lint", + "snap": "tap", "postsnap": "npm run lintfix --", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", + "npmclilint": "npmcli-lint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force" }, "repository": { "type": "git", - "url": "git+https://github.com/npm/fs.git" - }, - "keywords": [ - "npm", - "oss" - ], - "author": "GitHub Inc.", - "license": "ISC", - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "tap": "^16.0.1" - }, - "dependencies": { - "semver": "^7.3.5" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { 
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0" + "url": "git+https://github.com/npm/npm-packlist.git" }, "tap": { + "test-env": [ + "LC_ALL=sk" + ], "nyc-arg": [ "--exclude", "tap-snapshots/**" + ], + "files": [ + "test/*.js" ] + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.23.3", + "publish": true } } diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md deleted file mode 100644 index 5fc208ff122e0..0000000000000 --- a/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md +++ /dev/null @@ -1,20 +0,0 @@ - - -ISC License - -Copyright npm, Inc. - -Permission to use, copy, modify, and/or distribute this -software for any purpose with or without fee is hereby -granted, provided that the above copyright notice and this -permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL -WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO -EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pacote/node_modules/proc-log/LICENSE b/node_modules/pacote/node_modules/proc-log/LICENSE new file mode 100644 index 0000000000000..83837797202b7 --- /dev/null +++ b/node_modules/pacote/node_modules/proc-log/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) GitHub, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/pacote/node_modules/proc-log/lib/index.js b/node_modules/pacote/node_modules/proc-log/lib/index.js new file mode 100644 index 0000000000000..86d90861078da --- /dev/null +++ b/node_modules/pacote/node_modules/proc-log/lib/index.js @@ -0,0 +1,153 @@ +const META = Symbol('proc-log.meta') +module.exports = { + META: META, + output: { + LEVELS: [ + 'standard', + 'error', + 'buffer', + 'flush', + ], + KEYS: { + standard: 'standard', + error: 'error', + buffer: 'buffer', + flush: 'flush', + }, + standard: function (...args) { + return process.emit('output', 'standard', ...args) + }, + error: function (...args) { + return process.emit('output', 'error', ...args) + }, + buffer: function (...args) { + return process.emit('output', 'buffer', ...args) + }, + flush: function (...args) { + return process.emit('output', 'flush', ...args) + }, + }, + log: { + LEVELS: [ + 'notice', + 'error', + 'warn', + 'info', + 'verbose', + 'http', + 'silly', + 'timing', + 'pause', + 'resume', + ], + KEYS: { + notice: 'notice', + error: 'error', + warn: 'warn', + info: 'info', + verbose: 'verbose', + http: 'http', + silly: 'silly', + timing: 'timing', + pause: 'pause', + resume: 'resume', + }, + error: function (...args) { + return process.emit('log', 'error', ...args) + }, + notice: function (...args) { + return process.emit('log', 'notice', ...args) + }, + warn: function (...args) { + return process.emit('log', 'warn', ...args) + }, + info: function (...args) { + return process.emit('log', 'info', ...args) + }, + verbose: function (...args) { + return process.emit('log', 'verbose', ...args) + }, + http: function (...args) { + return process.emit('log', 'http', ...args) + }, + silly: function (...args) { + return process.emit('log', 'silly', ...args) + }, + timing: function (...args) { + return process.emit('log', 'timing', ...args) + }, + pause: function () { + return process.emit('log', 'pause') + }, + resume: function () { + return process.emit('log', 'resume') + }, + }, + time: { + LEVELS: [ + 'start', + 'end', + ], + KEYS: { + start: 'start', + end: 'end', + }, + start: function (name, fn) { + process.emit('time', 'start', name) + function end () { + return process.emit('time', 'end', name) + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return res.finally(end) + } + end() + return res + } + return end + }, + end: function (name) { + return process.emit('time', 'end', name) + }, + }, + input: { + LEVELS: [ + 'start', + 'end', + 'read', + ], + KEYS: { + start: 'start', + end: 'end', + read: 'read', + }, + start: function (fn) { + process.emit('input', 'start') + function end () { + return process.emit('input', 'end') + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return res.finally(end) + } + end() + return res + } + return end + }, + end: function () { + return process.emit('input', 'end') + }, + read: function (...args) { + let resolve, reject + const promise = new Promise((_resolve, _reject) => { + resolve = _resolve + reject = _reject + }) + process.emit('input', 'read', resolve, reject, ...args) + return promise + }, + }, +} diff --git a/node_modules/pacote/node_modules/proc-log/package.json b/node_modules/pacote/node_modules/proc-log/package.json new file mode 100644 index 0000000000000..957209d3954e5 --- /dev/null +++ b/node_modules/pacote/node_modules/proc-log/package.json @@ -0,0 +1,46 @@ +{ + "name": "proc-log", + "version": "5.0.0", + "files": [ + "bin/", + "lib/" + ], + "main": "lib/index.js", + 
"description": "just emit 'log' events on the process object", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/proc-log.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "posttest": "npm run lint", + "postsnap": "eslint index.js test/*.js --fix", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "tap": "^16.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.23.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md b/node_modules/pacote/node_modules/ssri/LICENSE.md similarity index 96% rename from node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md rename to node_modules/pacote/node_modules/ssri/LICENSE.md index 8d28acf866d93..e335388869f50 100644 --- a/node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md +++ b/node_modules/pacote/node_modules/ssri/LICENSE.md @@ -1,6 +1,6 @@ ISC License -Copyright (c) npm, Inc. +Copyright 2021 (c) npm, Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the diff --git a/node_modules/pacote/node_modules/ssri/lib/index.js b/node_modules/pacote/node_modules/ssri/lib/index.js new file mode 100644 index 0000000000000..7d749ed480fb9 --- /dev/null +++ b/node_modules/pacote/node_modules/ssri/lib/index.js @@ -0,0 +1,580 @@ +'use strict' + +const crypto = require('crypto') +const { Minipass } = require('minipass') + +const SPEC_ALGORITHMS = ['sha512', 'sha384', 'sha256'] +const DEFAULT_ALGORITHMS = ['sha512'] + +// TODO: this should really be a hardcoded list of algorithms we support, +// rather than [a-z0-9]. +const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i +const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/ +const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/ +const VCHAR_REGEX = /^[\x21-\x7E]+$/ + +const getOptString = options => options?.length ? `?${options.join('?')}` : '' + +class IntegrityStream extends Minipass { + #emittedIntegrity + #emittedSize + #emittedVerified + + constructor (opts) { + super() + this.size = 0 + this.opts = opts + + // may be overridden later, but set now for class consistency + this.#getOptions() + + // options used for calculating stream. can't be changed. + if (opts?.algorithms) { + this.algorithms = [...opts.algorithms] + } else { + this.algorithms = [...DEFAULT_ALGORITHMS] + } + if (this.algorithm !== null && !this.algorithms.includes(this.algorithm)) { + this.algorithms.push(this.algorithm) + } + + this.hashes = this.algorithms.map(crypto.createHash) + } + + #getOptions () { + // For verification + this.sri = this.opts?.integrity ? 
parse(this.opts?.integrity, this.opts) : null + this.expectedSize = this.opts?.size + + if (!this.sri) { + this.algorithm = null + } else if (this.sri.isHash) { + this.goodSri = true + this.algorithm = this.sri.algorithm + } else { + this.goodSri = !this.sri.isEmpty() + this.algorithm = this.sri.pickAlgorithm(this.opts) + } + + this.digests = this.goodSri ? this.sri[this.algorithm] : null + this.optString = getOptString(this.opts?.options) + } + + on (ev, handler) { + if (ev === 'size' && this.#emittedSize) { + return handler(this.#emittedSize) + } + + if (ev === 'integrity' && this.#emittedIntegrity) { + return handler(this.#emittedIntegrity) + } + + if (ev === 'verified' && this.#emittedVerified) { + return handler(this.#emittedVerified) + } + + return super.on(ev, handler) + } + + emit (ev, data) { + if (ev === 'end') { + this.#onEnd() + } + return super.emit(ev, data) + } + + write (data) { + this.size += data.length + this.hashes.forEach(h => h.update(data)) + return super.write(data) + } + + #onEnd () { + if (!this.goodSri) { + this.#getOptions() + } + const newSri = parse(this.hashes.map((h, i) => { + return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}` + }).join(' '), this.opts) + // Integrity verification mode + const match = this.goodSri && newSri.match(this.sri, this.opts) + if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) { + /* eslint-disable-next-line max-len */ + const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`) + err.code = 'EBADSIZE' + err.found = this.size + err.expected = this.expectedSize + err.sri = this.sri + this.emit('error', err) + } else if (this.sri && !match) { + /* eslint-disable-next-line max-len */ + const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = this.digests + err.algorithm = this.algorithm + err.sri = this.sri + this.emit('error', err) + } else { + this.#emittedSize = this.size + this.emit('size', this.size) + this.#emittedIntegrity = newSri + this.emit('integrity', newSri) + if (match) { + this.#emittedVerified = match + this.emit('verified', match) + } + } + } +} + +class Hash { + get isHash () { + return true + } + + constructor (hash, opts) { + const strict = opts?.strict + this.source = hash.trim() + + // set default values so that we make V8 happy to + // always see a familiar object template. + this.digest = '' + this.algorithm = '' + this.options = [] + + // 3.1. Integrity metadata (called "Hash" by ssri) + // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description + const match = this.source.match( + strict + ? 
STRICT_SRI_REGEX + : SRI_REGEX + ) + if (!match) { + return + } + if (strict && !SPEC_ALGORITHMS.includes(match[1])) { + return + } + this.algorithm = match[1] + this.digest = match[2] + + const rawOpts = match[3] + if (rawOpts) { + this.options = rawOpts.slice(1).split('?') + } + } + + hexDigest () { + return this.digest && Buffer.from(this.digest, 'base64').toString('hex') + } + + toJSON () { + return this.toString() + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + if (other.isIntegrity) { + const algo = other.pickAlgorithm(opts, [this.algorithm]) + + if (!algo) { + return false + } + + const foundHash = other[algo].find(hash => hash.digest === this.digest) + + if (foundHash) { + return foundHash + } + + return false + } + return other.digest === this.digest ? other : false + } + + toString (opts) { + if (opts?.strict) { + // Strict mode enforces the standard as close to the foot of the + // letter as it can. + if (!( + // The spec has very restricted productions for algorithms. + // https://www.w3.org/TR/CSP2/#source-list-syntax + SPEC_ALGORITHMS.includes(this.algorithm) && + // Usually, if someone insists on using a "different" base64, we + // leave it as-is, since there's multiple standards, and the + // specified is not a URL-safe variant. + // https://www.w3.org/TR/CSP2/#base64_value + this.digest.match(BASE64_REGEX) && + // Option syntax is strictly visual chars. + // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression + // https://tools.ietf.org/html/rfc5234#appendix-B.1 + this.options.every(opt => opt.match(VCHAR_REGEX)) + )) { + return '' + } + } + return `${this.algorithm}-${this.digest}${getOptString(this.options)}` + } +} + +function integrityHashToString (toString, sep, opts, hashes) { + const toStringIsNotEmpty = toString !== '' + + let shouldAddFirstSep = false + let complement = '' + + const lastIndex = hashes.length - 1 + + for (let i = 0; i < lastIndex; i++) { + const hashString = Hash.prototype.toString.call(hashes[i], opts) + + if (hashString) { + shouldAddFirstSep = true + + complement += hashString + complement += sep + } + } + + const finalHashString = Hash.prototype.toString.call(hashes[lastIndex], opts) + + if (finalHashString) { + shouldAddFirstSep = true + complement += finalHashString + } + + if (toStringIsNotEmpty && shouldAddFirstSep) { + return toString + sep + complement + } + + return toString + complement +} + +class Integrity { + get isIntegrity () { + return true + } + + toJSON () { + return this.toString() + } + + isEmpty () { + return Object.keys(this).length === 0 + } + + toString (opts) { + let sep = opts?.sep || ' ' + let toString = '' + + if (opts?.strict) { + // Entries must be separated by whitespace, according to spec. + sep = sep.replace(/\S+/g, ' ') + + for (const hash of SPEC_ALGORITHMS) { + if (this[hash]) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + } else { + for (const hash of Object.keys(this)) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + + return toString + } + + concat (integrity, opts) { + const other = typeof integrity === 'string' + ? integrity + : stringify(integrity, opts) + return parse(`${this.toString(opts)} ${other}`, opts) + } + + hexDigest () { + return parse(this, { single: true }).hexDigest() + } + + // add additional hashes to an integrity value, but prevent + // *changing* an existing integrity hash. 
+ merge (integrity, opts) { + const other = parse(integrity, opts) + for (const algo in other) { + if (this[algo]) { + if (!this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest))) { + throw new Error('hashes do not match, cannot update integrity') + } + } else { + this[algo] = other[algo] + } + } + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + const algo = other.pickAlgorithm(opts, Object.keys(this)) + return ( + !!algo && + this[algo] && + other[algo] && + this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest + ) + ) + ) || false + } + + // Pick the highest priority algorithm present, optionally also limited to a + // set of hashes found in another integrity. When limiting it may return + // nothing. + pickAlgorithm (opts, hashes) { + const pickAlgorithm = opts?.pickAlgorithm || getPrioritizedHash + const keys = Object.keys(this).filter(k => { + if (hashes?.length) { + return hashes.includes(k) + } + return true + }) + if (keys.length) { + return keys.reduce((acc, algo) => pickAlgorithm(acc, algo) || acc) + } + // no intersection between this and hashes, + return null + } +} + +module.exports.parse = parse +function parse (sri, opts) { + if (!sri) { + return null + } + if (typeof sri === 'string') { + return _parse(sri, opts) + } else if (sri.algorithm && sri.digest) { + const fullSri = new Integrity() + fullSri[sri.algorithm] = [sri] + return _parse(stringify(fullSri, opts), opts) + } else { + return _parse(stringify(sri, opts), opts) + } +} + +function _parse (integrity, opts) { + // 3.4.3. Parse metadata + // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata + if (opts?.single) { + return new Hash(integrity, opts) + } + const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => { + const hash = new Hash(string, opts) + if (hash.algorithm && hash.digest) { + const algo = hash.algorithm + if (!acc[algo]) { + acc[algo] = [] + } + acc[algo].push(hash) + } + return acc + }, new Integrity()) + return hashes.isEmpty() ? null : hashes +} + +module.exports.stringify = stringify +function stringify (obj, opts) { + if (obj.algorithm && obj.digest) { + return Hash.prototype.toString.call(obj, opts) + } else if (typeof obj === 'string') { + return stringify(parse(obj, opts), opts) + } else { + return Integrity.prototype.toString.call(obj, opts) + } +} + +module.exports.fromHex = fromHex +function fromHex (hexDigest, algorithm, opts) { + const optString = getOptString(opts?.options) + return parse( + `${algorithm}-${ + Buffer.from(hexDigest, 'hex').toString('base64') + }${optString}`, opts + ) +} + +module.exports.fromData = fromData +function fromData (data, opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + return algorithms.reduce((acc, algo) => { + const digest = crypto.createHash(algo).update(data).digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the string we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) +} + +module.exports.fromStream = fromStream +function fromStream (stream, opts) { + const istream = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(istream) + stream.on('error', reject) + istream.on('error', reject) + let sri + istream.on('integrity', s => { + sri = s + }) + istream.on('end', () => resolve(sri)) + istream.resume() + }) +} + +module.exports.checkData = checkData +function checkData (data, sri, opts) { + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + if (opts?.error) { + throw Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + ) + } else { + return false + } + } + const algorithm = sri.pickAlgorithm(opts) + const digest = crypto.createHash(algorithm).update(data).digest('base64') + const newSri = parse({ algorithm, digest }) + const match = newSri.match(sri, opts) + opts = opts || {} + if (match || !(opts.error)) { + return match + } else if (typeof opts.size === 'number' && (data.length !== opts.size)) { + /* eslint-disable-next-line max-len */ + const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`) + err.code = 'EBADSIZE' + err.found = data.length + err.expected = opts.size + err.sri = sri + throw err + } else { + /* eslint-disable-next-line max-len */ + const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = sri + err.algorithm = algorithm + err.sri = sri + throw err + } +} + +module.exports.checkStream = checkStream +function checkStream (stream, sri, opts) { + opts = opts || Object.create(null) + opts.integrity = sri + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + return Promise.reject(Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + )) + } + const checker = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(checker) + stream.on('error', reject) + checker.on('error', reject) + let verified + checker.on('verified', s => { + verified = s + }) + checker.on('end', () => resolve(verified)) + checker.resume() + }) +} + +module.exports.integrityStream = integrityStream +function integrityStream (opts = Object.create(null)) { + return new IntegrityStream(opts) +} + +module.exports.create = createIntegrity +function createIntegrity (opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + + const hashes = algorithms.map(crypto.createHash) + + return { + update: function (chunk, enc) { + hashes.forEach(h => h.update(chunk, enc)) + return this + }, + digest: function () { + const integrity = algorithms.reduce((acc, algo) => { + const digest = hashes.shift().digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the hash we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) + + return integrity + }, + } +} + +const NODE_HASHES = crypto.getHashes() + +// This is a Best Effort™ at a reasonable priority for hash algos +const DEFAULT_PRIORITY = [ + 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512', + // TODO - it's unclear _which_ of these Node will actually use as its name + // for the algorithm, so we guesswork it based on the OpenSSL names. + 'sha3', + 'sha3-256', 'sha3-384', 'sha3-512', + 'sha3_256', 'sha3_384', 'sha3_512', +].filter(algo => NODE_HASHES.includes(algo)) + +function getPrioritizedHash (algo1, algo2) { + /* eslint-disable-next-line max-len */ + return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase()) + ? algo1 + : algo2 +} diff --git a/node_modules/pacote/node_modules/ssri/package.json b/node_modules/pacote/node_modules/ssri/package.json new file mode 100644 index 0000000000000..83306cd044ec3 --- /dev/null +++ b/node_modules/pacote/node_modules/ssri/package.json @@ -0,0 +1,66 @@ +{ + "name": "ssri", + "version": "12.0.0", + "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "prerelease": "npm t", + "postrelease": "npm publish", + "posttest": "npm run lint", + "test": "tap", + "coverage": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run eslint -- --fix", + "snap": "tap", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "tap": { + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/ssri.git" + }, + "keywords": [ + "w3c", + "web", + "security", + "integrity", + "checksum", + "hashing", + "subresource integrity", + "sri", + "sri hash", + "sri string", + "sri generator", + "html" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "tap": "^16.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.23.3", + "publish": "true" + } +} diff --git a/node_modules/pacote/node_modules/unique-filename/LICENSE b/node_modules/pacote/node_modules/unique-filename/LICENSE deleted file mode 100644 index 69619c125ea7e..0000000000000 --- a/node_modules/pacote/node_modules/unique-filename/LICENSE +++ /dev/null @@ -1,5 +0,0 @@ -Copyright npm, Inc - -Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pacote/node_modules/unique-filename/lib/index.js b/node_modules/pacote/node_modules/unique-filename/lib/index.js deleted file mode 100644 index d067d2e709809..0000000000000 --- a/node_modules/pacote/node_modules/unique-filename/lib/index.js +++ /dev/null @@ -1,7 +0,0 @@ -var path = require('path') - -var uniqueSlug = require('unique-slug') - -module.exports = function (filepath, prefix, uniq) { - return path.join(filepath, (prefix ? prefix + '-' : '') + uniqueSlug(uniq)) -} diff --git a/node_modules/pacote/node_modules/unique-slug/lib/index.js b/node_modules/pacote/node_modules/unique-slug/lib/index.js deleted file mode 100644 index 1bac84d95d730..0000000000000 --- a/node_modules/pacote/node_modules/unique-slug/lib/index.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' -var MurmurHash3 = require('imurmurhash') - -module.exports = function (uniq) { - if (uniq) { - var hash = new MurmurHash3(uniq) - return ('00000000' + hash.result().toString(16)).slice(-8) - } else { - return (Math.random().toString(16) + '0000000').slice(2, 10) - } -} diff --git a/node_modules/pacote/node_modules/which/LICENSE b/node_modules/pacote/node_modules/which/LICENSE new file mode 100644 index 0000000000000..19129e315fe59 --- /dev/null +++ b/node_modules/pacote/node_modules/which/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/pacote/node_modules/which/bin/which.js b/node_modules/pacote/node_modules/which/bin/which.js new file mode 100755 index 0000000000000..6df16f21acf93 --- /dev/null +++ b/node_modules/pacote/node_modules/which/bin/which.js @@ -0,0 +1,52 @@ +#!/usr/bin/env node + +const which = require('../lib') +const argv = process.argv.slice(2) + +const usage = (err) => { + if (err) { + console.error(`which: ${err}`) + } + console.error('usage: which [-as] program ...') + process.exit(1) +} + +if (!argv.length) { + return usage() +} + +let dashdash = false +const [commands, flags] = argv.reduce((acc, arg) => { + if (dashdash || arg === '--') { + dashdash = true + return acc + } + + if (!/^-/.test(arg)) { + acc[0].push(arg) + return acc + } + + for (const flag of arg.slice(1).split('')) { + if (flag === 's') { + acc[1].silent = true + } else if (flag === 'a') { + acc[1].all = true + } else { + usage(`illegal option -- ${flag}`) + } + } + + return acc +}, [[], {}]) + +for (const command of commands) { + try { + const res = which.sync(command, { all: flags.all }) + if (!flags.silent) { + console.log([].concat(res).join('\n')) + } + } catch (err) { + process.exitCode = 1 + } +} diff --git a/node_modules/pacote/node_modules/which/lib/index.js b/node_modules/pacote/node_modules/which/lib/index.js new file mode 100644 index 0000000000000..2fd358baf888f --- /dev/null +++ b/node_modules/pacote/node_modules/which/lib/index.js @@ -0,0 +1,111 @@ +const { isexe, sync: isexeSync } = require('isexe') +const { join, delimiter, sep, posix } = require('path') + +const isWindows = process.platform === 'win32' + +// used to check for slashed in commands passed in. always checks for the posix +// seperator on all platforms, and checks for the current separator when not on +// a posix platform. don't use the isWindows check for this since that is mocked +// in tests but we still need the code to actually work when called. that is also +// why it is ignored from coverage. +/* istanbul ignore next */ +const rSlash = new RegExp(`[${posix.sep}${sep === posix.sep ? '' : sep}]`.replace(/(\\)/g, '\\$1')) +const rRel = new RegExp(`^\\.${rSlash.source}`) + +const getNotFoundError = (cmd) => + Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' }) + +const getPathInfo = (cmd, { + path: optPath = process.env.PATH, + pathExt: optPathExt = process.env.PATHEXT, + delimiter: optDelimiter = delimiter, +}) => { + // If it has a slash, then we don't bother searching the pathenv. + // just check the file itself, and that's it. + const pathEnv = cmd.match(rSlash) ? [''] : [ + // windows always checks the cwd first + ...(isWindows ? [process.cwd()] : []), + ...(optPath || /* istanbul ignore next: very unusual */ '').split(optDelimiter), + ] + + if (isWindows) { + const pathExtExe = optPathExt || + ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter) + const pathExt = pathExtExe.split(optDelimiter).flatMap((item) => [item, item.toLowerCase()]) + if (cmd.includes('.') && pathExt[0] !== '') { + pathExt.unshift('') + } + return { pathEnv, pathExt, pathExtExe } + } + + return { pathEnv, pathExt: [''] } +} + +const getPathPart = (raw, cmd) => { + const pathPart = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw + const prefix = !pathPart && rRel.test(cmd) ? 
cmd.slice(0, 2) : '' + return prefix + join(pathPart, cmd) +} + +const which = async (cmd, opt = {}) => { + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] + + for (const envPart of pathEnv) { + const p = getPathPart(envPart, cmd) + + for (const ext of pathExt) { + const withExt = p + ext + const is = await isexe(withExt, { pathExt: pathExtExe, ignoreErrors: true }) + if (is) { + if (!opt.all) { + return withExt + } + found.push(withExt) + } + } + } + + if (opt.all && found.length) { + return found + } + + if (opt.nothrow) { + return null + } + + throw getNotFoundError(cmd) +} + +const whichSync = (cmd, opt = {}) => { + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] + + for (const pathEnvPart of pathEnv) { + const p = getPathPart(pathEnvPart, cmd) + + for (const ext of pathExt) { + const withExt = p + ext + const is = isexeSync(withExt, { pathExt: pathExtExe, ignoreErrors: true }) + if (is) { + if (!opt.all) { + return withExt + } + found.push(withExt) + } + } + } + + if (opt.all && found.length) { + return found + } + + if (opt.nothrow) { + return null + } + + throw getNotFoundError(cmd) +} + +module.exports = which +which.sync = whichSync diff --git a/node_modules/pacote/node_modules/unique-filename/package.json b/node_modules/pacote/node_modules/which/package.json similarity index 50% rename from node_modules/pacote/node_modules/unique-filename/package.json rename to node_modules/pacote/node_modules/which/package.json index b2fbf0666489a..94184233c61c4 100644 --- a/node_modules/pacote/node_modules/unique-filename/package.json +++ b/node_modules/pacote/node_modules/which/package.json @@ -1,51 +1,52 @@ { - "name": "unique-filename", - "version": "3.0.0", - "description": "Generate a unique filename for use in temporary directories or caches.", - "main": "lib/index.js", - "scripts": { - "test": "tap", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap", - "posttest": "npm run lint" - }, + "author": "GitHub Inc.", + "name": "which", + "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.", + "version": "5.0.0", "repository": { "type": "git", - "url": "https://github.com/npm/unique-filename.git" + "url": "git+https://github.com/npm/node-which.git" + }, + "main": "lib/index.js", + "bin": { + "node-which": "./bin/which.js" }, - "keywords": [], - "author": "GitHub Inc.", "license": "ISC", - "bugs": { - "url": "https://github.com/iarna/unique-filename/issues" + "dependencies": { + "isexe": "^3.1.1" }, - "homepage": "https://github.com/iarna/unique-filename", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.5.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, - "dependencies": { - "unique-slug": "^4.0.0" + "scripts": { + "test": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run eslint -- --fix", + "snap": "tap", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "files": [ "bin/", "lib/" ], - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.5.1" - }, "tap": { + "check-coverage": true, "nyc-arg": [ "--exclude", "tap-snapshots/**" ] + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.23.3", + "publish": "true" } } diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json index caadaf2db50c8..0eb8261af96e0 100644 --- a/node_modules/pacote/package.json +++ b/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "18.0.6", + "version": "19.0.0", "description": "JavaScript package downloader", "author": "GitHub Inc.", "bin": { @@ -11,11 +11,12 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "timeout": 300, @@ -26,9 +27,9 @@ }, "devDependencies": { "@npmcli/arborist": "^7.1.0", - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "hosted-git-info": "^7.0.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "hosted-git-info": "^8.0.0", "mutate-fs": "^2.1.1", "nock": "^13.2.4", "npm-registry-mock": "^1.3.2", @@ -44,26 +45,26 @@ "git" ], "dependencies": { - "@npmcli/git": "^5.0.0", - "@npmcli/installed-package-contents": "^2.0.1", - "@npmcli/package-json": "^5.1.0", - "@npmcli/promise-spawn": "^7.0.0", - "@npmcli/run-script": "^8.0.0", - "cacache": "^18.0.0", + "@npmcli/git": "^6.0.0", + "@npmcli/installed-package-contents": "^3.0.0", + "@npmcli/package-json": "^6.0.0", + "@npmcli/promise-spawn": "^8.0.0", + "@npmcli/run-script": "^9.0.0", + "cacache": "^19.0.0", "fs-minipass": "^3.0.0", "minipass": "^7.0.2", - "npm-package-arg": "^11.0.0", - "npm-packlist": "^8.0.0", - "npm-pick-manifest": "^9.0.0", - "npm-registry-fetch": "^17.0.0", - "proc-log": "^4.0.0", + "npm-package-arg": "^12.0.0", + "npm-packlist": "^9.0.0", + "npm-pick-manifest": "^10.0.0", + "npm-registry-fetch": "^18.0.0", + "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "sigstore": "^2.2.0", - "ssri": "^10.0.0", + "ssri": "^12.0.0", "tar": "^6.1.11" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "repository": { "type": "git", @@ -71,7 +72,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "windowsCI": false, "publish": "true" } diff --git a/package-lock.json b/package-lock.json index 3b3d880efcdde..2b502a829e553 100644 --- a/package-lock.json +++ b/package-lock.json @@ -138,7 +138,7 @@ "npm-registry-fetch": "^18.0.1", "npm-user-validate": "^3.0.0", "p-map": "^4.0.0", - "pacote": "^18.0.6", + "pacote": "^19.0.0", "parse-conflict-json": "^3.0.1", "proc-log": "^4.2.0", "qrcode-terminal": "^0.12.0", @@ -234,7 +234,7 @@ "json-stringify-safe": "^5.0.1", "nock": "^13.3.3", "npm-package-arg": "^12.0.0", - "pacote": "^18.0.6", + "pacote": "^19.0.0", "tap": "^16.3.8" }, "engines": { @@ -1588,7 +1588,6 @@ "version": "5.0.8", "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz", "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==", - "inBundle": true, "license": "ISC", "dependencies": { "@npmcli/promise-spawn": "^7.0.0", @@ -1609,7 +1608,6 @@ "version": "7.0.2", "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-7.0.2.tgz", "integrity": "sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==", - "inBundle": true, "license": "ISC", "dependencies": { "which": "^4.0.0" @@ -1622,7 +1620,6 @@ "version": "7.0.2", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz", "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==", - "inBundle": true, "license": "ISC", "dependencies": { "lru-cache": "^10.0.1" @@ -1635,7 +1632,6 @@ "version": "4.1.3", "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz", "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==", - "inBundle": true, "license": "ISC", "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" @@ -1645,7 +1641,6 @@ "version": "6.3.0", "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-6.3.0.tgz", "integrity": "sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==", - "inBundle": true, "license": "BSD-2-Clause", "dependencies": { "semver": "^7.1.1" @@ -1658,7 +1653,6 @@ "version": "11.0.3", "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz", "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==", - "inBundle": true, "license": "ISC", "dependencies": { "hosted-git-info": "^7.0.0", @@ -1674,7 +1668,6 @@ "version": "9.1.0", "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-9.1.0.tgz", "integrity": "sha512-nkc+3pIIhqHVQr085X9d2JzPzLyjzQS96zbruppqC9aZRm/x8xx6xhI98gHtsfELP2bE+loHq8ZaHFHhe+NauA==", - "inBundle": true, "license": "ISC", "dependencies": { "npm-install-checks": "^6.0.0", @@ -1690,7 +1683,6 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-2.1.0.tgz", "integrity": "sha512-c8UuGLeZpm69BryRykLuKRyKFZYJsZSCT4aVY5ds4omyZqJ172ApzgfKJ5eV/r3HgLdUYgFVe54KSFVjKoe27w==", - "inBundle": true, "license": "ISC", "dependencies": { "npm-bundled": "^3.0.0", @@ -1745,6 +1737,22 @@ "node": "^16.14.0 || >=18.0.0" } }, + "node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/agent": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.2.2.tgz", + "integrity": 
"sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==", + "license": "ISC", + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, "node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/fs": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz", @@ -1757,6 +1765,62 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/package-json": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz", + "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==", + "license": "ISC", + "dependencies": { + "@npmcli/git": "^5.0.0", + "glob": "^10.2.2", + "hosted-git-info": "^7.0.0", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^6.0.0", + "proc-log": "^4.0.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/promise-spawn": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-7.0.2.tgz", + "integrity": "sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==", + "license": "ISC", + "dependencies": { + "which": "^4.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/redact": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-2.0.1.tgz", + "integrity": "sha512-YgsR5jCQZhVmTJvjduTOIHph0L73pK8xwMVaDY0PatySqVM9AZj93jpoXYSJqfHFxFkN9dmqTw6OiqExsS3LPw==", + "license": "ISC", + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/run-script": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-8.1.0.tgz", + "integrity": "sha512-y7efHHwghQfk28G2z3tlZ67pLG0XdfYbcVG26r7YIXALRsrVQcTq4/tdenSmdOrEsNahIYA/eh8aEVROWGFUDg==", + "license": "ISC", + "dependencies": { + "@npmcli/node-gyp": "^3.0.0", + "@npmcli/package-json": "^5.0.0", + "@npmcli/promise-spawn": "^7.0.0", + "node-gyp": "^10.0.0", + "proc-log": "^4.0.0", + "which": "^4.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, "node_modules/@npmcli/metavuln-calculator/node_modules/cacache": { "version": "18.0.4", "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.4.tgz", @@ -1780,6 +1844,18 @@ "node": "^16.14.0 || >=18.0.0" } }, + "node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz", + "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==", + "license": "ISC", + "dependencies": { + "lru-cache": "^10.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, "node_modules/@npmcli/metavuln-calculator/node_modules/json-parse-even-better-errors": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz", @@ -1789,6 +1865,135 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + 
"node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen": { + "version": "13.0.1", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz", + "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==", + "license": "ISC", + "dependencies": { + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/normalize-package-data": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.2.tgz", + "integrity": "sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==", + "license": "BSD-2-Clause", + "dependencies": { + "hosted-git-info": "^7.0.0", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/npm-install-checks": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-6.3.0.tgz", + "integrity": "sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==", + "license": "BSD-2-Clause", + "dependencies": { + "semver": "^7.1.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz", + "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==", + "license": "ISC", + "dependencies": { + "hosted-git-info": "^7.0.0", + "proc-log": "^4.0.0", + "semver": "^7.3.5", + "validate-npm-package-name": "^5.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-9.1.0.tgz", + "integrity": "sha512-nkc+3pIIhqHVQr085X9d2JzPzLyjzQS96zbruppqC9aZRm/x8xx6xhI98gHtsfELP2bE+loHq8ZaHFHhe+NauA==", + "license": "ISC", + "dependencies": { + "npm-install-checks": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0", + "npm-package-arg": "^11.0.0", + "semver": "^7.3.5" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch": { + "version": "17.1.0", + "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-17.1.0.tgz", + "integrity": "sha512-5+bKQRH0J1xG1uZ1zMNvxW0VEyoNWgJpY9UDuluPFLKDfJ9u2JmmjmTJV1srBGQOROfdBMiVvnH2Zvpbm+xkVA==", + "license": "ISC", + "dependencies": { + "@npmcli/redact": "^2.0.0", + "jsonparse": "^1.3.1", + "make-fetch-happen": "^13.0.0", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minizlib": "^2.1.2", + "npm-package-arg": "^11.0.0", + "proc-log": "^4.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/pacote": { + "version": "18.0.6", + "resolved": 
"https://registry.npmjs.org/pacote/-/pacote-18.0.6.tgz", + "integrity": "sha512-+eK3G27SMwsB8kLIuj4h1FUhHtwiEUo21Tw8wNjmvdlpOEr613edv+8FUsTj/4F/VN5ywGE19X18N7CC2EJk6A==", + "license": "ISC", + "dependencies": { + "@npmcli/git": "^5.0.0", + "@npmcli/installed-package-contents": "^2.0.1", + "@npmcli/package-json": "^5.1.0", + "@npmcli/promise-spawn": "^7.0.0", + "@npmcli/run-script": "^8.0.0", + "cacache": "^18.0.0", + "fs-minipass": "^3.0.0", + "minipass": "^7.0.2", + "npm-package-arg": "^11.0.0", + "npm-packlist": "^8.0.0", + "npm-pick-manifest": "^9.0.0", + "npm-registry-fetch": "^17.0.0", + "proc-log": "^4.0.0", + "promise-retry": "^2.0.1", + "sigstore": "^2.2.0", + "ssri": "^10.0.0", + "tar": "^6.1.11" + }, + "bin": { + "pacote": "bin/index.js" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, "node_modules/@npmcli/metavuln-calculator/node_modules/unique-filename": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz", @@ -1834,7 +2039,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-3.0.0.tgz", "integrity": "sha512-gp8pRXC2oOxu0DUE1/M3bYtb1b3/DbJ5aM113+XJBgfXdussRAsX0YOrOhdd8WvnAR6auDBvJomGAkLKA5ydxA==", - "inBundle": true, "license": "ISC", "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" @@ -7052,7 +7256,6 @@ "version": "6.0.5", "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-6.0.5.tgz", "integrity": "sha512-VuuG0wCnjhnylG1ABXT3dAuIpTNDs/G8jlpmwXY03fXoXy/8ZK8/T+hMzt8L4WnrLCJgdybqgPagnF/f97cg3A==", - "inBundle": true, "license": "ISC", "dependencies": { "minimatch": "^9.0.0" @@ -10244,7 +10447,6 @@ "version": "3.0.1", "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-3.0.1.tgz", "integrity": "sha512-+AvaheE/ww1JEwRHOrn4WHNzOxGtVp+adrg2AeZS/7KuxGUYFuBta98wYpfHBbJp6Tg6j1NKSEVHNcfZzJHQwQ==", - "inBundle": true, "license": "ISC", "dependencies": { "npm-normalize-package-bin": "^3.0.0" @@ -10270,7 +10472,6 @@ "version": "3.0.1", "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz", "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==", - "inBundle": true, "license": "ISC", "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" @@ -10316,7 +10517,6 @@ "version": "8.0.2", "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-8.0.2.tgz", "integrity": "sha512-shYrPFIS/JLP4oQmAwDyk5HcyysKW8/JLTEA32S0Z5TzvpaeeX2yMFfoK1fjEBnCBvVyIB/Jj/GBFdm0wsgzbA==", - "inBundle": true, "license": "ISC", "dependencies": { "ignore-walk": "^6.0.4" @@ -11042,302 +11242,171 @@ "license": "BlueOak-1.0.0" }, "node_modules/pacote": { - "version": "18.0.6", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-18.0.6.tgz", - "integrity": "sha512-+eK3G27SMwsB8kLIuj4h1FUhHtwiEUo21Tw8wNjmvdlpOEr613edv+8FUsTj/4F/VN5ywGE19X18N7CC2EJk6A==", + "version": "19.0.0", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-19.0.0.tgz", + "integrity": "sha512-953pUJqILTeaRvKFcQ78unsNc3Nl4PyVHTTsAUmvSmJ0NXs0LTWKAl5tMF2CXPRXA16RdCMYI9EKlV4CCi2T5g==", "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/git": "^5.0.0", - "@npmcli/installed-package-contents": "^2.0.1", - "@npmcli/package-json": "^5.1.0", - "@npmcli/promise-spawn": "^7.0.0", - "@npmcli/run-script": "^8.0.0", - "cacache": "^18.0.0", + "@npmcli/git": "^6.0.0", + "@npmcli/installed-package-contents": "^3.0.0", + "@npmcli/package-json": "^6.0.0", + "@npmcli/promise-spawn": "^8.0.0", + 
"@npmcli/run-script": "^9.0.0", + "cacache": "^19.0.0", "fs-minipass": "^3.0.0", "minipass": "^7.0.2", - "npm-package-arg": "^11.0.0", - "npm-packlist": "^8.0.0", - "npm-pick-manifest": "^9.0.0", - "npm-registry-fetch": "^17.0.0", - "proc-log": "^4.0.0", + "npm-package-arg": "^12.0.0", + "npm-packlist": "^9.0.0", + "npm-pick-manifest": "^10.0.0", + "npm-registry-fetch": "^18.0.0", + "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "sigstore": "^2.2.0", - "ssri": "^10.0.0", + "ssri": "^12.0.0", "tar": "^6.1.11" }, "bin": { "pacote": "bin/index.js" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/pacote/node_modules/@npmcli/agent": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.2.2.tgz", - "integrity": "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==", + "node_modules/pacote/node_modules/@npmcli/git": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-6.0.1.tgz", + "integrity": "sha512-BBWMMxeQzalmKadyimwb2/VVQyJB01PH0HhVSNLHNBDZN/M/h/02P6f8fxedIiFhpMj11SO9Ep5tKTBE7zL2nw==", "inBundle": true, "license": "ISC", "dependencies": { - "agent-base": "^7.1.0", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.1", + "@npmcli/promise-spawn": "^8.0.0", + "ini": "^5.0.0", "lru-cache": "^10.0.1", - "socks-proxy-agent": "^8.0.3" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/pacote/node_modules/@npmcli/fs": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz", - "integrity": "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==", - "inBundle": true, - "license": "ISC", - "dependencies": { - "semver": "^7.3.5" + "npm-pick-manifest": "^10.0.0", + "proc-log": "^5.0.0", + "promise-inflight": "^1.0.1", + "promise-retry": "^2.0.1", + "semver": "^7.3.5", + "which": "^5.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/pacote/node_modules/@npmcli/package-json": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz", - "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==", + "node_modules/pacote/node_modules/@npmcli/installed-package-contents": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-3.0.0.tgz", + "integrity": "sha512-fkxoPuFGvxyrH+OQzyTkX2LUEamrF4jZSmxjAtPPHHGO0dqsQ8tTKjnIS8SAnPHdk2I03BDtSMR5K/4loKg79Q==", "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/git": "^5.0.0", - "glob": "^10.2.2", - "hosted-git-info": "^7.0.0", - "json-parse-even-better-errors": "^3.0.0", - "normalize-package-data": "^6.0.0", - "proc-log": "^4.0.0", - "semver": "^7.5.3" + "npm-bundled": "^4.0.0", + "npm-normalize-package-bin": "^4.0.0" }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/pacote/node_modules/@npmcli/promise-spawn": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-7.0.2.tgz", - "integrity": "sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==", - "inBundle": true, - "license": "ISC", - "dependencies": { - "which": "^4.0.0" + "bin": { + "installed-package-contents": "bin/index.js" }, "engines": { - "node": "^16.14.0 || 
>=18.0.0" - } - }, - "node_modules/pacote/node_modules/@npmcli/redact": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-2.0.1.tgz", - "integrity": "sha512-YgsR5jCQZhVmTJvjduTOIHph0L73pK8xwMVaDY0PatySqVM9AZj93jpoXYSJqfHFxFkN9dmqTw6OiqExsS3LPw==", - "inBundle": true, - "license": "ISC", - "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/pacote/node_modules/@npmcli/run-script": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-8.1.0.tgz", - "integrity": "sha512-y7efHHwghQfk28G2z3tlZ67pLG0XdfYbcVG26r7YIXALRsrVQcTq4/tdenSmdOrEsNahIYA/eh8aEVROWGFUDg==", + "node_modules/pacote/node_modules/ignore-walk": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-7.0.0.tgz", + "integrity": "sha512-T4gbf83A4NH95zvhVYZc+qWocBBGlpzUXLPGurJggw/WIOwicfXJChLDP/iBZnN5WqROSu5Bm3hhle4z8a8YGQ==", "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/node-gyp": "^3.0.0", - "@npmcli/package-json": "^5.0.0", - "@npmcli/promise-spawn": "^7.0.0", - "node-gyp": "^10.0.0", - "proc-log": "^4.0.0", - "which": "^4.0.0" + "minimatch": "^9.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/pacote/node_modules/cacache": { - "version": "18.0.4", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.4.tgz", - "integrity": "sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==", + "node_modules/pacote/node_modules/isexe": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", + "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", "inBundle": true, "license": "ISC", - "dependencies": { - "@npmcli/fs": "^3.1.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^10.0.1", - "minipass": "^7.0.3", - "minipass-collect": "^2.0.1", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" - }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": ">=16" } }, - "node_modules/pacote/node_modules/hosted-git-info": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz", - "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==", + "node_modules/pacote/node_modules/npm-bundled": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-4.0.0.tgz", + "integrity": "sha512-IxaQZDMsqfQ2Lz37VvyyEtKLe8FsRZuysmedy/N06TU1RyVppYKXrO4xIhR0F+7ubIBox6Q7nir6fQI3ej39iA==", "inBundle": true, "license": "ISC", "dependencies": { - "lru-cache": "^10.0.1" + "npm-normalize-package-bin": "^4.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/pacote/node_modules/json-parse-even-better-errors": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz", - "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==", - "inBundle": true, - "license": "MIT", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/pacote/node_modules/make-fetch-happen": { - "version": "13.0.1", - "resolved": 
"https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz", - "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==", + "node_modules/pacote/node_modules/npm-normalize-package-bin": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-4.0.0.tgz", + "integrity": "sha512-TZKxPvItzai9kN9H/TkmCtx/ZN/hvr3vUycjlfmH0ootY9yFBzNOpiXAdIn1Iteqsvk4lQn6B5PTrt+n6h8k/w==", "inBundle": true, "license": "ISC", - "dependencies": { - "@npmcli/agent": "^2.0.0", - "cacache": "^18.0.0", - "http-cache-semantics": "^4.1.1", - "is-lambda": "^1.0.1", - "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "proc-log": "^4.2.0", - "promise-retry": "^2.0.1", - "ssri": "^10.0.0" - }, "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/pacote/node_modules/normalize-package-data": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.2.tgz", - "integrity": "sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==", - "inBundle": true, - "license": "BSD-2-Clause", - "dependencies": { - "hosted-git-info": "^7.0.0", - "semver": "^7.3.5", - "validate-npm-package-license": "^3.0.4" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/pacote/node_modules/npm-install-checks": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-6.3.0.tgz", - "integrity": "sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==", - "inBundle": true, - "license": "BSD-2-Clause", - "dependencies": { - "semver": "^7.1.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/pacote/node_modules/npm-package-arg": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz", - "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==", + "node_modules/pacote/node_modules/npm-packlist": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-9.0.0.tgz", + "integrity": "sha512-8qSayfmHJQTx3nJWYbbUmflpyarbLMBc6LCAjYsiGtXxDB68HaZpb8re6zeaLGxZzDuMdhsg70jryJe+RrItVQ==", "inBundle": true, "license": "ISC", "dependencies": { - "hosted-git-info": "^7.0.0", - "proc-log": "^4.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^5.0.0" + "ignore-walk": "^7.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/pacote/node_modules/npm-pick-manifest": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-9.1.0.tgz", - "integrity": "sha512-nkc+3pIIhqHVQr085X9d2JzPzLyjzQS96zbruppqC9aZRm/x8xx6xhI98gHtsfELP2bE+loHq8ZaHFHhe+NauA==", + "node_modules/pacote/node_modules/proc-log": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-5.0.0.tgz", + "integrity": "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ==", "inBundle": true, "license": "ISC", - "dependencies": { - "npm-install-checks": "^6.0.0", - "npm-normalize-package-bin": "^3.0.0", - "npm-package-arg": "^11.0.0", - "semver": "^7.3.5" - }, "engines": { - "node": 
"^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/pacote/node_modules/npm-registry-fetch": { - "version": "17.1.0", - "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-17.1.0.tgz", - "integrity": "sha512-5+bKQRH0J1xG1uZ1zMNvxW0VEyoNWgJpY9UDuluPFLKDfJ9u2JmmjmTJV1srBGQOROfdBMiVvnH2Zvpbm+xkVA==", + "node_modules/pacote/node_modules/ssri": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-12.0.0.tgz", + "integrity": "sha512-S7iGNosepx9RadX82oimUkvr0Ct7IjJbEbs4mJcTxst8um95J3sDYU1RBEOvdu6oL1Wek2ODI5i4MAw+dZ6cAQ==", "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/redact": "^2.0.0", - "jsonparse": "^1.3.1", - "make-fetch-happen": "^13.0.0", - "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", - "minizlib": "^2.1.2", - "npm-package-arg": "^11.0.0", - "proc-log": "^4.0.0" + "minipass": "^7.0.3" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/pacote/node_modules/unique-filename": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz", - "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==", + "node_modules/pacote/node_modules/which": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz", + "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==", "inBundle": true, "license": "ISC", "dependencies": { - "unique-slug": "^4.0.0" + "isexe": "^3.1.1" }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/pacote/node_modules/unique-slug": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz", - "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==", - "inBundle": true, - "license": "ISC", - "dependencies": { - "imurmurhash": "^0.1.4" + "bin": { + "node-which": "bin/which.js" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } }, "node_modules/parent-module": { @@ -17099,7 +17168,7 @@ "npm-package-arg": "^12.0.0", "npm-pick-manifest": "^10.0.0", "npm-registry-fetch": "^18.0.1", - "pacote": "^18.0.6", + "pacote": "^19.0.0", "parse-conflict-json": "^3.0.0", "proc-log": "^4.2.0", "proggy": "^2.0.0", @@ -17180,7 +17249,7 @@ "diff": "^5.1.0", "minimatch": "^9.0.4", "npm-package-arg": "^12.0.0", - "pacote": "^18.0.6", + "pacote": "^19.0.0", "tar": "^6.2.1" }, "devDependencies": { @@ -17200,7 +17269,7 @@ "@npmcli/run-script": "^9.0.1", "ci-info": "^4.0.0", "npm-package-arg": "^12.0.0", - "pacote": "^18.0.6", + "pacote": "^19.0.0", "proc-log": "^4.2.0", "read": "^3.0.1", "read-package-json-fast": "^3.0.2", @@ -17278,7 +17347,7 @@ "@npmcli/arborist": "^7.5.4", "@npmcli/run-script": "^9.0.1", "npm-package-arg": "^12.0.0", - "pacote": "^18.0.6" + "pacote": "^19.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", diff --git a/package.json b/package.json index 2fb2a5d0bd295..7bb28b09b67a6 100644 --- a/package.json +++ b/package.json @@ -103,7 +103,7 @@ "npm-registry-fetch": "^18.0.1", "npm-user-validate": "^3.0.0", "p-map": "^4.0.0", - "pacote": "^18.0.6", + "pacote": "^19.0.0", "parse-conflict-json": "^3.0.1", "proc-log": "^4.2.0", "qrcode-terminal": "^0.12.0", diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index 
c2e687ac31a4f..ec35ef1dca2ff 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -27,7 +27,7 @@ "npm-package-arg": "^12.0.0", "npm-pick-manifest": "^10.0.0", "npm-registry-fetch": "^18.0.1", - "pacote": "^18.0.6", + "pacote": "^19.0.0", "parse-conflict-json": "^3.0.0", "proc-log": "^4.2.0", "proggy": "^2.0.0", diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json index 819a56ed3febd..295cc05c7b519 100644 --- a/workspaces/libnpmdiff/package.json +++ b/workspaces/libnpmdiff/package.json @@ -53,7 +53,7 @@ "diff": "^5.1.0", "minimatch": "^9.0.4", "npm-package-arg": "^12.0.0", - "pacote": "^18.0.6", + "pacote": "^19.0.0", "tar": "^6.2.1" }, "templateOSS": { diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json index 202c68bfa8247..ccdc3135d731a 100644 --- a/workspaces/libnpmexec/package.json +++ b/workspaces/libnpmexec/package.json @@ -64,7 +64,7 @@ "@npmcli/run-script": "^9.0.1", "ci-info": "^4.0.0", "npm-package-arg": "^12.0.0", - "pacote": "^18.0.6", + "pacote": "^19.0.0", "proc-log": "^4.2.0", "read": "^3.0.1", "read-package-json-fast": "^3.0.2", diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json index a697fefcaf082..27f26fc1c3bd4 100644 --- a/workspaces/libnpmpack/package.json +++ b/workspaces/libnpmpack/package.json @@ -40,7 +40,7 @@ "@npmcli/arborist": "^7.5.4", "@npmcli/run-script": "^9.0.1", "npm-package-arg": "^12.0.0", - "pacote": "^18.0.6" + "pacote": "^19.0.0" }, "engines": { "node": "^18.17.0 || >=20.5.0"