From a76b724aa5961edd97e1592ff255db8f5aa9d33f Mon Sep 17 00:00:00 2001
From: npm CLI robot
Date: Sat, 5 Oct 2024 06:54:06 -0700
Subject: [PATCH] deps: upgrade npm to 10.9.0

PR-URL: https://github.com/nodejs/node/pull/55255
Reviewed-By: Luigi Pinca
Reviewed-By: Rafael Gonzaga
Reviewed-By: Marco Ippolito
---
 deps/npm/docs/content/commands/npm-init.md | 2 +-
 deps/npm/docs/content/commands/npm-install.md | 4 -
 deps/npm/docs/content/commands/npm-ls.md | 2 +-
 deps/npm/docs/content/commands/npm.md | 2 +-
 .../content/configuring-npm/package-json.md | 30 +
 deps/npm/docs/output/commands/npm-access.html | 4 +-
 .../npm/docs/output/commands/npm-adduser.html | 4 +-
 deps/npm/docs/output/commands/npm-audit.html | 4 +-
 deps/npm/docs/output/commands/npm-bugs.html | 4 +-
 deps/npm/docs/output/commands/npm-cache.html | 4 +-
 deps/npm/docs/output/commands/npm-ci.html | 4 +-
 .../docs/output/commands/npm-completion.html | 4 +-
 deps/npm/docs/output/commands/npm-config.html | 4 +-
 deps/npm/docs/output/commands/npm-dedupe.html | 4 +-
 .../docs/output/commands/npm-deprecate.html | 4 +-
 deps/npm/docs/output/commands/npm-diff.html | 4 +-
 .../docs/output/commands/npm-dist-tag.html | 4 +-
 deps/npm/docs/output/commands/npm-docs.html | 4 +-
 deps/npm/docs/output/commands/npm-doctor.html | 4 +-
 deps/npm/docs/output/commands/npm-edit.html | 4 +-
 deps/npm/docs/output/commands/npm-exec.html | 4 +-
 .../npm/docs/output/commands/npm-explain.html | 4 +-
 .../npm/docs/output/commands/npm-explore.html | 4 +-
 .../docs/output/commands/npm-find-dupes.html | 4 +-
 deps/npm/docs/output/commands/npm-fund.html | 4 +-
 .../docs/output/commands/npm-help-search.html | 4 +-
 deps/npm/docs/output/commands/npm-help.html | 4 +-
 deps/npm/docs/output/commands/npm-hook.html | 4 +-
 deps/npm/docs/output/commands/npm-init.html | 6 +-
 .../output/commands/npm-install-ci-test.html | 4 +-
 .../output/commands/npm-install-test.html | 4 +-
 .../npm/docs/output/commands/npm-install.html | 7 +-
 deps/npm/docs/output/commands/npm-link.html | 4 +-
 deps/npm/docs/output/commands/npm-login.html | 4 +-
 deps/npm/docs/output/commands/npm-logout.html | 4 +-
 deps/npm/docs/output/commands/npm-ls.html | 6 +-
 deps/npm/docs/output/commands/npm-org.html | 4 +-
 .../docs/output/commands/npm-outdated.html | 4 +-
 deps/npm/docs/output/commands/npm-owner.html | 4 +-
 deps/npm/docs/output/commands/npm-pack.html | 4 +-
 deps/npm/docs/output/commands/npm-ping.html | 4 +-
 deps/npm/docs/output/commands/npm-pkg.html | 4 +-
 deps/npm/docs/output/commands/npm-prefix.html | 4 +-
 .../npm/docs/output/commands/npm-profile.html | 4 +-
 deps/npm/docs/output/commands/npm-prune.html | 4 +-
 .../npm/docs/output/commands/npm-publish.html | 4 +-
 deps/npm/docs/output/commands/npm-query.html | 4 +-
 .../npm/docs/output/commands/npm-rebuild.html | 4 +-
 deps/npm/docs/output/commands/npm-repo.html | 4 +-
 .../npm/docs/output/commands/npm-restart.html | 4 +-
 deps/npm/docs/output/commands/npm-root.html | 4 +-
 .../docs/output/commands/npm-run-script.html | 4 +-
 deps/npm/docs/output/commands/npm-sbom.html | 4 +-
 deps/npm/docs/output/commands/npm-search.html | 4 +-
 .../docs/output/commands/npm-shrinkwrap.html | 4 +-
 deps/npm/docs/output/commands/npm-star.html | 4 +-
 deps/npm/docs/output/commands/npm-stars.html | 4 +-
 deps/npm/docs/output/commands/npm-start.html | 4 +-
 deps/npm/docs/output/commands/npm-stop.html | 4 +-
 deps/npm/docs/output/commands/npm-team.html | 4 +-
 deps/npm/docs/output/commands/npm-test.html | 4 +-
 deps/npm/docs/output/commands/npm-token.html | 4 +-
 .../docs/output/commands/npm-uninstall.html | 4 +-
 .../docs/output/commands/npm-unpublish.html | 4 +-
 deps/npm/docs/output/commands/npm-unstar.html | 4 +-
 deps/npm/docs/output/commands/npm-update.html | 4 +-
 .../npm/docs/output/commands/npm-version.html | 4 +-
 deps/npm/docs/output/commands/npm-view.html | 4 +-
 deps/npm/docs/output/commands/npm-whoami.html | 4 +-
 deps/npm/docs/output/commands/npm.html | 6 +-
 deps/npm/docs/output/commands/npx.html | 4 +-
 .../docs/output/configuring-npm/folders.html | 4 +-
 .../docs/output/configuring-npm/install.html | 4 +-
 .../output/configuring-npm/npm-global.html | 4 +-
 .../docs/output/configuring-npm/npm-json.html | 29 +-
 .../configuring-npm/npm-shrinkwrap-json.html | 4 +-
 .../docs/output/configuring-npm/npmrc.html | 4 +-
 .../output/configuring-npm/package-json.html | 29 +-
 .../configuring-npm/package-lock-json.html | 4 +-
 deps/npm/docs/output/using-npm/config.html | 4 +-
 .../using-npm/dependency-selectors.html | 4 +-
 .../npm/docs/output/using-npm/developers.html | 4 +-
 deps/npm/docs/output/using-npm/logging.html | 4 +-
 deps/npm/docs/output/using-npm/orgs.html | 4 +-
 .../docs/output/using-npm/package-spec.html | 4 +-
 deps/npm/docs/output/using-npm/registry.html | 4 +-
 deps/npm/docs/output/using-npm/removal.html | 4 +-
 deps/npm/docs/output/using-npm/scope.html | 4 +-
 deps/npm/docs/output/using-npm/scripts.html | 4 +-
 .../npm/docs/output/using-npm/workspaces.html | 4 +-
 deps/npm/lib/arborist-cmd.js | 1 +
 deps/npm/lib/base-cmd.js | 61 +-
 deps/npm/lib/commands/init.js | 2 +-
 deps/npm/lib/commands/run-script.js | 1 +
 deps/npm/lib/npm.js | 4 +
 deps/npm/lib/utils/error-message.js | 7 +
 deps/npm/lib/utils/ping.js | 2 +-
 deps/npm/man/man1/npm-access.1 | 2 +-
 deps/npm/man/man1/npm-adduser.1 | 2 +-
 deps/npm/man/man1/npm-audit.1 | 2 +-
 deps/npm/man/man1/npm-bugs.1 | 2 +-
 deps/npm/man/man1/npm-cache.1 | 2 +-
 deps/npm/man/man1/npm-ci.1 | 2 +-
 deps/npm/man/man1/npm-completion.1 | 2 +-
 deps/npm/man/man1/npm-config.1 | 2 +-
 deps/npm/man/man1/npm-dedupe.1 | 2 +-
 deps/npm/man/man1/npm-deprecate.1 | 2 +-
 deps/npm/man/man1/npm-diff.1 | 2 +-
 deps/npm/man/man1/npm-dist-tag.1 | 2 +-
 deps/npm/man/man1/npm-docs.1 | 2 +-
 deps/npm/man/man1/npm-doctor.1 | 2 +-
 deps/npm/man/man1/npm-edit.1 | 2 +-
 deps/npm/man/man1/npm-exec.1 | 2 +-
 deps/npm/man/man1/npm-explain.1 | 2 +-
 deps/npm/man/man1/npm-explore.1 | 2 +-
 deps/npm/man/man1/npm-find-dupes.1 | 2 +-
 deps/npm/man/man1/npm-fund.1 | 2 +-
 deps/npm/man/man1/npm-help-search.1 | 2 +-
 deps/npm/man/man1/npm-help.1 | 2 +-
 deps/npm/man/man1/npm-hook.1 | 2 +-
 deps/npm/man/man1/npm-init.1 | 4 +-
 deps/npm/man/man1/npm-install-ci-test.1 | 2 +-
 deps/npm/man/man1/npm-install-test.1 | 2 +-
 deps/npm/man/man1/npm-install.1 | 4 +-
 deps/npm/man/man1/npm-link.1 | 2 +-
 deps/npm/man/man1/npm-login.1 | 2 +-
 deps/npm/man/man1/npm-logout.1 | 2 +-
 deps/npm/man/man1/npm-ls.1 | 4 +-
 deps/npm/man/man1/npm-org.1 | 2 +-
 deps/npm/man/man1/npm-outdated.1 | 2 +-
 deps/npm/man/man1/npm-owner.1 | 2 +-
 deps/npm/man/man1/npm-pack.1 | 2 +-
 deps/npm/man/man1/npm-ping.1 | 2 +-
 deps/npm/man/man1/npm-pkg.1 | 2 +-
 deps/npm/man/man1/npm-prefix.1 | 2 +-
 deps/npm/man/man1/npm-profile.1 | 2 +-
 deps/npm/man/man1/npm-prune.1 | 2 +-
 deps/npm/man/man1/npm-publish.1 | 2 +-
 deps/npm/man/man1/npm-query.1 | 2 +-
 deps/npm/man/man1/npm-rebuild.1 | 2 +-
 deps/npm/man/man1/npm-repo.1 | 2 +-
 deps/npm/man/man1/npm-restart.1 | 2 +-
 deps/npm/man/man1/npm-root.1 | 2 +-
 deps/npm/man/man1/npm-run-script.1 | 2 +-
 deps/npm/man/man1/npm-sbom.1 | 2 +-
 deps/npm/man/man1/npm-search.1 | 2 +-
 deps/npm/man/man1/npm-shrinkwrap.1 | 2 +-
 deps/npm/man/man1/npm-star.1 | 2 +-
 deps/npm/man/man1/npm-stars.1 | 2 +-
 deps/npm/man/man1/npm-start.1 | 2 +-
 deps/npm/man/man1/npm-stop.1 | 2 +-
 deps/npm/man/man1/npm-team.1 | 2 +-
 deps/npm/man/man1/npm-test.1 | 2 +-
 deps/npm/man/man1/npm-token.1 | 2 +-
 deps/npm/man/man1/npm-uninstall.1 | 2 +-
 deps/npm/man/man1/npm-unpublish.1 | 2 +-
 deps/npm/man/man1/npm-unstar.1 | 2 +-
 deps/npm/man/man1/npm-update.1 | 2 +-
 deps/npm/man/man1/npm-version.1 | 2 +-
 deps/npm/man/man1/npm-view.1 | 2 +-
 deps/npm/man/man1/npm-whoami.1 | 2 +-
 deps/npm/man/man1/npm.1 | 4 +-
 deps/npm/man/man1/npx.1 | 2 +-
 deps/npm/man/man5/folders.5 | 2 +-
 deps/npm/man/man5/install.5 | 2 +-
 deps/npm/man/man5/npm-global.5 | 2 +-
 deps/npm/man/man5/npm-json.5 | 34 +-
 deps/npm/man/man5/npm-shrinkwrap-json.5 | 2 +-
 deps/npm/man/man5/npmrc.5 | 2 +-
 deps/npm/man/man5/package-json.5 | 34 +-
 deps/npm/man/man5/package-lock-json.5 | 2 +-
 deps/npm/man/man7/config.7 | 2 +-
 deps/npm/man/man7/dependency-selectors.7 | 2 +-
 deps/npm/man/man7/developers.7 | 2 +-
 deps/npm/man/man7/logging.7 | 2 +-
 deps/npm/man/man7/orgs.7 | 2 +-
 deps/npm/man/man7/package-spec.7 | 2 +-
 deps/npm/man/man7/registry.7 | 2 +-
 deps/npm/man/man7/removal.7 | 2 +-
 deps/npm/man/man7/scope.7 | 2 +-
 deps/npm/man/man7/scripts.7 | 2 +-
 deps/npm/man/man7/workspaces.7 | 2 +-
 .../node_modules/@isaacs/fs-minipass/LICENSE | 15 +
 .../fs-minipass/dist/commonjs/index.js | 430 ++++++++
 .../fs-minipass/dist/commonjs/package.json | 3 +
 .../@isaacs/fs-minipass/dist/esm/index.js | 420 ++++++++
 .../@isaacs/fs-minipass/dist/esm/package.json | 3 +
 .../@isaacs/fs-minipass/package.json | 72 ++
 .../node_modules/@npmcli/agent/package.json | 22 +-
 .../arborist/lib/arborist/build-ideal-tree.js | 5 +-
 .../@npmcli/arborist/package.json | 58 +-
 .../node_modules/@npmcli/config/package.json | 16 +-
 deps/npm/node_modules/@npmcli/fs/package.json | 18 +-
 .../npm/node_modules/@npmcli/git/package.json | 27 +-
 .../installed-package-contents/package.json | 23 +-
 .../@npmcli/map-workspaces/lib/index.js | 34 +-
 .../@npmcli/map-workspaces/package.json | 25 +-
 .../@npmcli/metavuln-calculator/package.json | 24 +-
 .../@npmcli/name-from-folder/package.json | 20 +-
 .../npm/node_modules/@npmcli/node-gyp/LICENSE | 7 +
 .../@npmcli/node-gyp/package.json | 20 +-
 .../@npmcli/package-json/lib/normalize.js | 10 +-
 .../@npmcli/package-json/package.json | 29 +-
 .../@npmcli/promise-spawn/package.json | 19 +-
 .../node_modules/@npmcli/query/package.json | 21 +-
 .../node_modules/@npmcli/redact/package.json | 19 +-
 .../@npmcli/run-script/package.json | 28 +-
 .../node_modules/@npmcli/agent/lib/agents.js | 206 ++++
 .../node_modules/@npmcli/agent/lib/dns.js | 53 +
 .../node_modules/@npmcli/agent/lib/errors.js | 61 ++
 .../node_modules/@npmcli/agent/lib/index.js | 56 ++
 .../node_modules/@npmcli/agent/lib/options.js | 86 ++
 .../node_modules/@npmcli/agent/lib/proxy.js | 88 ++
 .../node_modules/@npmcli/agent/package.json | 60 ++
 .../sign/node_modules/@npmcli/fs/LICENSE.md | 20 +
 .../@npmcli/fs/lib/common/get-options.js | 20 +
 .../@npmcli/fs/lib/common/node.js | 9 +
 .../node_modules/@npmcli/fs/lib/cp/LICENSE | 15 +
 .../node_modules/@npmcli/fs/lib/cp/errors.js | 129 +++
 .../node_modules/@npmcli/fs/lib/cp/index.js | 22 +
 .../@npmcli/fs/lib/cp/polyfill.js | 428 ++++++++
 .../sign/node_modules/@npmcli/fs/lib/index.js | 13 +
 .../node_modules/@npmcli/fs/lib/move-file.js | 78 ++
 .../@npmcli/fs/lib/readdir-scoped.js | 20 +
 .../@npmcli/fs/lib/with-temp-dir.js | 39 +
 .../sign/node_modules/@npmcli/fs/package.json | 52 +
 .../sign/node_modules/cacache/LICENSE.md | 16 +
 .../node_modules/cacache/lib/content/path.js | 29 +
 .../node_modules/cacache/lib/content/read.js | 165 ++++
 .../node_modules/cacache/lib/content/rm.js | 18 +
 .../node_modules/cacache/lib/content/write.js | 206 ++++
 .../node_modules/cacache/lib/entry-index.js | 336 +++++++
 .../sign/node_modules/cacache/lib/get.js | 170 ++++
 .../sign/node_modules/cacache/lib/index.js | 42 +
 .../node_modules/cacache/lib/memoization.js | 72 ++
 .../sign/node_modules/cacache/lib/put.js | 80 ++
 .../sign/node_modules/cacache/lib/rm.js | 31 +
 .../node_modules/cacache/lib/util/glob.js | 7 +
 .../cacache/lib/util/hash-to-segments.js | 7 +
 .../sign/node_modules/cacache/lib/util/tmp.js | 26 +
 .../sign/node_modules/cacache/lib/verify.js | 257 +++++
 .../sign/node_modules/cacache/package.json | 82 ++
 .../node_modules/make-fetch-happen/LICENSE | 16 +
 .../make-fetch-happen/lib/cache/entry.js | 471 +++++++++
 .../make-fetch-happen/lib/cache/errors.js | 11 +
 .../make-fetch-happen/lib/cache/index.js | 49 +
 .../make-fetch-happen/lib/cache/key.js | 17 +
 .../make-fetch-happen/lib/cache/policy.js | 161 +++
 .../make-fetch-happen/lib/fetch.js | 118 +++
 .../make-fetch-happen/lib/index.js | 41 +
 .../make-fetch-happen/lib/options.js | 54 +
 .../make-fetch-happen/lib/pipeline.js | 41 +
 .../make-fetch-happen/lib/remote.js | 131 +++
 .../make-fetch-happen/package.json | 75 ++
 .../sign/node_modules/minipass-fetch/LICENSE | 28 +
 .../minipass-fetch/lib/abort-error.js | 17 +
 .../node_modules/minipass-fetch/lib/blob.js | 97 ++
 .../node_modules/minipass-fetch/lib/body.js | 350 +++++++
 .../minipass-fetch/lib/fetch-error.js | 32 +
 .../minipass-fetch/lib/headers.js | 267 +++++
 .../node_modules/minipass-fetch/lib/index.js | 377 +++++++
 .../minipass-fetch/lib/request.js | 282 ++++++
 .../minipass-fetch/lib/response.js | 90 ++
 .../node_modules/minipass-fetch/package.json | 69 ++
 .../sign/node_modules/proc-log/LICENSE | 15 +
 .../sign/node_modules/proc-log/lib/index.js | 153 +++
 .../sign/node_modules/proc-log/package.json | 45 +
 .../sign/node_modules/ssri/LICENSE.md | 16 +
 .../sign/node_modules/ssri/lib/index.js | 580 +++++++++++
 .../sign/node_modules/ssri/package.json | 65 ++
 .../sign/node_modules/unique-filename/LICENSE | 5 +
 .../node_modules/unique-filename/lib/index.js | 7 +
 .../node_modules/unique-filename/package.json | 51 +
 .../sign/node_modules/unique-slug/LICENSE | 15 +
 .../node_modules/unique-slug/lib/index.js | 11 +
 .../node_modules/unique-slug/package.json | 47 +
 deps/npm/node_modules/abbrev/package.json | 20 +-
 .../node_modules/bin-links/lib/link-gently.js | 16 +-
 deps/npm/node_modules/bin-links/package.json | 26 +-
 .../node_modules/cacache/lib/entry-index.js | 2 +-
 deps/npm/node_modules/cacache/lib/verify.js | 3 +-
 .../cacache/node_modules/chownr/LICENSE.md | 63 ++
 .../chownr/dist/commonjs/index.js | 93 ++
 .../chownr/dist/commonjs/package.json | 3 +
 .../node_modules/chownr/dist/esm/index.js | 85 ++
 .../node_modules/chownr/dist/esm/package.json | 3 +
 .../cacache/node_modules/chownr/package.json | 69 ++
 .../cacache/node_modules/minizlib/LICENSE | 26 +
 .../minizlib/dist/commonjs/constants.js | 123 +++
 .../minizlib/dist/commonjs/index.js | 352 +++++++
 .../minizlib/dist/commonjs/package.json | 3 +
 .../minizlib/dist/esm/constants.js | 117 +++
 .../node_modules/minizlib/dist/esm/index.js | 333 +++++++
 .../minizlib/dist/esm/package.json | 3 +
 .../node_modules/minizlib/package.json | 81 ++
 .../cacache/node_modules/mkdirp/LICENSE | 21 +
 .../node_modules/mkdirp/dist/cjs/package.json | 91 ++
 .../node_modules/mkdirp/dist/cjs/src/bin.d.ts | 3 +
 .../mkdirp/dist/cjs/src/bin.d.ts.map | 1 +
 .../node_modules/mkdirp/dist/cjs/src/bin.js | 80 ++
 .../mkdirp/dist/cjs/src/bin.js.map | 1 +
 .../mkdirp/dist/cjs/src/find-made.d.ts | 4 +
 .../mkdirp/dist/cjs/src/find-made.d.ts.map | 1 +
 .../mkdirp/dist/cjs/src/find-made.js | 35 +
 .../mkdirp/dist/cjs/src/find-made.js.map | 1 +
 .../mkdirp/dist/cjs/src/index.d.ts | 39 +
 .../mkdirp/dist/cjs/src/index.d.ts.map | 1 +
 .../node_modules/mkdirp/dist/cjs/src/index.js | 53 +
 .../mkdirp/dist/cjs/src/index.js.map | 1 +
 .../mkdirp/dist/cjs/src/mkdirp-manual.d.ts | 6 +
 .../dist/cjs/src/mkdirp-manual.d.ts.map | 1 +
 .../mkdirp/dist/cjs/src/mkdirp-manual.js | 79 ++
 .../mkdirp/dist/cjs/src/mkdirp-manual.js.map | 1 +
 .../mkdirp/dist/cjs/src/mkdirp-native.d.ts | 6 +
 .../dist/cjs/src/mkdirp-native.d.ts.map | 1 +
 .../mkdirp/dist/cjs/src/mkdirp-native.js | 50 +
 .../mkdirp/dist/cjs/src/mkdirp-native.js.map | 1 +
 .../mkdirp/dist/cjs/src/opts-arg.d.ts | 42 +
 .../mkdirp/dist/cjs/src/opts-arg.d.ts.map | 1 +
 .../mkdirp/dist/cjs/src/opts-arg.js | 38 +
 .../mkdirp/dist/cjs/src/opts-arg.js.map | 1 +
 .../mkdirp/dist/cjs/src/path-arg.d.ts | 2 +
 .../mkdirp/dist/cjs/src/path-arg.d.ts.map | 1 +
 .../mkdirp/dist/cjs/src/path-arg.js | 28 +
 .../mkdirp/dist/cjs/src/path-arg.js.map | 1 +
 .../mkdirp/dist/cjs/src/use-native.d.ts | 6 +
 .../mkdirp/dist/cjs/src/use-native.d.ts.map | 1 +
 .../mkdirp/dist/cjs/src/use-native.js | 17 +
 .../mkdirp/dist/cjs/src/use-native.js.map | 1 +
 .../mkdirp/dist/mjs/find-made.d.ts | 4 +
 .../mkdirp/dist/mjs/find-made.d.ts.map | 1 +
 .../node_modules/mkdirp/dist/mjs/find-made.js | 30 +
 .../mkdirp/dist/mjs/find-made.js.map | 1 +
 .../node_modules/mkdirp/dist/mjs/index.d.ts | 39 +
 .../mkdirp/dist/mjs/index.d.ts.map | 1 +
 .../node_modules/mkdirp/dist/mjs/index.js | 43 +
 .../node_modules/mkdirp/dist/mjs/index.js.map | 1 +
 .../mkdirp/dist/mjs/mkdirp-manual.d.ts | 6 +
 .../mkdirp/dist/mjs/mkdirp-manual.d.ts.map | 1 +
 .../mkdirp/dist/mjs/mkdirp-manual.js | 75 ++
 .../mkdirp/dist/mjs/mkdirp-manual.js.map | 1 +
 .../mkdirp/dist/mjs/mkdirp-native.d.ts | 6 +
 .../mkdirp/dist/mjs/mkdirp-native.d.ts.map | 1 +
 .../mkdirp/dist/mjs/mkdirp-native.js | 46 +
 .../mkdirp/dist/mjs/mkdirp-native.js.map | 1 +
 .../mkdirp/dist/mjs/opts-arg.d.ts | 42 +
 .../mkdirp/dist/mjs/opts-arg.d.ts.map | 1 +
 .../node_modules/mkdirp/dist/mjs/opts-arg.js | 34 +
 .../mkdirp/dist/mjs/opts-arg.js.map | 1 +
 .../node_modules/mkdirp/dist/mjs/package.json | 3 +
 .../mkdirp/dist/mjs/path-arg.d.ts | 2 +
 .../mkdirp/dist/mjs/path-arg.d.ts.map | 1 +
 .../node_modules/mkdirp/dist/mjs/path-arg.js | 24 +
 .../mkdirp/dist/mjs/path-arg.js.map | 1 +
 .../mkdirp/dist/mjs/use-native.d.ts | 6 +
 .../mkdirp/dist/mjs/use-native.d.ts.map | 1 +
 .../mkdirp/dist/mjs/use-native.js | 14 +
 .../mkdirp/dist/mjs/use-native.js.map | 1 +
 .../cacache/node_modules/mkdirp/package.json | 91 ++
 .../node_modules/mkdirp/readme.markdown | 281 ++++++
 .../cacache/node_modules/p-map/index.js | 269 +++++
 .../cacache/node_modules/p-map/license | 9 +
 .../cacache/node_modules/p-map/package.json | 57 ++
 .../cacache/node_modules/tar/LICENSE | 15 +
 .../node_modules/tar/dist/commonjs/create.js | 83 ++
 .../tar/dist/commonjs/cwd-error.js | 18 +
 .../node_modules/tar/dist/commonjs/extract.js | 78 ++
 .../tar/dist/commonjs/get-write-flag.js | 29 +
 .../node_modules/tar/dist/commonjs/header.js | 306 ++++++
 .../node_modules/tar/dist/commonjs/index.js | 54 +
 .../tar/dist/commonjs/large-numbers.js | 99 ++
 .../node_modules/tar/dist/commonjs/list.js | 136 +++
 .../tar/dist/commonjs/make-command.js | 61 ++
 .../node_modules/tar/dist/commonjs/mkdir.js | 209 ++++
 .../tar/dist/commonjs/mode-fix.js | 29 +
 .../tar/dist/commonjs/normalize-unicode.js | 17 +
 .../dist/commonjs/normalize-windows-path.js | 12 +
 .../node_modules/tar/dist/commonjs/options.js | 66 ++
 .../node_modules/tar/dist/commonjs/pack.js | 477 +++++++++
 .../tar/dist/commonjs/package.json | 3 +
 .../node_modules/tar/dist/commonjs/parse.js | 599 ++++++++++++
 .../tar/dist/commonjs/path-reservations.js | 170 ++++
 .../node_modules/tar/dist/commonjs/pax.js | 158 +++
 .../tar/dist/commonjs/read-entry.js | 140 +++
 .../node_modules/tar/dist/commonjs/replace.js | 231 +++++
 .../tar/dist/commonjs/strip-absolute-path.js | 29 +
 .../dist/commonjs/strip-trailing-slashes.js | 18 +
 .../tar/dist/commonjs/symlink-error.js | 19 +
 .../node_modules/tar/dist/commonjs/types.js | 50 +
 .../node_modules/tar/dist/commonjs/unpack.js | 919 ++++++++++++++++++
 .../node_modules/tar/dist/commonjs/update.js | 33 +
 .../tar/dist/commonjs/warn-method.js | 31 +
 .../tar/dist/commonjs/winchars.js | 14 +
 .../tar/dist/commonjs/write-entry.js | 689 +++++++++++++
 .../node_modules/tar/dist/esm/create.js | 77 ++
 .../node_modules/tar/dist/esm/cwd-error.js | 14 +
 .../node_modules/tar/dist/esm/extract.js | 49 +
 .../tar/dist/esm/get-write-flag.js | 23 +
 .../node_modules/tar/dist/esm/header.js | 279 ++++++
 .../node_modules/tar/dist/esm/index.js | 20 +
 .../tar/dist/esm/large-numbers.js | 94 ++
 .../cacache/node_modules/tar/dist/esm/list.js | 106 ++
 .../node_modules/tar/dist/esm/make-command.js | 57 ++
 .../node_modules/tar/dist/esm/mkdir.js | 201 ++++
 .../node_modules/tar/dist/esm/mode-fix.js | 25 +
 .../tar/dist/esm/normalize-unicode.js | 13 +
 .../tar/dist/esm/normalize-windows-path.js | 9 +
 .../node_modules/tar/dist/esm/options.js | 54 +
 .../cacache/node_modules/tar/dist/esm/pack.js | 445 +++++++++
 .../node_modules/tar/dist/esm/package.json | 3 +
 .../node_modules/tar/dist/esm/parse.js | 595 ++++++++++++
 .../tar/dist/esm/path-reservations.js | 166 ++++
 .../cacache/node_modules/tar/dist/esm/pax.js | 154 +++
 .../node_modules/tar/dist/esm/read-entry.js | 136 +++
 .../node_modules/tar/dist/esm/replace.js | 225 +++++
 .../tar/dist/esm/strip-absolute-path.js | 25 +
 .../tar/dist/esm/strip-trailing-slashes.js | 14 +
 .../tar/dist/esm/symlink-error.js | 15 +
 .../node_modules/tar/dist/esm/types.js | 45 +
 .../node_modules/tar/dist/esm/unpack.js | 888 +++++++++++++++++
 .../node_modules/tar/dist/esm/update.js | 30 +
 .../node_modules/tar/dist/esm/warn-method.js | 27 +
 .../node_modules/tar/dist/esm/winchars.js | 9 +
 .../node_modules/tar/dist/esm/write-entry.js | 657 +++++++++++++
 .../cacache/node_modules/tar/package.json | 325 +++++++
 .../cacache/node_modules/yallist/LICENSE.md | 63 ++
 .../yallist/dist/commonjs/index.js | 384 ++++++++
 .../yallist/dist/commonjs/package.json | 3 +
 .../node_modules/yallist/dist/esm/index.js | 379 ++++++++
 .../yallist/dist/esm/package.json | 3 +
 .../cacache/node_modules/yallist/package.json | 68 ++
 deps/npm/node_modules/cacache/package.json | 27 +-
 deps/npm/node_modules/cmd-shim/package.json | 17 +-
 .../node_modules/hosted-git-info/package.json | 17 +-
 .../npm/node_modules/ignore-walk/package.json | 17 +-
 deps/npm/node_modules/ini/package.json | 16 +-
 .../init-package-json/package.json | 27 +-
 .../package.json | 17 +-
 .../node_modules/libnpmaccess/package.json | 10 +-
 deps/npm/node_modules/libnpmdiff/package.json | 14 +-
 deps/npm/node_modules/libnpmexec/package.json | 22 +-
 deps/npm/node_modules/libnpmfund/package.json | 8 +-
 deps/npm/node_modules/libnpmhook/package.json | 8 +-
 deps/npm/node_modules/libnpmorg/package.json | 8 +-
 deps/npm/node_modules/libnpmpack/package.json | 14 +-
 .../node_modules/libnpmpublish/package.json | 16 +-
 .../node_modules/libnpmsearch/package.json | 8 +-
 deps/npm/node_modules/libnpmteam/package.json | 8 +-
 .../node_modules/libnpmversion/package.json | 14 +-
 .../make-fetch-happen/package.json | 29 +-
 .../node_modules/minizlib/LICENSE | 26 +
 .../minizlib/dist/commonjs/constants.js | 123 +++
 .../minizlib/dist/commonjs/index.js | 352 +++++++
 .../minizlib/dist/commonjs/package.json | 3 +
 .../minizlib/dist/esm/constants.js | 117 +++
 .../node_modules/minizlib/dist/esm/index.js | 333 +++++++
 .../minizlib/dist/esm/package.json | 3 +
 .../node_modules/minizlib/package.json | 81 ++
 .../node_modules/minipass-fetch/package.json | 19 +-
 .../npm/node_modules/mute-stream/package.json | 20 +-
 .../node_modules/@npmcli/agent/lib/agents.js | 206 ++++
 .../node_modules/@npmcli/agent/lib/dns.js | 53 +
 .../node_modules/@npmcli/agent/lib/errors.js | 61 ++
 .../node_modules/@npmcli/agent/lib/index.js | 56 ++
 .../node_modules/@npmcli/agent/lib/options.js | 86 ++
 .../node_modules/@npmcli/agent/lib/proxy.js | 88 ++
 .../node_modules/@npmcli/agent/package.json | 60 ++
 .../node_modules/@npmcli/fs/LICENSE.md | 20 +
 .../@npmcli/fs/lib/common/get-options.js | 20 +
 .../@npmcli/fs/lib/common/node.js | 9 +
 .../node_modules/@npmcli/fs/lib/cp/LICENSE | 15 +
 .../node_modules/@npmcli/fs/lib/cp/errors.js | 129 +++
 .../node_modules/@npmcli/fs/lib/cp/index.js | 22 +
 .../@npmcli/fs/lib/cp/polyfill.js | 428 ++++++++
 .../node_modules/@npmcli/fs/lib/index.js | 13 +
 .../node_modules/@npmcli/fs/lib/move-file.js | 78 ++
 .../@npmcli/fs/lib/readdir-scoped.js | 20 +
 .../@npmcli/fs/lib/with-temp-dir.js | 39 +
 .../node_modules/@npmcli/fs/package.json | 52 +
 .../node-gyp/node_modules/abbrev/LICENSE | 46 +
 .../node-gyp/node_modules/abbrev/lib/index.js | 50 +
 .../node-gyp/node_modules/abbrev/package.json | 43 +
 .../node-gyp/node_modules/cacache/LICENSE.md | 16 +
 .../node_modules/cacache/lib/content/path.js | 29 +
 .../node_modules/cacache/lib/content/read.js | 165 ++++
 .../node_modules/cacache/lib/content/rm.js | 18 +
 .../node_modules/cacache/lib/content/write.js | 206 ++++
 .../node_modules/cacache/lib/entry-index.js | 336 +++++++
 .../node-gyp/node_modules/cacache/lib/get.js | 170 ++++
 .../node_modules/cacache/lib/index.js | 42 +
 .../node_modules/cacache/lib/memoization.js | 72 ++
 .../node-gyp/node_modules/cacache/lib/put.js | 80 ++
 .../node-gyp/node_modules/cacache/lib/rm.js | 31 +
 .../node_modules/cacache/lib/util/glob.js | 7 +
 .../cacache/lib/util/hash-to-segments.js | 7 +
 .../node_modules/cacache/lib/util/tmp.js | 26 +
 .../node_modules/cacache/lib/verify.js | 257 +++++
 .../node_modules/cacache/package.json | 82 ++
 .../node-gyp/node_modules/isexe/LICENSE | 15 +
 .../node_modules/isexe/dist/cjs/index.js | 46 +
 .../node_modules/isexe/dist/cjs/options.js | 3 +
 .../node_modules/isexe/dist/cjs/package.json | 3 +
 .../node_modules/isexe/dist/cjs/posix.js | 67 ++
 .../node_modules/isexe/dist/cjs/win32.js | 62 ++
 .../node_modules/isexe/dist/mjs/index.js | 16 +
 .../node_modules/isexe/dist/mjs/options.js | 2 +
 .../node_modules/isexe/dist/mjs/package.json | 3 +
 .../node_modules/isexe/dist/mjs/posix.js | 62 ++
 .../node_modules/isexe/dist/mjs/win32.js | 57 ++
 .../node-gyp/node_modules/isexe/package.json | 96 ++
 .../node_modules/make-fetch-happen/LICENSE | 16 +
 .../make-fetch-happen/lib/cache/entry.js | 471 +++++++++
 .../make-fetch-happen/lib/cache/errors.js | 11 +
 .../make-fetch-happen/lib/cache/index.js | 49 +
 .../make-fetch-happen/lib/cache/key.js | 17 +
 .../make-fetch-happen/lib/cache/policy.js | 161 +++
 .../make-fetch-happen/lib/fetch.js | 118 +++
 .../make-fetch-happen/lib/index.js | 41 +
 .../make-fetch-happen/lib/options.js | 54 +
 .../make-fetch-happen/lib/pipeline.js | 41 +
 .../make-fetch-happen/lib/remote.js | 131 +++
 .../make-fetch-happen/package.json | 75 ++
 .../node_modules/minipass-fetch/LICENSE | 28 +
 .../minipass-fetch/lib/abort-error.js | 17 +
 .../node_modules/minipass-fetch/lib/blob.js | 97 ++
 .../node_modules/minipass-fetch/lib/body.js | 350 +++++++
 .../minipass-fetch/lib/fetch-error.js | 32 +
 .../minipass-fetch/lib/headers.js | 267 +++++
 .../node_modules/minipass-fetch/lib/index.js | 377 +++++++
 .../minipass-fetch/lib/request.js | 282 ++++++
 .../minipass-fetch/lib/response.js | 90 ++
 .../node_modules/minipass-fetch/package.json | 69 ++
 .../node-gyp/node_modules/nopt/LICENSE | 15 +
 .../node-gyp/node_modules/nopt/README.md | 213 ++++
 .../node-gyp/node_modules/nopt/bin/nopt.js | 29 +
 .../node-gyp/node_modules/nopt/lib/debug.js | 5 +
 .../node_modules/nopt/lib/nopt-lib.js | 479 +++++++++
 .../node-gyp/node_modules/nopt/lib/nopt.js | 30 +
 .../node_modules/nopt/lib/type-defs.js | 91 ++
 .../node-gyp/node_modules/nopt/package.json | 51 +
 .../node-gyp/node_modules/proc-log/LICENSE | 15 +
 .../node_modules/proc-log/lib/index.js | 153 +++
 .../node_modules/proc-log/package.json | 45 +
 .../node-gyp/node_modules/ssri/LICENSE.md | 16 +
 .../node-gyp/node_modules/ssri/lib/index.js | 580 +++++++++++
 .../node-gyp/node_modules/ssri/package.json | 65 ++
 .../node_modules/unique-filename/LICENSE | 5 +
 .../node_modules/unique-filename/lib/index.js | 7 +
 .../node_modules/unique-filename/package.json | 51 +
 .../node-gyp/node_modules/unique-slug/LICENSE | 15 +
 .../node_modules/unique-slug/lib/index.js | 11 +
 .../node_modules/unique-slug/package.json | 47 +
 .../node-gyp/node_modules/which/LICENSE | 15 +
 .../node-gyp/node_modules/which/README.md | 51 +
 .../node-gyp/node_modules/which/bin/which.js | 52 +
 .../node-gyp/node_modules/which/lib/index.js | 111 +++
 .../node-gyp/node_modules/which/package.json | 57 ++
 .../nopt/node_modules/abbrev/LICENSE | 46 +
 .../nopt/node_modules/abbrev/lib/index.js | 50 +
 .../nopt/node_modules/abbrev/package.json | 43 +
 deps/npm/node_modules/nopt/package.json | 17 +-
 .../normalize-package-data/package.json | 19 +-
 .../npm-audit-report/package.json | 20 +-
 .../npm/node_modules/npm-bundled/package.json | 19 +-
 .../npm-install-checks/lib/current-env.js | 63 ++
 .../npm-install-checks/lib/dev-engines.js | 145 +++
 .../npm-install-checks/lib/index.js | 41 +-
 .../npm-install-checks/package.json | 19 +-
 .../npm-normalize-package-bin/package.json | 19 +-
 .../node_modules/npm-package-arg/package.json | 16 +-
 .../node_modules/npm-packlist/package.json | 20 +-
 .../npm-pick-manifest/package.json | 23 +-
 .../npm/node_modules/npm-profile/lib/index.js | 2 +-
 .../npm/node_modules/npm-profile/package.json | 23 +-
 .../node_modules/minizlib/LICENSE | 26 +
 .../minizlib/dist/commonjs/constants.js | 123 +++
 .../minizlib/dist/commonjs/index.js | 352 +++++++
 .../minizlib/dist/commonjs/package.json | 3 +
 .../minizlib/dist/esm/constants.js | 117 +++
 .../node_modules/minizlib/dist/esm/index.js | 333 +++++++
 .../minizlib/dist/esm/package.json | 3 +
 .../node_modules/minizlib/package.json | 81 ++
 .../npm-registry-fetch/package.json | 30 +-
 .../npm-user-validate/package.json | 17 +-
 deps/npm/node_modules/pacote/package.json | 43 +-
 .../parse-conflict-json/package.json | 22 +-
 deps/npm/node_modules/proc-log/package.json | 19 +-
 deps/npm/node_modules/proggy/package.json | 22 +-
 deps/npm/node_modules/promzard/package.json | 19 +-
 .../node_modules/read-cmd-shim/package.json | 23 +-
 .../read-package-json-fast/package.json | 24 +-
 .../node_modules/read/dist/commonjs/read.js | 3 +-
 deps/npm/node_modules/read/package.json | 33 +-
 deps/npm/node_modules/rimraf/LICENSE | 15 +
 deps/npm/node_modules/rimraf/README.md | 220 +++++
 .../rimraf/dist/commonjs/default-tmp.d.ts | 3 +
 .../rimraf/dist/commonjs/default-tmp.d.ts.map | 1 +
 .../rimraf/dist/commonjs/default-tmp.js | 61 ++
 .../rimraf/dist/commonjs/default-tmp.js.map | 1 +
 .../rimraf/dist/commonjs/fix-eperm.d.ts | 3 +
 .../rimraf/dist/commonjs/fix-eperm.d.ts.map | 1 +
 .../rimraf/dist/commonjs/fix-eperm.js | 58 ++
 .../rimraf/dist/commonjs/fix-eperm.js.map | 1 +
 .../node_modules/rimraf/dist/commonjs/fs.d.ts | 17 +
 .../rimraf/dist/commonjs/fs.d.ts.map | 1 +
 .../node_modules/rimraf/dist/commonjs/fs.js | 46 +
 .../rimraf/dist/commonjs/fs.js.map | 1 +
 .../rimraf/dist/commonjs/ignore-enoent.d.ts | 3 +
 .../dist/commonjs/ignore-enoent.d.ts.map | 1 +
 .../rimraf/dist/commonjs/ignore-enoent.js | 21 +
 .../rimraf/dist/commonjs/ignore-enoent.js.map | 1 +
 .../rimraf/dist/commonjs/index.d.ts | 50 +
 .../rimraf/dist/commonjs/index.d.ts.map | 1 +
 .../rimraf/dist/commonjs/index.js | 78 ++
 .../rimraf/dist/commonjs/index.js.map | 1 +
 .../rimraf/dist/commonjs/opt-arg.d.ts | 34 +
 .../rimraf/dist/commonjs/opt-arg.d.ts.map | 1 +
 .../rimraf/dist/commonjs/opt-arg.js | 53 +
 .../rimraf/dist/commonjs/opt-arg.js.map | 1 +
 .../rimraf/dist/commonjs/package.json | 3 +
 .../rimraf/dist/commonjs/path-arg.d.ts | 4 +
 .../rimraf/dist/commonjs/path-arg.d.ts.map | 1 +
 .../rimraf/dist/commonjs/path-arg.js | 52 +
 .../rimraf/dist/commonjs/path-arg.js.map | 1 +
 .../rimraf/dist/commonjs/platform.d.ts | 3 +
 .../rimraf/dist/commonjs/platform.d.ts.map | 1 +
 .../rimraf/dist/commonjs/platform.js | 4 +
 .../rimraf/dist/commonjs/platform.js.map | 1 +
 .../dist/commonjs/readdir-or-error.d.ts | 3 +
 .../dist/commonjs/readdir-or-error.d.ts.map | 1 +
 .../rimraf/dist/commonjs/readdir-or-error.js | 19 +
 .../dist/commonjs/readdir-or-error.js.map | 1 +
 .../rimraf/dist/commonjs/retry-busy.d.ts | 8 +
 .../rimraf/dist/commonjs/retry-busy.d.ts.map | 1 +
 .../rimraf/dist/commonjs/retry-busy.js | 68 ++
 .../rimraf/dist/commonjs/retry-busy.js.map | 1 +
 .../rimraf/dist/commonjs/rimraf-manual.d.ts | 3 +
 .../dist/commonjs/rimraf-manual.d.ts.map | 1 +
 .../rimraf/dist/commonjs/rimraf-manual.js | 12 +
 .../rimraf/dist/commonjs/rimraf-manual.js.map | 1 +
 .../dist/commonjs/rimraf-move-remove.d.ts | 4 +
 .../dist/commonjs/rimraf-move-remove.d.ts.map | 1 +
 .../dist/commonjs/rimraf-move-remove.js | 192 ++++
 .../dist/commonjs/rimraf-move-remove.js.map | 1 +
 .../rimraf/dist/commonjs/rimraf-native.d.ts | 4 +
 .../dist/commonjs/rimraf-native.d.ts.map | 1 +
 .../rimraf/dist/commonjs/rimraf-native.js | 24 +
 .../rimraf/dist/commonjs/rimraf-native.js.map | 1 +
 .../rimraf/dist/commonjs/rimraf-posix.d.ts | 4 +
 .../dist/commonjs/rimraf-posix.d.ts.map | 1 +
 .../rimraf/dist/commonjs/rimraf-posix.js | 123 +++
 .../rimraf/dist/commonjs/rimraf-posix.js.map | 1 +
 .../rimraf/dist/commonjs/rimraf-windows.d.ts | 4 +
 .../dist/commonjs/rimraf-windows.d.ts.map | 1 +
 .../rimraf/dist/commonjs/rimraf-windows.js | 182 ++++
 .../dist/commonjs/rimraf-windows.js.map | 1 +
 .../rimraf/dist/commonjs/use-native.d.ts | 4 +
 .../rimraf/dist/commonjs/use-native.d.ts.map | 1 +
 .../rimraf/dist/commonjs/use-native.js | 22 +
 .../rimraf/dist/commonjs/use-native.js.map | 1 +
 .../node_modules/rimraf/dist/esm/bin.d.mts | 8 +
 .../rimraf/dist/esm/bin.d.mts.map | 1 +
 deps/npm/node_modules/rimraf/dist/esm/bin.mjs | 256 +++++
 .../node_modules/rimraf/dist/esm/bin.mjs.map | 1 +
 .../rimraf/dist/esm/default-tmp.d.ts | 3 +
 .../rimraf/dist/esm/default-tmp.d.ts.map | 1 +
 .../rimraf/dist/esm/default-tmp.js | 55 ++
 .../rimraf/dist/esm/default-tmp.js.map | 1 +
 .../rimraf/dist/esm/fix-eperm.d.ts | 3 +
 .../rimraf/dist/esm/fix-eperm.d.ts.map | 1 +
 .../node_modules/rimraf/dist/esm/fix-eperm.js | 53 +
 .../rimraf/dist/esm/fix-eperm.js.map | 1 +
 deps/npm/node_modules/rimraf/dist/esm/fs.d.ts | 17 +
 .../node_modules/rimraf/dist/esm/fs.d.ts.map | 1 +
 deps/npm/node_modules/rimraf/dist/esm/fs.js | 31 +
 .../node_modules/rimraf/dist/esm/fs.js.map | 1 +
 .../rimraf/dist/esm/ignore-enoent.d.ts | 3 +
 .../rimraf/dist/esm/ignore-enoent.d.ts.map | 1 +
 .../rimraf/dist/esm/ignore-enoent.js | 16 +
 .../rimraf/dist/esm/ignore-enoent.js.map | 1 +
 .../node_modules/rimraf/dist/esm/index.d.ts | 50 +
 .../rimraf/dist/esm/index.d.ts.map | 1 +
 .../npm/node_modules/rimraf/dist/esm/index.js | 70 ++
 .../node_modules/rimraf/dist/esm/index.js.map | 1 +
 .../node_modules/rimraf/dist/esm/opt-arg.d.ts | 34 +
 .../rimraf/dist/esm/opt-arg.d.ts.map | 1 +
 .../node_modules/rimraf/dist/esm/opt-arg.js | 46 +
 .../rimraf/dist/esm/opt-arg.js.map | 1 +
 .../node_modules/rimraf/dist/esm/package.json | 3 +
 .../rimraf/dist/esm/path-arg.d.ts | 4 +
 .../rimraf/dist/esm/path-arg.d.ts.map | 1 +
 .../node_modules/rimraf/dist/esm/path-arg.js | 47 +
 .../rimraf/dist/esm/path-arg.js.map | 1 +
 .../rimraf/dist/esm/platform.d.ts | 3 +
 .../rimraf/dist/esm/platform.d.ts.map | 1 +
 .../node_modules/rimraf/dist/esm/platform.js | 2 +
 .../rimraf/dist/esm/platform.js.map | 1 +
 .../rimraf/dist/esm/readdir-or-error.d.ts | 3 +
 .../rimraf/dist/esm/readdir-or-error.d.ts.map | 1 +
 .../rimraf/dist/esm/readdir-or-error.js | 14 +
 .../rimraf/dist/esm/readdir-or-error.js.map | 1 +
 .../rimraf/dist/esm/retry-busy.d.ts | 8 +
 .../rimraf/dist/esm/retry-busy.d.ts.map | 1 +
 .../rimraf/dist/esm/retry-busy.js | 63 ++
 .../rimraf/dist/esm/retry-busy.js.map | 1 +
 .../rimraf/dist/esm/rimraf-manual.d.ts | 3 +
 .../rimraf/dist/esm/rimraf-manual.d.ts.map | 1 +
 .../rimraf/dist/esm/rimraf-manual.js | 6 +
 .../rimraf/dist/esm/rimraf-manual.js.map | 1 +
 .../rimraf/dist/esm/rimraf-move-remove.d.ts | 4 +
 .../dist/esm/rimraf-move-remove.d.ts.map | 1 +
 .../rimraf/dist/esm/rimraf-move-remove.js | 187 ++++
 .../rimraf/dist/esm/rimraf-move-remove.js.map | 1 +
 .../rimraf/dist/esm/rimraf-native.d.ts | 4 +
 .../rimraf/dist/esm/rimraf-native.d.ts.map | 1 +
 .../rimraf/dist/esm/rimraf-native.js | 19 +
 .../rimraf/dist/esm/rimraf-native.js.map | 1 +
 .../rimraf/dist/esm/rimraf-posix.d.ts | 4 +
 .../rimraf/dist/esm/rimraf-posix.d.ts.map | 1 +
 .../rimraf/dist/esm/rimraf-posix.js | 118 +++
 .../rimraf/dist/esm/rimraf-posix.js.map | 1 +
 .../rimraf/dist/esm/rimraf-windows.d.ts | 4 +
 .../rimraf/dist/esm/rimraf-windows.d.ts.map | 1 +
 .../rimraf/dist/esm/rimraf-windows.js | 177 ++++
 .../rimraf/dist/esm/rimraf-windows.js.map | 1 +
 .../rimraf/dist/esm/use-native.d.ts | 4 +
 .../rimraf/dist/esm/use-native.d.ts.map | 1 +
 .../rimraf/dist/esm/use-native.js | 16 +
 .../rimraf/dist/esm/use-native.js.map | 1 +
 deps/npm/node_modules/rimraf/package.json | 89 ++
 deps/npm/node_modules/ssri/package.json | 17 +-
 .../node_modules/@npmcli/agent/lib/agents.js | 206 ++++
 .../node_modules/@npmcli/agent/lib/dns.js | 53 +
 .../node_modules/@npmcli/agent/lib/errors.js | 61 ++
 .../node_modules/@npmcli/agent/lib/index.js | 56 ++
 .../node_modules/@npmcli/agent/lib/options.js | 86 ++
 .../node_modules/@npmcli/agent/lib/proxy.js | 88 ++
 .../node_modules/@npmcli/agent/package.json | 60 ++
 .../tuf-js/node_modules/@npmcli/fs/LICENSE.md | 20 +
 .../@npmcli/fs/lib/common/get-options.js | 20 +
 .../@npmcli/fs/lib/common/node.js | 9 +
 .../node_modules/@npmcli/fs/lib/cp/LICENSE | 15 +
 .../node_modules/@npmcli/fs/lib/cp/errors.js | 129 +++
 .../node_modules/@npmcli/fs/lib/cp/index.js | 22 +
 .../@npmcli/fs/lib/cp/polyfill.js | 428 ++++++++
 .../node_modules/@npmcli/fs/lib/index.js | 13 +
 .../node_modules/@npmcli/fs/lib/move-file.js | 78 ++
 .../@npmcli/fs/lib/readdir-scoped.js | 20 +
 .../@npmcli/fs/lib/with-temp-dir.js | 39 +
 .../node_modules/@npmcli/fs/package.json | 52 +
 .../tuf-js/node_modules/cacache/LICENSE.md | 16 +
 .../node_modules/cacache/lib/content/path.js | 29 +
 .../node_modules/cacache/lib/content/read.js | 165 ++++
 .../node_modules/cacache/lib/content/rm.js | 18 +
 .../node_modules/cacache/lib/content/write.js | 206 ++++
 .../node_modules/cacache/lib/entry-index.js | 336 +++++++
 .../tuf-js/node_modules/cacache/lib/get.js | 170 ++++
 .../tuf-js/node_modules/cacache/lib/index.js | 42 +
 .../node_modules/cacache/lib/memoization.js | 72 ++
 .../tuf-js/node_modules/cacache/lib/put.js | 80 ++
 .../tuf-js/node_modules/cacache/lib/rm.js | 31 +
 .../node_modules/cacache/lib/util/glob.js | 7 +
 .../cacache/lib/util/hash-to-segments.js | 7 +
 .../node_modules/cacache/lib/util/tmp.js | 26 +
 .../tuf-js/node_modules/cacache/lib/verify.js | 257 +++++
 .../tuf-js/node_modules/cacache/package.json | 82 ++
 .../node_modules/make-fetch-happen/LICENSE | 16 +
 .../make-fetch-happen/lib/cache/entry.js | 471 +++++++++
 .../make-fetch-happen/lib/cache/errors.js | 11 +
 .../make-fetch-happen/lib/cache/index.js | 49 +
 .../make-fetch-happen/lib/cache/key.js | 17 +
 .../make-fetch-happen/lib/cache/policy.js | 161 +++
 .../make-fetch-happen/lib/fetch.js | 118 +++
 .../make-fetch-happen/lib/index.js | 41 +
 .../make-fetch-happen/lib/options.js | 54 +
 .../make-fetch-happen/lib/pipeline.js | 41 +
 .../make-fetch-happen/lib/remote.js | 131 +++
 .../make-fetch-happen/package.json | 75 ++
 .../node_modules/minipass-fetch/LICENSE | 28 +
 .../minipass-fetch/lib/abort-error.js | 17 +
 .../node_modules/minipass-fetch/lib/blob.js | 97 ++
 .../node_modules/minipass-fetch/lib/body.js | 350 +++++++
 .../minipass-fetch/lib/fetch-error.js | 32 +
 .../minipass-fetch/lib/headers.js | 267 +++++
 .../node_modules/minipass-fetch/lib/index.js | 377 +++++++
 .../minipass-fetch/lib/request.js | 282 ++++++
 .../minipass-fetch/lib/response.js | 90 ++
 .../node_modules/minipass-fetch/package.json | 69 ++
 .../tuf-js/node_modules/proc-log/LICENSE | 15 +
 .../tuf-js/node_modules/proc-log/lib/index.js | 153 +++
 .../tuf-js/node_modules/proc-log/package.json | 45 +
 .../tuf-js/node_modules/ssri/LICENSE.md | 16 +
 .../tuf-js/node_modules/ssri/lib/index.js | 580 +++++++++++
 .../tuf-js/node_modules/ssri/package.json | 65 ++
 .../node_modules/unique-filename/LICENSE | 5 +
 .../node_modules/unique-filename/lib/index.js | 7 +
 .../node_modules/unique-filename/package.json | 51 +
 .../tuf-js/node_modules/unique-slug/LICENSE | 15 +
 .../node_modules/unique-slug/lib/index.js | 11 +
 .../node_modules/unique-slug/package.json | 47 +
 .../node_modules/unique-filename/package.json | 22 +-
 .../npm/node_modules/unique-slug/package.json | 20 +-
 .../validate-npm-package-name/package.json | 17 +-
 deps/npm/node_modules/which/package.json | 25 +-
 .../write-file-atomic/package.json | 19 +-
 deps/npm/package.json | 94 +-
 .../test/lib/commands/doctor.js.test.cjs | 8 +-
 .../test/lib/commands/install.js.test.cjs | 309 ++++++
 deps/npm/test/fixtures/clean-snapshot.js | 6 +
 deps/npm/test/lib/commands/doctor.js | 48 +-
 deps/npm/test/lib/commands/install.js | 317 ++++++
 deps/npm/test/lib/commands/ping.js | 18 +
 deps/npm/test/lib/npm.js | 2 +-
 813 files changed, 43927 insertions(+), 1054 deletions(-)
 create mode 100644 deps/npm/node_modules/@isaacs/fs-minipass/LICENSE
 create mode 100644 deps/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/index.js
 create mode 100644 deps/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/package.json
 create mode 100644 deps/npm/node_modules/@isaacs/fs-minipass/dist/esm/index.js
 create mode 100644 deps/npm/node_modules/@isaacs/fs-minipass/dist/esm/package.json
 create mode 100644 deps/npm/node_modules/@isaacs/fs-minipass/package.json
 create mode 100644 deps/npm/node_modules/@npmcli/node-gyp/LICENSE
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/get.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/index.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/put.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/package.json
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/fetch-error.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/package.json
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/ssri/lib/index.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/ssri/package.json
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/package.json
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js
 create mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/chownr/LICENSE.md
 create mode 100644 deps/npm/node_modules/cacache/node_modules/chownr/dist/commonjs/index.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/chownr/dist/commonjs/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/chownr/dist/esm/index.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/chownr/dist/esm/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/chownr/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/minizlib/LICENSE
 create mode 100644 deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/constants.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/index.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/constants.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/index.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/minizlib/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/LICENSE
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.d.ts.map
 create mode 100755 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.d.ts.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.d.ts.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.d.ts.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.d.ts.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.d.ts.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.d.ts.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.d.ts.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.d.ts
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.d.ts.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js.map
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/mkdirp/readme.markdown
 create mode 100644 deps/npm/node_modules/cacache/node_modules/p-map/index.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/p-map/license
 create mode 100644 deps/npm/node_modules/cacache/node_modules/p-map/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/LICENSE
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/create.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/cwd-error.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/extract.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/get-write-flag.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/header.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/index.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/large-numbers.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/list.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/make-command.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/mkdir.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/mode-fix.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-unicode.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-windows-path.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/options.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/pack.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/parse.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/path-reservations.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/pax.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/read-entry.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/replace.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/strip-absolute-path.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/symlink-error.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/types.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/unpack.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/update.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/warn-method.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/winchars.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/write-entry.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/create.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/cwd-error.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/extract.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/get-write-flag.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/header.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/index.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/large-numbers.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/list.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/make-command.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/mkdir.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/mode-fix.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/normalize-unicode.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/normalize-windows-path.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/options.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/pack.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/parse.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/path-reservations.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/pax.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/read-entry.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/replace.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/strip-absolute-path.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/strip-trailing-slashes.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/symlink-error.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/types.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/unpack.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/update.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/warn-method.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/winchars.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/dist/esm/write-entry.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/tar/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/yallist/LICENSE.md
 create mode 100644 deps/npm/node_modules/cacache/node_modules/yallist/dist/commonjs/index.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/yallist/dist/commonjs/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/yallist/dist/esm/index.js
 create mode 100644 deps/npm/node_modules/cacache/node_modules/yallist/dist/esm/package.json
 create mode 100644 deps/npm/node_modules/cacache/node_modules/yallist/package.json
 create mode 100644 deps/npm/node_modules/minipass-fetch/node_modules/minizlib/LICENSE
 create mode 100644 deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/constants.js
 create mode 100644 deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/index.js
 create mode 100644 deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/package.json
 create mode 100644 deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/constants.js
 create mode 100644 deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/index.js
 create mode 100644 deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/package.json
 create mode 100644 deps/npm/node_modules/minipass-fetch/node_modules/minizlib/package.json
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/agents.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/dns.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/errors.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/index.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/options.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/proxy.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/package.json
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/errors.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/move-file.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/readdir-scoped.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/package.json
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/abbrev/LICENSE
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/abbrev/lib/index.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/abbrev/package.json
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js
 create mode
100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/isexe/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/options.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/posix.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/win32.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/options.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/posix.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/win32.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/isexe/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js create mode 100644 
deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/nopt/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/nopt/README.md create mode 100755 deps/npm/node_modules/node-gyp/node_modules/nopt/bin/nopt.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/nopt/lib/debug.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt-lib.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/nopt/lib/type-defs.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/nopt/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/proc-log/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/proc-log/lib/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/proc-log/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/ssri/LICENSE.md create mode 100644 deps/npm/node_modules/node-gyp/node_modules/ssri/lib/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/ssri/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/unique-filename/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/unique-filename/lib/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/unique-filename/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/unique-slug/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/unique-slug/lib/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/unique-slug/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/which/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/which/README.md create mode 100755 deps/npm/node_modules/node-gyp/node_modules/which/bin/which.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/which/lib/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/which/package.json create mode 100644 deps/npm/node_modules/nopt/node_modules/abbrev/LICENSE create mode 100644 deps/npm/node_modules/nopt/node_modules/abbrev/lib/index.js create mode 100644 deps/npm/node_modules/nopt/node_modules/abbrev/package.json create mode 100644 deps/npm/node_modules/npm-install-checks/lib/current-env.js create mode 100644 deps/npm/node_modules/npm-install-checks/lib/dev-engines.js create mode 100644 deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/LICENSE create mode 100644 deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/constants.js create mode 100644 deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/index.js create mode 100644 deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/package.json create mode 100644 deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/constants.js create mode 100644 deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/index.js create mode 100644 
deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/package.json create mode 100644 deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/package.json create mode 100644 deps/npm/node_modules/rimraf/LICENSE create mode 100644 deps/npm/node_modules/rimraf/README.md create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/fs.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/fs.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/index.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/index.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/package.json create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/platform.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/platform.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js.map create mode 100644 
deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/use-native.js create mode 100644 deps/npm/node_modules/rimraf/dist/commonjs/use-native.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/bin.d.mts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/bin.d.mts.map create mode 100755 deps/npm/node_modules/rimraf/dist/esm/bin.mjs create mode 100644 deps/npm/node_modules/rimraf/dist/esm/bin.mjs.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/default-tmp.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/default-tmp.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/fs.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/fs.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/fs.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/fs.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/index.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/index.d.ts.map create mode 100644 
deps/npm/node_modules/rimraf/dist/esm/index.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/index.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/opt-arg.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/opt-arg.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/package.json create mode 100644 deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/path-arg.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/path-arg.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/platform.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/platform.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/platform.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/platform.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/retry-busy.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/retry-busy.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts create mode 100644 deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts.map create mode 100644 deps/npm/node_modules/rimraf/dist/esm/use-native.js create mode 100644 deps/npm/node_modules/rimraf/dist/esm/use-native.js.map create mode 100644 
deps/npm/node_modules/rimraf/package.json create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/package.json create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/package.json create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/LICENSE.md create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/path.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/read.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/write.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/get.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/index.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/memoization.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/put.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/rm.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/verify.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/package.json create mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE create mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js create mode 100644 
deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/package.json create mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/LICENSE create mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/fetch-error.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/package.json create mode 100644 deps/npm/node_modules/tuf-js/node_modules/proc-log/LICENSE create mode 100644 deps/npm/node_modules/tuf-js/node_modules/proc-log/lib/index.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/proc-log/package.json create mode 100644 deps/npm/node_modules/tuf-js/node_modules/ssri/LICENSE.md create mode 100644 deps/npm/node_modules/tuf-js/node_modules/ssri/lib/index.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/ssri/package.json create mode 100644 deps/npm/node_modules/tuf-js/node_modules/unique-filename/LICENSE create mode 100644 deps/npm/node_modules/tuf-js/node_modules/unique-filename/lib/index.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/unique-filename/package.json create mode 100644 deps/npm/node_modules/tuf-js/node_modules/unique-slug/LICENSE create mode 100644 deps/npm/node_modules/tuf-js/node_modules/unique-slug/lib/index.js create mode 100644 deps/npm/node_modules/tuf-js/node_modules/unique-slug/package.json create mode 100644 deps/npm/tap-snapshots/test/lib/commands/install.js.test.cjs diff --git a/deps/npm/docs/content/commands/npm-init.md b/deps/npm/docs/content/commands/npm-init.md index c4b39e6397019c..870f846761e4e3 100644 --- a/deps/npm/docs/content/commands/npm-init.md +++ b/deps/npm/docs/content/commands/npm-init.md @@ -7,7 +7,7 @@ description: Create a package.json file ### Synopsis ```bash -npm init (same as `npx `) +npm init (same as `npx create-`) npm init <@scope> (same as `npx <@scope>/create`) aliases: create, innit diff --git a/deps/npm/docs/content/commands/npm-install.md b/deps/npm/docs/content/commands/npm-install.md index 3ec3c0f3c12896..ec933394b1a726 100644 --- a/deps/npm/docs/content/commands/npm-install.md +++ b/deps/npm/docs/content/commands/npm-install.md @@ -178,10 +178,6 @@ into a tarball (b). 
 npm install ansi-regex --save-bundle
 ```
 
-**Note**: If there is a file or folder named `<name>` in the current
-working directory, then it will try to install that, and only try to
-fetch the package by name if it is not valid.
-
 * `npm install <alias>@npm:<name>`:
 
   Install a package under a custom alias. Allows multiple versions of
diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md
index 59e5b281ef1155..3abedf9cd8e4e6 100644
--- a/deps/npm/docs/content/commands/npm-ls.md
+++ b/deps/npm/docs/content/commands/npm-ls.md
@@ -27,7 +27,7 @@ packages will *also* show the paths to the specified packages.
 For example, running `npm ls promzard` in npm's source tree will show:
 
 ```bash
-npm@10.8.3 /path/to/npm
+npm@10.9.0 /path/to/npm
 └─┬ init-package-json@0.0.4
   └── promzard@0.1.5
 ```
diff --git a/deps/npm/docs/content/commands/npm.md b/deps/npm/docs/content/commands/npm.md
index 52ad44d60a6db9..d92892ec544eb9 100644
--- a/deps/npm/docs/content/commands/npm.md
+++ b/deps/npm/docs/content/commands/npm.md
@@ -14,7 +14,7 @@ Note: This command is unaware of workspaces.
 
 ### Version
 
-10.8.3
+10.9.0
 
 ### Description
 
diff --git a/deps/npm/docs/content/configuring-npm/package-json.md b/deps/npm/docs/content/configuring-npm/package-json.md
index a843b974b0ed55..7e9daf1317717f 100644
--- a/deps/npm/docs/content/configuring-npm/package-json.md
+++ b/deps/npm/docs/content/configuring-npm/package-json.md
@@ -337,6 +337,10 @@ the `files` globs. Exceptions to this are:
 
 These can not be included.
 
+### exports
+
+The "exports" field provides a modern alternative to "main", allowing multiple entry points to be defined, supporting conditional entry resolution between environments, and preventing any entry points besides those defined in "exports". This encapsulation allows module authors to clearly define the public interface for their package. For more details see the [node.js documentation on package entry points](https://nodejs.org/api/packages.html#package-entry-points).
+
 ### main
 
 The main field is a module ID that is the primary entry point to your
@@ -1125,6 +1129,32 @@ Like the `os` option, you can also block architectures:
 
 The host architecture is determined by `process.arch`
 
+### devEngines
+
+The `devEngines` field helps ensure that all engineers working on a codebase use the same tooling.
+
+You can specify a `devEngines` property in your `package.json`, which will be checked before the `install`, `ci`, and `run` commands.
+
+> Note: `engines` and `devEngines` differ in object shape. They also function very differently. `engines` is designed to alert the user when a dependency uses a different npm or node version than the project it's being used in, whereas `devEngines` is used to alert people interacting with the source code of a project.
+
+The supported keys under the `devEngines` property are `cpu`, `os`, `libc`, `runtime`, and `packageManager`. Each property can be an object or an array of objects. Objects must contain `name`, and can optionally specify `version` and `onFail`. `onFail` can be `warn`, `error`, or `ignore`; if left undefined, it defaults to `error`. `npm` will assume that you're running with `node`.
+Here's an example of a project that will fail if the environment is not `node` and `npm`. If you set `runtime.name` or `packageManager.name` to any other string, it will fail within the npm CLI.
+
+```json
+{
+  "devEngines": {
+    "runtime": {
+      "name": "node",
+      "onFail": "error"
+    },
+    "packageManager": {
+      "name": "npm",
+      "onFail": "error"
+    }
+  }
+}
+```
+
 ### private
 
 If you set `"private": true` in your package.json, then npm will refuse to
diff --git a/deps/npm/docs/output/commands/npm-access.html b/deps/npm/docs/output/commands/npm-access.html
index 76bf07b8473ee4..0a839663fcb016 100644
--- a/deps/npm/docs/output/commands/npm-access.html
+++ b/deps/npm/docs/output/commands/npm-access.html
@@ -141,9 +141,9 @@
-npm-access - @10.8.3
+npm-access - @10.9.0

Set access level on published packages
diff --git a/deps/npm/docs/output/commands/npm-adduser.html b/deps/npm/docs/output/commands/npm-adduser.html index 8cf2e1abf4bf55..2ac615ad13978d 100644 --- a/deps/npm/docs/output/commands/npm-adduser.html +++ b/deps/npm/docs/output/commands/npm-adduser.html @@ -141,9 +141,9 @@
-npm-adduser - @10.8.3
+npm-adduser - @10.9.0

Add a registry user account
diff --git a/deps/npm/docs/output/commands/npm-audit.html b/deps/npm/docs/output/commands/npm-audit.html index 7d53101a2f37b8..d7374ccfa66089 100644 --- a/deps/npm/docs/output/commands/npm-audit.html +++ b/deps/npm/docs/output/commands/npm-audit.html @@ -141,9 +141,9 @@
-npm-audit - @10.8.3
+npm-audit - @10.9.0

Run a security audit
diff --git a/deps/npm/docs/output/commands/npm-bugs.html b/deps/npm/docs/output/commands/npm-bugs.html index d9e5cf60b6776d..9186bc100ae8f6 100644 --- a/deps/npm/docs/output/commands/npm-bugs.html +++ b/deps/npm/docs/output/commands/npm-bugs.html @@ -141,9 +141,9 @@
-npm-bugs - @10.8.3
+npm-bugs - @10.9.0

Report bugs for a package in a web browser
diff --git a/deps/npm/docs/output/commands/npm-cache.html b/deps/npm/docs/output/commands/npm-cache.html index 21f2cc68bcc6ce..55cec217dbdfb7 100644 --- a/deps/npm/docs/output/commands/npm-cache.html +++ b/deps/npm/docs/output/commands/npm-cache.html @@ -141,9 +141,9 @@
-npm-cache - @10.8.3
+npm-cache - @10.9.0

Manipulates packages cache
diff --git a/deps/npm/docs/output/commands/npm-ci.html b/deps/npm/docs/output/commands/npm-ci.html index 9f645ecfb59c44..efe858777565a9 100644 --- a/deps/npm/docs/output/commands/npm-ci.html +++ b/deps/npm/docs/output/commands/npm-ci.html @@ -141,9 +141,9 @@
-npm-ci - @10.8.3
+npm-ci - @10.9.0

Clean install a project
diff --git a/deps/npm/docs/output/commands/npm-completion.html b/deps/npm/docs/output/commands/npm-completion.html index d73c648bfa391f..4a91a94498f33b 100644 --- a/deps/npm/docs/output/commands/npm-completion.html +++ b/deps/npm/docs/output/commands/npm-completion.html @@ -141,9 +141,9 @@
-npm-completion - @10.8.3
+npm-completion - @10.9.0

Tab Completion for npm
diff --git a/deps/npm/docs/output/commands/npm-config.html b/deps/npm/docs/output/commands/npm-config.html index 4e5c70886388ec..d18998fea8471d 100644 --- a/deps/npm/docs/output/commands/npm-config.html +++ b/deps/npm/docs/output/commands/npm-config.html @@ -141,9 +141,9 @@
-npm-config - @10.8.3
+npm-config - @10.9.0

Manage the npm configuration files
diff --git a/deps/npm/docs/output/commands/npm-dedupe.html b/deps/npm/docs/output/commands/npm-dedupe.html index f4c92e79f8d59c..194ea085383df3 100644 --- a/deps/npm/docs/output/commands/npm-dedupe.html +++ b/deps/npm/docs/output/commands/npm-dedupe.html @@ -141,9 +141,9 @@
-npm-dedupe - @10.8.3
+npm-dedupe - @10.9.0

Reduce duplication in the package tree
diff --git a/deps/npm/docs/output/commands/npm-deprecate.html b/deps/npm/docs/output/commands/npm-deprecate.html index c644650427aaff..ae40adfbbab051 100644 --- a/deps/npm/docs/output/commands/npm-deprecate.html +++ b/deps/npm/docs/output/commands/npm-deprecate.html @@ -141,9 +141,9 @@
-npm-deprecate - @10.8.3
+npm-deprecate - @10.9.0

Deprecate a version of a package
diff --git a/deps/npm/docs/output/commands/npm-diff.html b/deps/npm/docs/output/commands/npm-diff.html index 6e23a06faa413c..257b1c21572083 100644 --- a/deps/npm/docs/output/commands/npm-diff.html +++ b/deps/npm/docs/output/commands/npm-diff.html @@ -141,9 +141,9 @@
-npm-diff - @10.8.3
+npm-diff - @10.9.0

The registry diff command
diff --git a/deps/npm/docs/output/commands/npm-dist-tag.html b/deps/npm/docs/output/commands/npm-dist-tag.html index 2e0ea2b927c3d9..08e7770a0c1745 100644 --- a/deps/npm/docs/output/commands/npm-dist-tag.html +++ b/deps/npm/docs/output/commands/npm-dist-tag.html @@ -141,9 +141,9 @@
-npm-dist-tag - @10.8.3
+npm-dist-tag - @10.9.0

Modify package distribution tags
diff --git a/deps/npm/docs/output/commands/npm-docs.html b/deps/npm/docs/output/commands/npm-docs.html index e5108efb32b0e3..8b647251f36740 100644 --- a/deps/npm/docs/output/commands/npm-docs.html +++ b/deps/npm/docs/output/commands/npm-docs.html @@ -141,9 +141,9 @@
-npm-docs - @10.8.3
+npm-docs - @10.9.0

Open documentation for a package in a web browser
diff --git a/deps/npm/docs/output/commands/npm-doctor.html b/deps/npm/docs/output/commands/npm-doctor.html index 6ec69164f8d3e4..110ec2f9b20bf0 100644 --- a/deps/npm/docs/output/commands/npm-doctor.html +++ b/deps/npm/docs/output/commands/npm-doctor.html @@ -141,9 +141,9 @@
-npm-doctor - @10.8.3
+npm-doctor - @10.9.0

Check the health of your npm environment
diff --git a/deps/npm/docs/output/commands/npm-edit.html b/deps/npm/docs/output/commands/npm-edit.html index 3b0e72471738dc..9e4f7b361a5cb6 100644 --- a/deps/npm/docs/output/commands/npm-edit.html +++ b/deps/npm/docs/output/commands/npm-edit.html @@ -141,9 +141,9 @@
-npm-edit - @10.8.3
+npm-edit - @10.9.0

Edit an installed package
diff --git a/deps/npm/docs/output/commands/npm-exec.html b/deps/npm/docs/output/commands/npm-exec.html index 71087fefd84709..695fa35ab825c2 100644 --- a/deps/npm/docs/output/commands/npm-exec.html +++ b/deps/npm/docs/output/commands/npm-exec.html @@ -141,9 +141,9 @@
-npm-exec - @10.8.3
+npm-exec - @10.9.0

Run a command from a local or remote npm package
diff --git a/deps/npm/docs/output/commands/npm-explain.html b/deps/npm/docs/output/commands/npm-explain.html index dcc6a94838aaa5..e79255ff5fa6c6 100644 --- a/deps/npm/docs/output/commands/npm-explain.html +++ b/deps/npm/docs/output/commands/npm-explain.html @@ -141,9 +141,9 @@
-npm-explain - @10.8.3
+npm-explain - @10.9.0

Explain installed packages
diff --git a/deps/npm/docs/output/commands/npm-explore.html b/deps/npm/docs/output/commands/npm-explore.html index 783de27e7dd87c..e296f4146aff98 100644 --- a/deps/npm/docs/output/commands/npm-explore.html +++ b/deps/npm/docs/output/commands/npm-explore.html @@ -141,9 +141,9 @@
-npm-explore - @10.8.3
+npm-explore - @10.9.0

Browse an installed package
diff --git a/deps/npm/docs/output/commands/npm-find-dupes.html b/deps/npm/docs/output/commands/npm-find-dupes.html index fb1badb1476c27..a8c914a0dd3d44 100644 --- a/deps/npm/docs/output/commands/npm-find-dupes.html +++ b/deps/npm/docs/output/commands/npm-find-dupes.html @@ -141,9 +141,9 @@
-npm-find-dupes - @10.8.3
+npm-find-dupes - @10.9.0

Find duplication in the package tree
diff --git a/deps/npm/docs/output/commands/npm-fund.html b/deps/npm/docs/output/commands/npm-fund.html index 9e597d085c0c34..36a63253439b60 100644 --- a/deps/npm/docs/output/commands/npm-fund.html +++ b/deps/npm/docs/output/commands/npm-fund.html @@ -141,9 +141,9 @@
-npm-fund - @10.8.3
+npm-fund - @10.9.0

Retrieve funding information
diff --git a/deps/npm/docs/output/commands/npm-help-search.html b/deps/npm/docs/output/commands/npm-help-search.html index 0b521d6de2ab1f..76dea45d852e75 100644 --- a/deps/npm/docs/output/commands/npm-help-search.html +++ b/deps/npm/docs/output/commands/npm-help-search.html @@ -141,9 +141,9 @@
-npm-help-search - @10.8.3
+npm-help-search - @10.9.0

Search npm help documentation
diff --git a/deps/npm/docs/output/commands/npm-help.html b/deps/npm/docs/output/commands/npm-help.html index dafcf20f7bc08b..e6b14af2f9ec6c 100644 --- a/deps/npm/docs/output/commands/npm-help.html +++ b/deps/npm/docs/output/commands/npm-help.html @@ -141,9 +141,9 @@
-npm-help - @10.8.3
+npm-help - @10.9.0

Get help on npm
diff --git a/deps/npm/docs/output/commands/npm-hook.html b/deps/npm/docs/output/commands/npm-hook.html index 953ce47aca7c80..393700a9a7165a 100644 --- a/deps/npm/docs/output/commands/npm-hook.html +++ b/deps/npm/docs/output/commands/npm-hook.html @@ -141,9 +141,9 @@
-npm-hook - @10.8.3
+npm-hook - @10.9.0

Manage registry hooks
diff --git a/deps/npm/docs/output/commands/npm-init.html b/deps/npm/docs/output/commands/npm-init.html index 305e34838072dd..8ff01b2f7a76d0 100644 --- a/deps/npm/docs/output/commands/npm-init.html +++ b/deps/npm/docs/output/commands/npm-init.html @@ -141,9 +141,9 @@
-npm-init - @10.8.3
+npm-init - @10.9.0

Create a package.json file
@@ -154,7 +154,7 @@

Table of contents

Synopsis

-npm init <package-spec> (same as `npx <package-spec>`)
+npm init <package-spec> (same as `npx create-<package-spec>`)
 npm init <@scope> (same as `npx <@scope>/create`)
 
 aliases: create, innit
diff --git a/deps/npm/docs/output/commands/npm-install-ci-test.html b/deps/npm/docs/output/commands/npm-install-ci-test.html
index a25a150d655e86..b0f9d237ed8e98 100644
--- a/deps/npm/docs/output/commands/npm-install-ci-test.html
+++ b/deps/npm/docs/output/commands/npm-install-ci-test.html
@@ -141,9 +141,9 @@
 
 
-npm-install-ci-test - @10.8.3
+npm-install-ci-test - @10.9.0

Install a project with a clean slate and run tests
diff --git a/deps/npm/docs/output/commands/npm-install-test.html b/deps/npm/docs/output/commands/npm-install-test.html index 1a36de01dcff6f..b0fbd63887fff5 100644 --- a/deps/npm/docs/output/commands/npm-install-test.html +++ b/deps/npm/docs/output/commands/npm-install-test.html @@ -141,9 +141,9 @@
-npm-install-test - @10.8.3
+npm-install-test - @10.9.0

Install package(s) and run tests
diff --git a/deps/npm/docs/output/commands/npm-install.html b/deps/npm/docs/output/commands/npm-install.html index 858c325cd833b7..fa57e02eaf9ad5 100644 --- a/deps/npm/docs/output/commands/npm-install.html +++ b/deps/npm/docs/output/commands/npm-install.html @@ -141,9 +141,9 @@
-npm-install - @10.8.3
+npm-install - @10.9.0

Install a package
@@ -309,9 +309,6 @@

Description

npm install readable-stream --save-exact npm install ansi-regex --save-bundle
-Note: If there is a file or folder named <name> in the current
-working directory, then it will try to install that, and only try to
-fetch the package by name if it is not valid.

  • npm install <alias>@npm:<name>:

    diff --git a/deps/npm/docs/output/commands/npm-link.html b/deps/npm/docs/output/commands/npm-link.html index c6738b85254188..4e461ebefafd42 100644 --- a/deps/npm/docs/output/commands/npm-link.html +++ b/deps/npm/docs/output/commands/npm-link.html @@ -141,9 +141,9 @@
-npm-link - @10.8.3
+npm-link - @10.9.0

    Symlink a package folder
    diff --git a/deps/npm/docs/output/commands/npm-login.html b/deps/npm/docs/output/commands/npm-login.html index bc8e4684ea5363..9c1584ca36bc41 100644 --- a/deps/npm/docs/output/commands/npm-login.html +++ b/deps/npm/docs/output/commands/npm-login.html @@ -141,9 +141,9 @@
-npm-login - @10.8.3
+npm-login - @10.9.0

    Login to a registry user account
    diff --git a/deps/npm/docs/output/commands/npm-logout.html b/deps/npm/docs/output/commands/npm-logout.html index 5b48e25a8245b0..8908b329395254 100644 --- a/deps/npm/docs/output/commands/npm-logout.html +++ b/deps/npm/docs/output/commands/npm-logout.html @@ -141,9 +141,9 @@
-npm-logout - @10.8.3
+npm-logout - @10.9.0

    Log out of the registry
    diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html index c1c5cd0c9e779f..2615a492a75e38 100644 --- a/deps/npm/docs/output/commands/npm-ls.html +++ b/deps/npm/docs/output/commands/npm-ls.html @@ -141,9 +141,9 @@
-npm-ls - @10.8.3
+npm-ls - @10.9.0

    List installed packages
    @@ -168,7 +168,7 @@

    Description

    the results to only the paths to the packages named. Note that nested packages will also show the paths to the specified packages. For example, running npm ls promzard in npm's source tree will show:

-npm@10.8.3 /path/to/npm
+npm@10.9.0 /path/to/npm
     └─┬ init-package-json@0.0.4
       └── promzard@0.1.5
     
    diff --git a/deps/npm/docs/output/commands/npm-org.html b/deps/npm/docs/output/commands/npm-org.html index 9f97a53bf5cb34..66d823f7f3c2de 100644 --- a/deps/npm/docs/output/commands/npm-org.html +++ b/deps/npm/docs/output/commands/npm-org.html @@ -141,9 +141,9 @@
-npm-org - @10.8.3
+npm-org - @10.9.0

    Manage orgs
    diff --git a/deps/npm/docs/output/commands/npm-outdated.html b/deps/npm/docs/output/commands/npm-outdated.html index db933f402e923f..6002ff1818da2c 100644 --- a/deps/npm/docs/output/commands/npm-outdated.html +++ b/deps/npm/docs/output/commands/npm-outdated.html @@ -141,9 +141,9 @@
-npm-outdated - @10.8.3
+npm-outdated - @10.9.0

    Check for outdated packages
    diff --git a/deps/npm/docs/output/commands/npm-owner.html b/deps/npm/docs/output/commands/npm-owner.html index 7b3d111efad61a..e8b7a05274aa91 100644 --- a/deps/npm/docs/output/commands/npm-owner.html +++ b/deps/npm/docs/output/commands/npm-owner.html @@ -141,9 +141,9 @@
-npm-owner - @10.8.3
+npm-owner - @10.9.0

    Manage package owners
    diff --git a/deps/npm/docs/output/commands/npm-pack.html b/deps/npm/docs/output/commands/npm-pack.html index a39574805ca1a1..596ba9a35e3ac5 100644 --- a/deps/npm/docs/output/commands/npm-pack.html +++ b/deps/npm/docs/output/commands/npm-pack.html @@ -141,9 +141,9 @@
-npm-pack - @10.8.3
+npm-pack - @10.9.0

    Create a tarball from a package
    diff --git a/deps/npm/docs/output/commands/npm-ping.html b/deps/npm/docs/output/commands/npm-ping.html index bb052270cf3f88..de393dbd0b5d71 100644 --- a/deps/npm/docs/output/commands/npm-ping.html +++ b/deps/npm/docs/output/commands/npm-ping.html @@ -141,9 +141,9 @@
-npm-ping - @10.8.3
+npm-ping - @10.9.0

    Ping npm registry
    diff --git a/deps/npm/docs/output/commands/npm-pkg.html b/deps/npm/docs/output/commands/npm-pkg.html index 86c5ad73917db5..ffb153cc440ce4 100644 --- a/deps/npm/docs/output/commands/npm-pkg.html +++ b/deps/npm/docs/output/commands/npm-pkg.html @@ -141,9 +141,9 @@
-npm-pkg - @10.8.3
+npm-pkg - @10.9.0

    Manages your package.json
    diff --git a/deps/npm/docs/output/commands/npm-prefix.html b/deps/npm/docs/output/commands/npm-prefix.html index d16200830c3a3a..da63644f2df42a 100644 --- a/deps/npm/docs/output/commands/npm-prefix.html +++ b/deps/npm/docs/output/commands/npm-prefix.html @@ -141,9 +141,9 @@
-npm-prefix - @10.8.3
+npm-prefix - @10.9.0

    Display prefix
    diff --git a/deps/npm/docs/output/commands/npm-profile.html b/deps/npm/docs/output/commands/npm-profile.html index ac0c7206cbaa46..c3679196a85bc4 100644 --- a/deps/npm/docs/output/commands/npm-profile.html +++ b/deps/npm/docs/output/commands/npm-profile.html @@ -141,9 +141,9 @@
-npm-profile - @10.8.3
+npm-profile - @10.9.0

    Change settings on your registry profile
    diff --git a/deps/npm/docs/output/commands/npm-prune.html b/deps/npm/docs/output/commands/npm-prune.html index ea1976ed852ca7..4e844ab9f02cbb 100644 --- a/deps/npm/docs/output/commands/npm-prune.html +++ b/deps/npm/docs/output/commands/npm-prune.html @@ -141,9 +141,9 @@
-npm-prune - @10.8.3
+npm-prune - @10.9.0

    Remove extraneous packages
    diff --git a/deps/npm/docs/output/commands/npm-publish.html b/deps/npm/docs/output/commands/npm-publish.html index 89d5ac62ec1fac..b808cc29a15744 100644 --- a/deps/npm/docs/output/commands/npm-publish.html +++ b/deps/npm/docs/output/commands/npm-publish.html @@ -141,9 +141,9 @@
-npm-publish - @10.8.3
+npm-publish - @10.9.0

    Publish a package
    diff --git a/deps/npm/docs/output/commands/npm-query.html b/deps/npm/docs/output/commands/npm-query.html index 90c75bccd61189..d85a2f6c27ff79 100644 --- a/deps/npm/docs/output/commands/npm-query.html +++ b/deps/npm/docs/output/commands/npm-query.html @@ -141,9 +141,9 @@
-npm-query - @10.8.3
+npm-query - @10.9.0

    Dependency selector query
    diff --git a/deps/npm/docs/output/commands/npm-rebuild.html b/deps/npm/docs/output/commands/npm-rebuild.html index 6dbb7f9c12e42d..ff28b35b86bced 100644 --- a/deps/npm/docs/output/commands/npm-rebuild.html +++ b/deps/npm/docs/output/commands/npm-rebuild.html @@ -141,9 +141,9 @@
-npm-rebuild - @10.8.3
+npm-rebuild - @10.9.0

    Rebuild a package
    diff --git a/deps/npm/docs/output/commands/npm-repo.html b/deps/npm/docs/output/commands/npm-repo.html index c31847aeca0622..8ee7e9d6d2d074 100644 --- a/deps/npm/docs/output/commands/npm-repo.html +++ b/deps/npm/docs/output/commands/npm-repo.html @@ -141,9 +141,9 @@
-npm-repo - @10.8.3
+npm-repo - @10.9.0

    Open package repository page in the browser
    diff --git a/deps/npm/docs/output/commands/npm-restart.html b/deps/npm/docs/output/commands/npm-restart.html index 6d0b29454a77be..e3171bf280910b 100644 --- a/deps/npm/docs/output/commands/npm-restart.html +++ b/deps/npm/docs/output/commands/npm-restart.html @@ -141,9 +141,9 @@
-npm-restart - @10.8.3
+npm-restart - @10.9.0

    Restart a package
    diff --git a/deps/npm/docs/output/commands/npm-root.html b/deps/npm/docs/output/commands/npm-root.html index 670cf25a731134..4e2e82e5bb259c 100644 --- a/deps/npm/docs/output/commands/npm-root.html +++ b/deps/npm/docs/output/commands/npm-root.html @@ -141,9 +141,9 @@
-npm-root - @10.8.3
+npm-root - @10.9.0

    Display npm root
    diff --git a/deps/npm/docs/output/commands/npm-run-script.html b/deps/npm/docs/output/commands/npm-run-script.html index bcabcc0bfec01f..4673e733f49fcd 100644 --- a/deps/npm/docs/output/commands/npm-run-script.html +++ b/deps/npm/docs/output/commands/npm-run-script.html @@ -141,9 +141,9 @@
-npm-run-script - @10.8.3
+npm-run-script - @10.9.0

    Run arbitrary package scripts
    diff --git a/deps/npm/docs/output/commands/npm-sbom.html b/deps/npm/docs/output/commands/npm-sbom.html index f95f0992fb5cd1..00508eca6cd91a 100644 --- a/deps/npm/docs/output/commands/npm-sbom.html +++ b/deps/npm/docs/output/commands/npm-sbom.html @@ -141,9 +141,9 @@
-npm-sbom - @10.8.3
+npm-sbom - @10.9.0

    Generate a Software Bill of Materials (SBOM)
    diff --git a/deps/npm/docs/output/commands/npm-search.html b/deps/npm/docs/output/commands/npm-search.html index ba4b81381b0b38..edf4e437e65400 100644 --- a/deps/npm/docs/output/commands/npm-search.html +++ b/deps/npm/docs/output/commands/npm-search.html @@ -141,9 +141,9 @@
-npm-search - @10.8.3
+npm-search - @10.9.0

    Search for packages
    diff --git a/deps/npm/docs/output/commands/npm-shrinkwrap.html b/deps/npm/docs/output/commands/npm-shrinkwrap.html index c143822f846db7..f225abaed9218f 100644 --- a/deps/npm/docs/output/commands/npm-shrinkwrap.html +++ b/deps/npm/docs/output/commands/npm-shrinkwrap.html @@ -141,9 +141,9 @@
-npm-shrinkwrap - @10.8.3
+npm-shrinkwrap - @10.9.0

    Lock down dependency versions for publication
    diff --git a/deps/npm/docs/output/commands/npm-star.html b/deps/npm/docs/output/commands/npm-star.html index e634b8686c2e7a..04b36628ad2176 100644 --- a/deps/npm/docs/output/commands/npm-star.html +++ b/deps/npm/docs/output/commands/npm-star.html @@ -141,9 +141,9 @@
-npm-star - @10.8.3
+npm-star - @10.9.0

    Mark your favorite packages
    diff --git a/deps/npm/docs/output/commands/npm-stars.html b/deps/npm/docs/output/commands/npm-stars.html index 0986e3a60f6acd..3183aa047f1f89 100644 --- a/deps/npm/docs/output/commands/npm-stars.html +++ b/deps/npm/docs/output/commands/npm-stars.html @@ -141,9 +141,9 @@
-npm-stars - @10.8.3
+npm-stars - @10.9.0

    View packages marked as favorites
    diff --git a/deps/npm/docs/output/commands/npm-start.html b/deps/npm/docs/output/commands/npm-start.html index 0879146131ed6e..b81caefe0a4431 100644 --- a/deps/npm/docs/output/commands/npm-start.html +++ b/deps/npm/docs/output/commands/npm-start.html @@ -141,9 +141,9 @@
-npm-start - @10.8.3
+npm-start - @10.9.0

    Start a package
    diff --git a/deps/npm/docs/output/commands/npm-stop.html b/deps/npm/docs/output/commands/npm-stop.html index ea3001a24bdcab..85d4c782a736e7 100644 --- a/deps/npm/docs/output/commands/npm-stop.html +++ b/deps/npm/docs/output/commands/npm-stop.html @@ -141,9 +141,9 @@
-npm-stop - @10.8.3
+npm-stop - @10.9.0

    Stop a package
    diff --git a/deps/npm/docs/output/commands/npm-team.html b/deps/npm/docs/output/commands/npm-team.html index 2cad22ffbd4fe2..6ad869fa3bed9e 100644 --- a/deps/npm/docs/output/commands/npm-team.html +++ b/deps/npm/docs/output/commands/npm-team.html @@ -141,9 +141,9 @@
-npm-team - @10.8.3
+npm-team - @10.9.0

    Manage organization teams and team memberships
    diff --git a/deps/npm/docs/output/commands/npm-test.html b/deps/npm/docs/output/commands/npm-test.html index ab7431a1b0a8b0..bce16c92e4f087 100644 --- a/deps/npm/docs/output/commands/npm-test.html +++ b/deps/npm/docs/output/commands/npm-test.html @@ -141,9 +141,9 @@
-npm-test - @10.8.3
+npm-test - @10.9.0

    Test a package
    diff --git a/deps/npm/docs/output/commands/npm-token.html b/deps/npm/docs/output/commands/npm-token.html index ca7df2869ec714..99fcafd9b8f53b 100644 --- a/deps/npm/docs/output/commands/npm-token.html +++ b/deps/npm/docs/output/commands/npm-token.html @@ -141,9 +141,9 @@
-npm-token - @10.8.3
+npm-token - @10.9.0

    Manage your authentication tokens
    diff --git a/deps/npm/docs/output/commands/npm-uninstall.html b/deps/npm/docs/output/commands/npm-uninstall.html index ea964c96e07073..dd8e7a2234604e 100644 --- a/deps/npm/docs/output/commands/npm-uninstall.html +++ b/deps/npm/docs/output/commands/npm-uninstall.html @@ -141,9 +141,9 @@
-npm-uninstall - @10.8.3
+npm-uninstall - @10.9.0

    Remove a package
    diff --git a/deps/npm/docs/output/commands/npm-unpublish.html b/deps/npm/docs/output/commands/npm-unpublish.html index 607049cf467cd5..56e2c59f62addb 100644 --- a/deps/npm/docs/output/commands/npm-unpublish.html +++ b/deps/npm/docs/output/commands/npm-unpublish.html @@ -141,9 +141,9 @@
-npm-unpublish - @10.8.3
+npm-unpublish - @10.9.0

    Remove a package from the registry
    diff --git a/deps/npm/docs/output/commands/npm-unstar.html b/deps/npm/docs/output/commands/npm-unstar.html index 15a72f6013482d..710d8a947d665f 100644 --- a/deps/npm/docs/output/commands/npm-unstar.html +++ b/deps/npm/docs/output/commands/npm-unstar.html @@ -141,9 +141,9 @@
-npm-unstar - @10.8.3
+npm-unstar - @10.9.0

    Remove an item from your favorite packages
    diff --git a/deps/npm/docs/output/commands/npm-update.html b/deps/npm/docs/output/commands/npm-update.html index e38eb06ba4c8fc..e587ec92f8614a 100644 --- a/deps/npm/docs/output/commands/npm-update.html +++ b/deps/npm/docs/output/commands/npm-update.html @@ -141,9 +141,9 @@
-npm-update - @10.8.3
+npm-update - @10.9.0

    Update packages
    diff --git a/deps/npm/docs/output/commands/npm-version.html b/deps/npm/docs/output/commands/npm-version.html index dfb31dd2c9821d..196a0236093673 100644 --- a/deps/npm/docs/output/commands/npm-version.html +++ b/deps/npm/docs/output/commands/npm-version.html @@ -141,9 +141,9 @@
-npm-version - @10.8.3
+npm-version - @10.9.0

    Bump a package version
    diff --git a/deps/npm/docs/output/commands/npm-view.html b/deps/npm/docs/output/commands/npm-view.html index 8837e47e8f6867..0eebee3037748d 100644 --- a/deps/npm/docs/output/commands/npm-view.html +++ b/deps/npm/docs/output/commands/npm-view.html @@ -141,9 +141,9 @@
-npm-view - @10.8.3
+npm-view - @10.9.0

    View registry info
    diff --git a/deps/npm/docs/output/commands/npm-whoami.html b/deps/npm/docs/output/commands/npm-whoami.html index f6dbcc3f3ae190..0d1fa5ea9b87d7 100644 --- a/deps/npm/docs/output/commands/npm-whoami.html +++ b/deps/npm/docs/output/commands/npm-whoami.html @@ -141,9 +141,9 @@
-npm-whoami - @10.8.3
+npm-whoami - @10.9.0

    Display npm username
    diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html index e9f96129c1571d..005009c1dba277 100644 --- a/deps/npm/docs/output/commands/npm.html +++ b/deps/npm/docs/output/commands/npm.html @@ -141,9 +141,9 @@
-npm - @10.8.3
+npm - @10.9.0

    javascript package manager
    @@ -158,7 +158,7 @@

    Table of contents

    Note: This command is unaware of workspaces.

    Version

-10.8.3
+10.9.0

    Description

    npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency diff --git a/deps/npm/docs/output/commands/npx.html b/deps/npm/docs/output/commands/npx.html index 418fc957a78f24..d3239ed320bb70 100644 --- a/deps/npm/docs/output/commands/npx.html +++ b/deps/npm/docs/output/commands/npx.html @@ -141,9 +141,9 @@

-npx - @10.8.3
+npx - @10.9.0

    Run a command from a local or remote npm package
    diff --git a/deps/npm/docs/output/configuring-npm/folders.html b/deps/npm/docs/output/configuring-npm/folders.html index 9bcbc0e5a36fc1..f23aef6354f73d 100644 --- a/deps/npm/docs/output/configuring-npm/folders.html +++ b/deps/npm/docs/output/configuring-npm/folders.html @@ -141,9 +141,9 @@
-folders - @10.8.3
+folders - @10.9.0

    Folder Structures Used by npm
    diff --git a/deps/npm/docs/output/configuring-npm/install.html b/deps/npm/docs/output/configuring-npm/install.html index d0d46ffc693878..cddbcaae2de8be 100644 --- a/deps/npm/docs/output/configuring-npm/install.html +++ b/deps/npm/docs/output/configuring-npm/install.html @@ -141,9 +141,9 @@
    install - @10.8.3 + @10.9.0

    Download and install node and npm
    diff --git a/deps/npm/docs/output/configuring-npm/npm-global.html b/deps/npm/docs/output/configuring-npm/npm-global.html index 9bcbc0e5a36fc1..f23aef6354f73d 100644 --- a/deps/npm/docs/output/configuring-npm/npm-global.html +++ b/deps/npm/docs/output/configuring-npm/npm-global.html @@ -141,9 +141,9 @@
    folders - @10.8.3 + @10.9.0

    Folder Structures Used by npm
    diff --git a/deps/npm/docs/output/configuring-npm/npm-json.html b/deps/npm/docs/output/configuring-npm/npm-json.html index dfdc917fc21244..0cd27f8d82fdd8 100644 --- a/deps/npm/docs/output/configuring-npm/npm-json.html +++ b/deps/npm/docs/output/configuring-npm/npm-json.html @@ -141,16 +141,16 @@
    package.json - @10.8.3 + @10.9.0

    Specifics of npm's package.json handling

    Table of contents


    Description

    @@ -411,6 +411,8 @@

    files

  • yarn.lock
• These cannot be included.

    +

    exports

    +

    The "exports" provides a modern alternative to "main" allowing multiple entry points to be defined, conditional entry resolution support between environments, and preventing any other entry points besides those defined in "exports". This encapsulation allows module authors to clearly define the public interface for their package. For more details see the node.js documentation on package entry points

    main

The main field is a module ID that is the primary entry point to your program. That is, if your package is named foo, and a user installs it, and then does require("foo"), then your main module's exports object will be returned.

@@ -1005,6 +1007,27 @@

    cpu

    }
  • The host architecture is determined by process.arch

    +

    devEngines

    +

The devEngines field helps engineers working on a codebase to all use the same tooling.

    +

You can specify a devEngines property in your package.json; it is checked before the install, ci, and run commands.

    +
    +

Note: engines and devEngines differ in object shape. They also function very differently. engines is designed to alert the user when a dependency uses a differing npm or node version than the project it's being used in, whereas devEngines is used to alert people interacting with the source code of a project.

    +
    +

The supported keys under the devEngines property are cpu, os, libc, runtime, and packageManager. Each property can be an object or an array of objects. Objects must contain name, and can optionally specify version and onFail. onFail can be warn, error, or ignore; if left undefined, it defaults to error. npm will assume that you're running with node. Here's an example of a project that will fail if the environment is not node and npm. If you set runtime.name or packageManager.name to any other string, it will fail within the npm CLI.

    +
    {
    +  "devEngines": {
    +    "runtime": {
    +      "name": "node",
    +      "onFail": "error"
    +    },
    +    "packageManager": {
    +      "name": "npm",
    +      "onFail": "error"
    +    }
    +  }
    +}
    +
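
As a further sketch (illustrative, not part of the patch; the version range below is hypothetical), a devEngines entry can also pin a version range and merely warn rather than error on mismatch:

{
  "devEngines": {
    "runtime": {
      "name": "node",
      "version": ">=20.0.0",
      "onFail": "warn"
    }
  }
}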

    private

    If you set "private": true in your package.json, then npm will refuse to publish it.
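
For instance (the package name is illustrative):

{
  "name": "internal-tools",
  "private": true
}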

    diff --git a/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html b/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html index 567f47ad15dba3..5f20e99541a7a7 100644 --- a/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html +++ b/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html @@ -141,9 +141,9 @@
    npm-shrinkwrap.json - @10.8.3 + @10.9.0

    A publishable lockfile
    diff --git a/deps/npm/docs/output/configuring-npm/npmrc.html b/deps/npm/docs/output/configuring-npm/npmrc.html index 19950a61fb82be..a20c3d807d3d83 100644 --- a/deps/npm/docs/output/configuring-npm/npmrc.html +++ b/deps/npm/docs/output/configuring-npm/npmrc.html @@ -141,9 +141,9 @@
    npmrc - @10.8.3 + @10.9.0

    The npm config files
    diff --git a/deps/npm/docs/output/configuring-npm/package-json.html b/deps/npm/docs/output/configuring-npm/package-json.html index dfdc917fc21244..0cd27f8d82fdd8 100644 --- a/deps/npm/docs/output/configuring-npm/package-json.html +++ b/deps/npm/docs/output/configuring-npm/package-json.html @@ -141,16 +141,16 @@
    package.json - @10.8.3 + @10.9.0

    Specifics of npm's package.json handling

    Table of contents


    Description

    @@ -411,6 +411,8 @@

    files

  • yarn.lock
• These cannot be included.

    +

    exports

    +

    The "exports" provides a modern alternative to "main" allowing multiple entry points to be defined, conditional entry resolution support between environments, and preventing any other entry points besides those defined in "exports". This encapsulation allows module authors to clearly define the public interface for their package. For more details see the node.js documentation on package entry points

    main

The main field is a module ID that is the primary entry point to your program. That is, if your package is named foo, and a user installs it, and then does require("foo"), then your main module's exports object will be returned.

@@ -1005,6 +1007,27 @@

    cpu

    }

    The host architecture is determined by process.arch

    +

    devEngines

    +

The devEngines field helps engineers working on a codebase to all use the same tooling.

    +

You can specify a devEngines property in your package.json; it is checked before the install, ci, and run commands.

    +
    +

Note: engines and devEngines differ in object shape. They also function very differently. engines is designed to alert the user when a dependency uses a differing npm or node version than the project it's being used in, whereas devEngines is used to alert people interacting with the source code of a project.

    +
    +

The supported keys under the devEngines property are cpu, os, libc, runtime, and packageManager. Each property can be an object or an array of objects. Objects must contain name, and can optionally specify version and onFail. onFail can be warn, error, or ignore; if left undefined, it defaults to error. npm will assume that you're running with node. Here's an example of a project that will fail if the environment is not node and npm. If you set runtime.name or packageManager.name to any other string, it will fail within the npm CLI.

    +
    {
    +  "devEngines": {
    +    "runtime": {
    +      "name": "node",
    +      "onFail": "error"
    +    },
    +    "packageManager": {
    +      "name": "npm",
    +      "onFail": "error"
    +    }
    +  }
    +}
    +
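
As an additional sketch (not part of the patch; names and versions are illustrative), each key can also take an array of objects when more than one tool is acceptable:

{
  "devEngines": {
    "runtime": [
      { "name": "node", "version": ">=18.0.0", "onFail": "warn" },
      { "name": "bun", "onFail": "ignore" }
    ]
  }
}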

    private

    If you set "private": true in your package.json, then npm will refuse to publish it.

    diff --git a/deps/npm/docs/output/configuring-npm/package-lock-json.html b/deps/npm/docs/output/configuring-npm/package-lock-json.html index 7ece2f78141166..58598df7ae4fbc 100644 --- a/deps/npm/docs/output/configuring-npm/package-lock-json.html +++ b/deps/npm/docs/output/configuring-npm/package-lock-json.html @@ -141,9 +141,9 @@
    package-lock.json - @10.8.3 + @10.9.0

    A manifestation of the manifest
    diff --git a/deps/npm/docs/output/using-npm/config.html b/deps/npm/docs/output/using-npm/config.html index 0bc94abe755923..5d70bb7e8e803e 100644 --- a/deps/npm/docs/output/using-npm/config.html +++ b/deps/npm/docs/output/using-npm/config.html @@ -141,9 +141,9 @@
    config - @10.8.3 + @10.9.0

    More than you probably want to know about npm configuration
    diff --git a/deps/npm/docs/output/using-npm/dependency-selectors.html b/deps/npm/docs/output/using-npm/dependency-selectors.html index 38c14547a1d051..008377b3ab55ff 100644 --- a/deps/npm/docs/output/using-npm/dependency-selectors.html +++ b/deps/npm/docs/output/using-npm/dependency-selectors.html @@ -141,9 +141,9 @@
    Dependency Selector Syntax & Querying - @10.8.3 + @10.9.0

    Dependency Selector Syntax & Querying
    diff --git a/deps/npm/docs/output/using-npm/developers.html b/deps/npm/docs/output/using-npm/developers.html index fbb68f149c8f56..e75223cd623a54 100644 --- a/deps/npm/docs/output/using-npm/developers.html +++ b/deps/npm/docs/output/using-npm/developers.html @@ -141,9 +141,9 @@
    developers - @10.8.3 + @10.9.0

    Developer Guide
    diff --git a/deps/npm/docs/output/using-npm/logging.html b/deps/npm/docs/output/using-npm/logging.html index 9943e1a6e81b07..5499bd53e285f2 100644 --- a/deps/npm/docs/output/using-npm/logging.html +++ b/deps/npm/docs/output/using-npm/logging.html @@ -141,9 +141,9 @@
    Logging - @10.8.3 + @10.9.0

    Why, What & How We Log
    diff --git a/deps/npm/docs/output/using-npm/orgs.html b/deps/npm/docs/output/using-npm/orgs.html index 487f16e322e353..c9ed80f6421c5e 100644 --- a/deps/npm/docs/output/using-npm/orgs.html +++ b/deps/npm/docs/output/using-npm/orgs.html @@ -141,9 +141,9 @@
    orgs - @10.8.3 + @10.9.0

    Working with Teams & Orgs
    diff --git a/deps/npm/docs/output/using-npm/package-spec.html b/deps/npm/docs/output/using-npm/package-spec.html index c485b897fb2a18..a370699bb555bd 100644 --- a/deps/npm/docs/output/using-npm/package-spec.html +++ b/deps/npm/docs/output/using-npm/package-spec.html @@ -141,9 +141,9 @@
    package-spec - @10.8.3 + @10.9.0

    Package name specifier
    diff --git a/deps/npm/docs/output/using-npm/registry.html b/deps/npm/docs/output/using-npm/registry.html index 285769976fb4b3..9d9f8a539333c9 100644 --- a/deps/npm/docs/output/using-npm/registry.html +++ b/deps/npm/docs/output/using-npm/registry.html @@ -141,9 +141,9 @@
    registry - @10.8.3 + @10.9.0

    The JavaScript Package Registry
    diff --git a/deps/npm/docs/output/using-npm/removal.html b/deps/npm/docs/output/using-npm/removal.html index 207835c29cad23..2668dd9dd8a440 100644 --- a/deps/npm/docs/output/using-npm/removal.html +++ b/deps/npm/docs/output/using-npm/removal.html @@ -141,9 +141,9 @@
    removal - @10.8.3 + @10.9.0

    Cleaning the Slate
    diff --git a/deps/npm/docs/output/using-npm/scope.html b/deps/npm/docs/output/using-npm/scope.html index a536b06726e5e4..0441f8ef703e95 100644 --- a/deps/npm/docs/output/using-npm/scope.html +++ b/deps/npm/docs/output/using-npm/scope.html @@ -141,9 +141,9 @@
    scope - @10.8.3 + @10.9.0

    Scoped packages
    diff --git a/deps/npm/docs/output/using-npm/scripts.html b/deps/npm/docs/output/using-npm/scripts.html index a22d6c9d8ec27f..ff37c2ede18062 100644 --- a/deps/npm/docs/output/using-npm/scripts.html +++ b/deps/npm/docs/output/using-npm/scripts.html @@ -141,9 +141,9 @@
    scripts - @10.8.3 + @10.9.0

    How npm handles the "scripts" field
    diff --git a/deps/npm/docs/output/using-npm/workspaces.html b/deps/npm/docs/output/using-npm/workspaces.html index b3ebf5c300ce0f..19195cc868db4b 100644 --- a/deps/npm/docs/output/using-npm/workspaces.html +++ b/deps/npm/docs/output/using-npm/workspaces.html @@ -141,9 +141,9 @@
    workspaces - @10.8.3 + @10.9.0

    Working with workspaces
    diff --git a/deps/npm/lib/arborist-cmd.js b/deps/npm/lib/arborist-cmd.js index 9d247d02fa181d..f0167887b06996 100644 --- a/deps/npm/lib/arborist-cmd.js +++ b/deps/npm/lib/arborist-cmd.js @@ -18,6 +18,7 @@ class ArboristCmd extends BaseCommand { static workspaces = true static ignoreImplicitWorkspace = false + static checkDevEngines = true constructor (npm) { super(npm) diff --git a/deps/npm/lib/base-cmd.js b/deps/npm/lib/base-cmd.js index 99ae6d7f43c70a..941ffefad2ef4e 100644 --- a/deps/npm/lib/base-cmd.js +++ b/deps/npm/lib/base-cmd.js @@ -1,10 +1,12 @@ const { log } = require('proc-log') class BaseCommand { + // these defaults can be overridden by individual commands static workspaces = false static ignoreImplicitWorkspace = true + static checkDevEngines = false - // these are all overridden by individual commands + // these should always be overridden by individual commands static name = null static description = null static params = null @@ -129,6 +131,63 @@ class BaseCommand { } } + // Checks the devEngines entry in the package.json at this.localPrefix + async checkDevEngines () { + const force = this.npm.flatOptions.force + + const { devEngines } = await require('@npmcli/package-json') + .normalize(this.npm.config.localPrefix) + .then(p => p.content) + .catch(() => ({})) + + if (typeof devEngines === 'undefined') { + return + } + + const { checkDevEngines, currentEnv } = require('npm-install-checks') + const current = currentEnv.devEngines({ + nodeVersion: this.npm.nodeVersion, + npmVersion: this.npm.version, + }) + + const failures = checkDevEngines(devEngines, current) + const warnings = failures.filter(f => f.isWarn) + const errors = failures.filter(f => f.isError) + + const genMsg = (failure, i = 0) => { + return [...new Set([ + // eslint-disable-next-line + i === 0 ? 'The developer of this package has specified the following through devEngines' : '', + `${failure.message}`, + `${failure.errors.map(e => e.message).join('\n')}`, + ])].filter(v => v).join('\n') + } + + [...warnings, ...(force ? 
errors : [])].forEach((failure, i) => { + const message = genMsg(failure, i) + log.warn('EBADDEVENGINES', message) + log.warn('EBADDEVENGINES', { + current: failure.current, + required: failure.required, + }) + }) + + if (force) { + return + } + + if (errors.length) { + const failure = errors[0] + const message = genMsg(failure) + throw Object.assign(new Error(message), { + engine: failure.engine, + code: 'EBADDEVENGINES', + current: failure.current, + required: failure.required, + }) + } + } + async setWorkspaces () { const { relative } = require('node:path') diff --git a/deps/npm/lib/commands/init.js b/deps/npm/lib/commands/init.js index b8ef3e59ccf298..09e8d8522f7f31 100644 --- a/deps/npm/lib/commands/init.js +++ b/deps/npm/lib/commands/init.js @@ -31,7 +31,7 @@ class Init extends BaseCommand { static name = 'init' static usage = [ - ' (same as `npx `)', + ' (same as `npx create-`)', '<@scope> (same as `npx <@scope>/create`)', ] diff --git a/deps/npm/lib/commands/run-script.js b/deps/npm/lib/commands/run-script.js index 0a139d08af745b..50c745d6d9c078 100644 --- a/deps/npm/lib/commands/run-script.js +++ b/deps/npm/lib/commands/run-script.js @@ -21,6 +21,7 @@ class RunScript extends BaseCommand { static workspaces = true static ignoreImplicitWorkspace = false static isShellout = true + static checkDevEngines = true static async completion (opts, npm) { const argv = opts.conf.argv.remain diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js index 5563cec21ba4d2..893e032f1ecedd 100644 --- a/deps/npm/lib/npm.js +++ b/deps/npm/lib/npm.js @@ -247,6 +247,10 @@ class Npm { execWorkspaces = true } + if (command.checkDevEngines && !this.global) { + await command.checkDevEngines() + } + return time.start(`command:${cmd}`, () => execWorkspaces ? command.execWorkspaces(args) : command.exec(args)) } diff --git a/deps/npm/lib/utils/error-message.js b/deps/npm/lib/utils/error-message.js index fc47c909069f07..4b5582ac8e1816 100644 --- a/deps/npm/lib/utils/error-message.js +++ b/deps/npm/lib/utils/error-message.js @@ -200,6 +200,13 @@ const errorMessage = (er, npm) => { ].join('\n')]) break + case 'EBADDEVENGINES': { + const { current, required } = er + summary.push(['EBADDEVENGINES', er.message]) + detail.push(['EBADDEVENGINES', { current, required }]) + break + } + case 'EBADPLATFORM': { const actual = er.current const expected = { ...er.required } diff --git a/deps/npm/lib/utils/ping.js b/deps/npm/lib/utils/ping.js index 00956d0c1630ce..1c8c9e827a4eaa 100644 --- a/deps/npm/lib/utils/ping.js +++ b/deps/npm/lib/utils/ping.js @@ -2,6 +2,6 @@ // used by the ping and doctor commands const fetch = require('npm-registry-fetch') module.exports = async (flatOptions) => { - const res = await fetch('/-/ping?write=true', flatOptions) + const res = await fetch('/-/ping', { ...flatOptions, cache: false }) return res.json().catch(() => ({})) } diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1 index d3e622c61d53be..602193f3b8e3e8 100644 --- a/deps/npm/man/man1/npm-access.1 +++ b/deps/npm/man/man1/npm-access.1 @@ -1,4 +1,4 @@ -.TH "NPM-ACCESS" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-ACCESS" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-access\fR - Set access level on published packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1 index a670b65c62782f..467384ec0ec40e 100644 --- a/deps/npm/man/man1/npm-adduser.1 +++ b/deps/npm/man/man1/npm-adduser.1 @@ -1,4 +1,4 @@ -.TH "NPM-ADDUSER" "1" "August 2024" "NPM@10.8.3" "" +.TH 
"NPM-ADDUSER" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-adduser\fR - Add a registry user account .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1 index 7b8e907cd65e66..64bf5d5461a157 100644 --- a/deps/npm/man/man1/npm-audit.1 +++ b/deps/npm/man/man1/npm-audit.1 @@ -1,4 +1,4 @@ -.TH "NPM-AUDIT" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-AUDIT" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-audit\fR - Run a security audit .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1 index d20ac5377799ce..c25b04b6814137 100644 --- a/deps/npm/man/man1/npm-bugs.1 +++ b/deps/npm/man/man1/npm-bugs.1 @@ -1,4 +1,4 @@ -.TH "NPM-BUGS" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-BUGS" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-bugs\fR - Report bugs for a package in a web browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1 index ae80a0ed6d86de..e9da044a5b4479 100644 --- a/deps/npm/man/man1/npm-cache.1 +++ b/deps/npm/man/man1/npm-cache.1 @@ -1,4 +1,4 @@ -.TH "NPM-CACHE" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-CACHE" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-cache\fR - Manipulates packages cache .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1 index 840ef475bd964a..1ad1419cb1f98f 100644 --- a/deps/npm/man/man1/npm-ci.1 +++ b/deps/npm/man/man1/npm-ci.1 @@ -1,4 +1,4 @@ -.TH "NPM-CI" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-CI" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-ci\fR - Clean install a project .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1 index ad7294e0ea4fe7..6f246c436cead3 100644 --- a/deps/npm/man/man1/npm-completion.1 +++ b/deps/npm/man/man1/npm-completion.1 @@ -1,4 +1,4 @@ -.TH "NPM-COMPLETION" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-COMPLETION" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-completion\fR - Tab Completion for npm .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1 index 10828f3752aa6d..1bef58e188b68c 100644 --- a/deps/npm/man/man1/npm-config.1 +++ b/deps/npm/man/man1/npm-config.1 @@ -1,4 +1,4 @@ -.TH "NPM-CONFIG" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-CONFIG" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-config\fR - Manage the npm configuration files .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1 index 3c3621f37b3701..b1e21f0478ed4f 100644 --- a/deps/npm/man/man1/npm-dedupe.1 +++ b/deps/npm/man/man1/npm-dedupe.1 @@ -1,4 +1,4 @@ -.TH "NPM-DEDUPE" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-DEDUPE" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-dedupe\fR - Reduce duplication in the package tree .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1 index a7f7089a9c88f0..31e9e56cd95382 100644 --- a/deps/npm/man/man1/npm-deprecate.1 +++ b/deps/npm/man/man1/npm-deprecate.1 @@ -1,4 +1,4 @@ -.TH "NPM-DEPRECATE" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-DEPRECATE" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-deprecate\fR - Deprecate a version of a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-diff.1 b/deps/npm/man/man1/npm-diff.1 index 4d3f9f6955ada5..c29ae3266d1543 100644 --- a/deps/npm/man/man1/npm-diff.1 +++ b/deps/npm/man/man1/npm-diff.1 @@ -1,4 +1,4 @@ -.TH "NPM-DIFF" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-DIFF" "1" "October 2024" "NPM@10.9.0" "" 
.SH "NAME" \fBnpm-diff\fR - The registry diff command .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1 index d8451c181f18be..46e5cf8ac3492a 100644 --- a/deps/npm/man/man1/npm-dist-tag.1 +++ b/deps/npm/man/man1/npm-dist-tag.1 @@ -1,4 +1,4 @@ -.TH "NPM-DIST-TAG" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-DIST-TAG" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-dist-tag\fR - Modify package distribution tags .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1 index f6398139688780..6cda2d87abe163 100644 --- a/deps/npm/man/man1/npm-docs.1 +++ b/deps/npm/man/man1/npm-docs.1 @@ -1,4 +1,4 @@ -.TH "NPM-DOCS" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-DOCS" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-docs\fR - Open documentation for a package in a web browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-doctor.1 b/deps/npm/man/man1/npm-doctor.1 index ba1cff38927cb4..7a67ed1e6b32b1 100644 --- a/deps/npm/man/man1/npm-doctor.1 +++ b/deps/npm/man/man1/npm-doctor.1 @@ -1,4 +1,4 @@ -.TH "NPM-DOCTOR" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-DOCTOR" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-doctor\fR - Check the health of your npm environment .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1 index 6beaa7b38444c1..5ae7e24b67d60e 100644 --- a/deps/npm/man/man1/npm-edit.1 +++ b/deps/npm/man/man1/npm-edit.1 @@ -1,4 +1,4 @@ -.TH "NPM-EDIT" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-EDIT" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-edit\fR - Edit an installed package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-exec.1 b/deps/npm/man/man1/npm-exec.1 index 3e267f6eaae86d..20c21e67919d65 100644 --- a/deps/npm/man/man1/npm-exec.1 +++ b/deps/npm/man/man1/npm-exec.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXEC" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-EXEC" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-exec\fR - Run a command from a local or remote npm package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-explain.1 b/deps/npm/man/man1/npm-explain.1 index 60f19d7e1a844f..65ef0e086e2ce0 100644 --- a/deps/npm/man/man1/npm-explain.1 +++ b/deps/npm/man/man1/npm-explain.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXPLAIN" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-EXPLAIN" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-explain\fR - Explain installed packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1 index 3c1abf7b4db562..6b92a81c2abc0d 100644 --- a/deps/npm/man/man1/npm-explore.1 +++ b/deps/npm/man/man1/npm-explore.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXPLORE" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-EXPLORE" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-explore\fR - Browse an installed package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-find-dupes.1 b/deps/npm/man/man1/npm-find-dupes.1 index 134480927461d7..261313e1b23ad1 100644 --- a/deps/npm/man/man1/npm-find-dupes.1 +++ b/deps/npm/man/man1/npm-find-dupes.1 @@ -1,4 +1,4 @@ -.TH "NPM-FIND-DUPES" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-FIND-DUPES" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-find-dupes\fR - Find duplication in the package tree .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-fund.1 b/deps/npm/man/man1/npm-fund.1 index 25a3b60faa0cb6..b87d57d20c0f50 100644 --- a/deps/npm/man/man1/npm-fund.1 +++ b/deps/npm/man/man1/npm-fund.1 @@ -1,4 +1,4 @@ -.TH "NPM-FUND" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-FUND" "1" "October 2024" 
"NPM@10.9.0" "" .SH "NAME" \fBnpm-fund\fR - Retrieve funding information .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1 index 607139184335f4..9660ca29697e9f 100644 --- a/deps/npm/man/man1/npm-help-search.1 +++ b/deps/npm/man/man1/npm-help-search.1 @@ -1,4 +1,4 @@ -.TH "NPM-HELP-SEARCH" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-HELP-SEARCH" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-help-search\fR - Search npm help documentation .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1 index 97b15b6eeab138..a8c70fdc88a9ae 100644 --- a/deps/npm/man/man1/npm-help.1 +++ b/deps/npm/man/man1/npm-help.1 @@ -1,4 +1,4 @@ -.TH "NPM-HELP" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-HELP" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-help\fR - Get help on npm .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-hook.1 b/deps/npm/man/man1/npm-hook.1 index b7362d6e2ef8dc..2c07a5afadbb64 100644 --- a/deps/npm/man/man1/npm-hook.1 +++ b/deps/npm/man/man1/npm-hook.1 @@ -1,4 +1,4 @@ -.TH "NPM-HOOK" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-HOOK" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-hook\fR - Manage registry hooks .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1 index 4639e3031dad6d..cb7339dc0f4ff0 100644 --- a/deps/npm/man/man1/npm-init.1 +++ b/deps/npm/man/man1/npm-init.1 @@ -1,11 +1,11 @@ -.TH "NPM-INIT" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-INIT" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-init\fR - Create a package.json file .SS "Synopsis" .P .RS 2 .nf -npm init (same as `npx `) +npm init (same as `npx create-`) npm init <@scope> (same as `npx <@scope>/create`) aliases: create, innit diff --git a/deps/npm/man/man1/npm-install-ci-test.1 b/deps/npm/man/man1/npm-install-ci-test.1 index 3f8e17b9c69746..52defde693a506 100644 --- a/deps/npm/man/man1/npm-install-ci-test.1 +++ b/deps/npm/man/man1/npm-install-ci-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL-CI-TEST" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-INSTALL-CI-TEST" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-install-ci-test\fR - Install a project with a clean slate and run tests .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1 index 6d3c5aca8c6411..e2023e1a985474 100644 --- a/deps/npm/man/man1/npm-install-test.1 +++ b/deps/npm/man/man1/npm-install-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL-TEST" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-INSTALL-TEST" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-install-test\fR - Install package(s) and run tests .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1 index 3d51d664fb6bf3..59b3a8c81ac73f 100644 --- a/deps/npm/man/man1/npm-install.1 +++ b/deps/npm/man/man1/npm-install.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-INSTALL" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-install\fR - Install a package .SS "Synopsis" @@ -173,8 +173,6 @@ npm install readable-stream --save-exact npm install ansi-regex --save-bundle .fi .RE -.P -\fBNote\fR: If there is a file or folder named \fB\fR in the current working directory, then it will try to install that, and only try to fetch the package by name if it is not valid. 
.IP \(bu 4 \fBnpm install @npm:\fR: .P diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1 index b2d6e243af726d..e6106495400fdc 100644 --- a/deps/npm/man/man1/npm-link.1 +++ b/deps/npm/man/man1/npm-link.1 @@ -1,4 +1,4 @@ -.TH "NPM-LINK" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-LINK" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-link\fR - Symlink a package folder .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-login.1 b/deps/npm/man/man1/npm-login.1 index ae61f04aeba5f2..36f2bf2d0f35dc 100644 --- a/deps/npm/man/man1/npm-login.1 +++ b/deps/npm/man/man1/npm-login.1 @@ -1,4 +1,4 @@ -.TH "NPM-LOGIN" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-LOGIN" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-login\fR - Login to a registry user account .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1 index 5601da1924d19d..78cba39ccd85c9 100644 --- a/deps/npm/man/man1/npm-logout.1 +++ b/deps/npm/man/man1/npm-logout.1 @@ -1,4 +1,4 @@ -.TH "NPM-LOGOUT" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-LOGOUT" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-logout\fR - Log out of the registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index 0775be95e46462..b0364fe1531753 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -1,4 +1,4 @@ -.TH "NPM-LS" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-LS" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-ls\fR - List installed packages .SS "Synopsis" @@ -20,7 +20,7 @@ Positional arguments are \fBname@version-range\fR identifiers, which will limit .P .RS 2 .nf -npm@10.8.3 /path/to/npm +npm@10.9.0 /path/to/npm └─┬ init-package-json@0.0.4 └── promzard@0.1.5 .fi diff --git a/deps/npm/man/man1/npm-org.1 b/deps/npm/man/man1/npm-org.1 index 782162959b8238..0465a6b5bddb54 100644 --- a/deps/npm/man/man1/npm-org.1 +++ b/deps/npm/man/man1/npm-org.1 @@ -1,4 +1,4 @@ -.TH "NPM-ORG" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-ORG" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-org\fR - Manage orgs .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1 index bd22481ccb46b7..ad1cf07823630f 100644 --- a/deps/npm/man/man1/npm-outdated.1 +++ b/deps/npm/man/man1/npm-outdated.1 @@ -1,4 +1,4 @@ -.TH "NPM-OUTDATED" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-OUTDATED" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-outdated\fR - Check for outdated packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1 index 22202d1d46e877..5842e5a51d125f 100644 --- a/deps/npm/man/man1/npm-owner.1 +++ b/deps/npm/man/man1/npm-owner.1 @@ -1,4 +1,4 @@ -.TH "NPM-OWNER" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-OWNER" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-owner\fR - Manage package owners .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1 index dbe579ed7ce4bf..6cc2acd4b580f7 100644 --- a/deps/npm/man/man1/npm-pack.1 +++ b/deps/npm/man/man1/npm-pack.1 @@ -1,4 +1,4 @@ -.TH "NPM-PACK" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-PACK" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-pack\fR - Create a tarball from a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1 index 21318c025568a9..ad03cbbb6a1398 100644 --- a/deps/npm/man/man1/npm-ping.1 +++ b/deps/npm/man/man1/npm-ping.1 @@ -1,4 +1,4 @@ -.TH "NPM-PING" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-PING" "1" "October 2024" 
"NPM@10.9.0" "" .SH "NAME" \fBnpm-ping\fR - Ping npm registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-pkg.1 b/deps/npm/man/man1/npm-pkg.1 index 05c3b50411b09c..2f45876234ec3d 100644 --- a/deps/npm/man/man1/npm-pkg.1 +++ b/deps/npm/man/man1/npm-pkg.1 @@ -1,4 +1,4 @@ -.TH "NPM-PKG" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-PKG" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-pkg\fR - Manages your package.json .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1 index 9745f3370c89be..cac44344c71d79 100644 --- a/deps/npm/man/man1/npm-prefix.1 +++ b/deps/npm/man/man1/npm-prefix.1 @@ -1,4 +1,4 @@ -.TH "NPM-PREFIX" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-PREFIX" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-prefix\fR - Display prefix .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-profile.1 b/deps/npm/man/man1/npm-profile.1 index 8171db696c6ede..28e3f41244ace1 100644 --- a/deps/npm/man/man1/npm-profile.1 +++ b/deps/npm/man/man1/npm-profile.1 @@ -1,4 +1,4 @@ -.TH "NPM-PROFILE" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-PROFILE" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-profile\fR - Change settings on your registry profile .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1 index cd065ff5a58ab2..7634c75f0901fa 100644 --- a/deps/npm/man/man1/npm-prune.1 +++ b/deps/npm/man/man1/npm-prune.1 @@ -1,4 +1,4 @@ -.TH "NPM-PRUNE" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-PRUNE" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-prune\fR - Remove extraneous packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1 index 17572afaaa84a9..c6724b6fe2ce51 100644 --- a/deps/npm/man/man1/npm-publish.1 +++ b/deps/npm/man/man1/npm-publish.1 @@ -1,4 +1,4 @@ -.TH "NPM-PUBLISH" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-PUBLISH" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-publish\fR - Publish a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-query.1 b/deps/npm/man/man1/npm-query.1 index 8b395876b68555..0a3c6e665e7ecf 100644 --- a/deps/npm/man/man1/npm-query.1 +++ b/deps/npm/man/man1/npm-query.1 @@ -1,4 +1,4 @@ -.TH "NPM-QUERY" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-QUERY" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-query\fR - Dependency selector query .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1 index d500181cd66c72..e537cec373e3c6 100644 --- a/deps/npm/man/man1/npm-rebuild.1 +++ b/deps/npm/man/man1/npm-rebuild.1 @@ -1,4 +1,4 @@ -.TH "NPM-REBUILD" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-REBUILD" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-rebuild\fR - Rebuild a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1 index 0592faf7559d21..869541f6faaa51 100644 --- a/deps/npm/man/man1/npm-repo.1 +++ b/deps/npm/man/man1/npm-repo.1 @@ -1,4 +1,4 @@ -.TH "NPM-REPO" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-REPO" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-repo\fR - Open package repository page in the browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1 index c92a1acede0ca1..b0e3b0deead7ef 100644 --- a/deps/npm/man/man1/npm-restart.1 +++ b/deps/npm/man/man1/npm-restart.1 @@ -1,4 +1,4 @@ -.TH "NPM-RESTART" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-RESTART" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-restart\fR - Restart a package .SS "Synopsis" diff --git 
a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1 index 8daf0430c4e76e..94a132557e3979 100644 --- a/deps/npm/man/man1/npm-root.1 +++ b/deps/npm/man/man1/npm-root.1 @@ -1,4 +1,4 @@ -.TH "NPM-ROOT" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-ROOT" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-root\fR - Display npm root .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1 index b3374d7cdc5e4a..424c733d6a0523 100644 --- a/deps/npm/man/man1/npm-run-script.1 +++ b/deps/npm/man/man1/npm-run-script.1 @@ -1,4 +1,4 @@ -.TH "NPM-RUN-SCRIPT" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-RUN-SCRIPT" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-run-script\fR - Run arbitrary package scripts .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-sbom.1 b/deps/npm/man/man1/npm-sbom.1 index 70423b46fdf408..7bb8199c8b11c8 100644 --- a/deps/npm/man/man1/npm-sbom.1 +++ b/deps/npm/man/man1/npm-sbom.1 @@ -1,4 +1,4 @@ -.TH "NPM-SBOM" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-SBOM" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-sbom\fR - Generate a Software Bill of Materials (SBOM) .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1 index c72a963f577209..4fe67890bc949a 100644 --- a/deps/npm/man/man1/npm-search.1 +++ b/deps/npm/man/man1/npm-search.1 @@ -1,4 +1,4 @@ -.TH "NPM-SEARCH" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-SEARCH" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-search\fR - Search for packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1 index 1f677474153975..ed3c0cb03ccca7 100644 --- a/deps/npm/man/man1/npm-shrinkwrap.1 +++ b/deps/npm/man/man1/npm-shrinkwrap.1 @@ -1,4 +1,4 @@ -.TH "NPM-SHRINKWRAP" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-SHRINKWRAP" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-shrinkwrap\fR - Lock down dependency versions for publication .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1 index f441999ab56fb3..83d0ff230ec1bb 100644 --- a/deps/npm/man/man1/npm-star.1 +++ b/deps/npm/man/man1/npm-star.1 @@ -1,4 +1,4 @@ -.TH "NPM-STAR" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-STAR" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-star\fR - Mark your favorite packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1 index 5f22addcb31b10..bdeffec7a3b1b5 100644 --- a/deps/npm/man/man1/npm-stars.1 +++ b/deps/npm/man/man1/npm-stars.1 @@ -1,4 +1,4 @@ -.TH "NPM-STARS" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-STARS" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-stars\fR - View packages marked as favorites .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1 index 1e2c8265972942..c3cb48c6f72576 100644 --- a/deps/npm/man/man1/npm-start.1 +++ b/deps/npm/man/man1/npm-start.1 @@ -1,4 +1,4 @@ -.TH "NPM-START" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-START" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-start\fR - Start a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1 index 546dc8bef5da82..36ef105209b7f0 100644 --- a/deps/npm/man/man1/npm-stop.1 +++ b/deps/npm/man/man1/npm-stop.1 @@ -1,4 +1,4 @@ -.TH "NPM-STOP" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-STOP" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-stop\fR - Stop a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-team.1 b/deps/npm/man/man1/npm-team.1 index 
c8e4286873d415..6712c8f92b23c8 100644 --- a/deps/npm/man/man1/npm-team.1 +++ b/deps/npm/man/man1/npm-team.1 @@ -1,4 +1,4 @@ -.TH "NPM-TEAM" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-TEAM" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-team\fR - Manage organization teams and team memberships .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1 index 2edba4e88bb489..5e63e9c6d76ab3 100644 --- a/deps/npm/man/man1/npm-test.1 +++ b/deps/npm/man/man1/npm-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-TEST" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-TEST" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-test\fR - Test a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-token.1 b/deps/npm/man/man1/npm-token.1 index 20bda1a7ef30b9..29a2c361574121 100644 --- a/deps/npm/man/man1/npm-token.1 +++ b/deps/npm/man/man1/npm-token.1 @@ -1,4 +1,4 @@ -.TH "NPM-TOKEN" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-TOKEN" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-token\fR - Manage your authentication tokens .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1 index 052c11b8085074..0c10a60c1f07e7 100644 --- a/deps/npm/man/man1/npm-uninstall.1 +++ b/deps/npm/man/man1/npm-uninstall.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNINSTALL" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-UNINSTALL" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-uninstall\fR - Remove a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1 index fd7ee3066e9986..098af2e59310bc 100644 --- a/deps/npm/man/man1/npm-unpublish.1 +++ b/deps/npm/man/man1/npm-unpublish.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNPUBLISH" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-UNPUBLISH" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-unpublish\fR - Remove a package from the registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-unstar.1 b/deps/npm/man/man1/npm-unstar.1 index f5e409f091725f..7326984aa8b4d2 100644 --- a/deps/npm/man/man1/npm-unstar.1 +++ b/deps/npm/man/man1/npm-unstar.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNSTAR" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-UNSTAR" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-unstar\fR - Remove an item from your favorite packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1 index dd9d503de4e6a0..797a9e601cf3d2 100644 --- a/deps/npm/man/man1/npm-update.1 +++ b/deps/npm/man/man1/npm-update.1 @@ -1,4 +1,4 @@ -.TH "NPM-UPDATE" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-UPDATE" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-update\fR - Update packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1 index 5e6d0beefac912..d0a7dcee6fea65 100644 --- a/deps/npm/man/man1/npm-version.1 +++ b/deps/npm/man/man1/npm-version.1 @@ -1,4 +1,4 @@ -.TH "NPM-VERSION" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-VERSION" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-version\fR - Bump a package version .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1 index baca4b0a97fae8..2925f8d7d2cffe 100644 --- a/deps/npm/man/man1/npm-view.1 +++ b/deps/npm/man/man1/npm-view.1 @@ -1,4 +1,4 @@ -.TH "NPM-VIEW" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-VIEW" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-view\fR - View registry info .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1 index 40caa068abd1ef..f283db5c9d247a 100644 --- 
a/deps/npm/man/man1/npm-whoami.1 +++ b/deps/npm/man/man1/npm-whoami.1 @@ -1,4 +1,4 @@ -.TH "NPM-WHOAMI" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-WHOAMI" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-whoami\fR - Display npm username .SS "Synopsis" diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index 471e5b5f7c110f..b2034bc50fa5e3 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 @@ -1,4 +1,4 @@ -.TH "NPM" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm\fR - javascript package manager .SS "Synopsis" @@ -12,7 +12,7 @@ npm Note: This command is unaware of workspaces. .SS "Version" .P -10.8.3 +10.9.0 .SS "Description" .P npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently. diff --git a/deps/npm/man/man1/npx.1 b/deps/npm/man/man1/npx.1 index 0849e0d10949e2..a1000443cc665e 100644 --- a/deps/npm/man/man1/npx.1 +++ b/deps/npm/man/man1/npx.1 @@ -1,4 +1,4 @@ -.TH "NPX" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPX" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpx\fR - Run a command from a local or remote npm package .SS "Synopsis" diff --git a/deps/npm/man/man5/folders.5 b/deps/npm/man/man5/folders.5 index 6e4b89a844f0d8..6e3045729c46ca 100644 --- a/deps/npm/man/man5/folders.5 +++ b/deps/npm/man/man5/folders.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "August 2024" "NPM@10.8.3" "" +.TH "FOLDERS" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBfolders\fR - Folder Structures Used by npm .SS "Description" diff --git a/deps/npm/man/man5/install.5 b/deps/npm/man/man5/install.5 index e5d1e8921317cb..f5fabf02d7431b 100644 --- a/deps/npm/man/man5/install.5 +++ b/deps/npm/man/man5/install.5 @@ -1,4 +1,4 @@ -.TH "INSTALL" "5" "August 2024" "NPM@10.8.3" "" +.TH "INSTALL" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBinstall\fR - Download and install node and npm .SS "Description" diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5 index 6e4b89a844f0d8..6e3045729c46ca 100644 --- a/deps/npm/man/man5/npm-global.5 +++ b/deps/npm/man/man5/npm-global.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "August 2024" "NPM@10.8.3" "" +.TH "FOLDERS" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBfolders\fR - Folder Structures Used by npm .SS "Description" diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5 index b167d6e0e92849..49773dda01d706 100644 --- a/deps/npm/man/man5/npm-json.5 +++ b/deps/npm/man/man5/npm-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE.JSON" "5" "August 2024" "NPM@10.8.3" "" +.TH "PACKAGE.JSON" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBpackage.json\fR - Specifics of npm's package.json handling .SS "Description" @@ -338,6 +338,9 @@ Most of these ignored files can be included specifically if included in the \fBf .P These can not be included. +.SS "exports" +.P +The "exports" provides a modern alternative to "main" allowing multiple entry points to be defined, conditional entry resolution support between environments, and preventing any other entry points besides those defined in "exports". This encapsulation allows module authors to clearly define the public interface for their package. For more details see the \fBnode.js documentation on package entry points\fR \fI\(lahttps://nodejs.org/api/packages.html#package-entry-points\(ra\fR .SS "main" .P The main field is a module ID that is the primary entry point to your program. 
That is, if your package is named \fBfoo\fR, and a user installs it, and then does \fBrequire("foo")\fR, then your main module's exports object will be returned. @@ -1041,6 +1044,35 @@ Like the \fBos\fR option, you can also block architectures: .RE .P The host architecture is determined by \fBprocess.arch\fR +.SS "devEngines" +.P +The \fBdevEngines\fR field aids engineers working on a codebase to all be using the same tooling. +.P +You can specify a \fBdevEngines\fR property in your \fBpackage.json\fR which will run before \fBinstall\fR, \fBci\fR, and \fBrun\fR commands. +.RS 0 +.P +Note: \fBengines\fR and \fBdevEngines\fR differ in object shape. They also function very differently. \fBengines\fR is designed to alert the user when a dependency uses a differening npm or node version that the project it's being used in, whereas \fBdevEngines\fR is used to alert people interacting with the source code of a project. +.RE 0 + +.P +The supported keys under the \fBdevEngines\fR property are \fBcpu\fR, \fBos\fR, \fBlibc\fR, \fBruntime\fR, and \fBpackageManager\fR. Each property can be an object or an array of objects. Objects must contain \fBname\fR, and optionally can specify \fBversion\fR, and \fBonFail\fR. \fBonFail\fR can be \fBwarn\fR, \fBerror\fR, or \fBignore\fR, and if left undefined is of the same value as \fBerror\fR. \fBnpm\fR will assume that you're running with \fBnode\fR. Here's an example of a project that will fail if the environment is not \fBnode\fR and \fBnpm\fR. If you set \fBruntime.name\fR or \fBpackageManager.name\fR to any other string, it will fail within the npm CLI. +.P +.RS 2 +.nf +{ + "devEngines": { + "runtime": { + "name": "node", + "onFail": "error" + }, + "packageManager": { + "name": "npm", + "onFail": "error" + } + } +} +.fi +.RE .SS "private" .P If you set \fB"private": true\fR in your package.json, then npm will refuse to publish it. diff --git a/deps/npm/man/man5/npm-shrinkwrap-json.5 b/deps/npm/man/man5/npm-shrinkwrap-json.5 index 13353dc1e6a187..d1f394d23aa1ce 100644 --- a/deps/npm/man/man5/npm-shrinkwrap-json.5 +++ b/deps/npm/man/man5/npm-shrinkwrap-json.5 @@ -1,4 +1,4 @@ -.TH "NPM-SHRINKWRAP.JSON" "5" "August 2024" "NPM@10.8.3" "" +.TH "NPM-SHRINKWRAP.JSON" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-shrinkwrap.json\fR - A publishable lockfile .SS "Description" diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5 index 9f439429b53923..3b78f989c23545 100644 --- a/deps/npm/man/man5/npmrc.5 +++ b/deps/npm/man/man5/npmrc.5 @@ -1,4 +1,4 @@ -.TH "NPMRC" "5" "August 2024" "NPM@10.8.3" "" +.TH "NPMRC" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpmrc\fR - The npm config files .SS "Description" diff --git a/deps/npm/man/man5/package-json.5 b/deps/npm/man/man5/package-json.5 index b167d6e0e92849..49773dda01d706 100644 --- a/deps/npm/man/man5/package-json.5 +++ b/deps/npm/man/man5/package-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE.JSON" "5" "August 2024" "NPM@10.8.3" "" +.TH "PACKAGE.JSON" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBpackage.json\fR - Specifics of npm's package.json handling .SS "Description" @@ -338,6 +338,9 @@ Most of these ignored files can be included specifically if included in the \fBf .P These can not be included. +.SS "exports" +.P +The "exports" provides a modern alternative to "main" allowing multiple entry points to be defined, conditional entry resolution support between environments, and preventing any other entry points besides those defined in "exports". 
This encapsulation allows module authors to clearly define the public interface for their package. For more details see the \fBnode.js documentation on package entry points\fR \fI\(lahttps://nodejs.org/api/packages.html#package-entry-points\(ra\fR .SS "main" .P The main field is a module ID that is the primary entry point to your program. That is, if your package is named \fBfoo\fR, and a user installs it, and then does \fBrequire("foo")\fR, then your main module's exports object will be returned. @@ -1041,6 +1044,35 @@ Like the \fBos\fR option, you can also block architectures: .RE .P The host architecture is determined by \fBprocess.arch\fR +.SS "devEngines" +.P +The \fBdevEngines\fR field aids engineers working on a codebase to all be using the same tooling. +.P +You can specify a \fBdevEngines\fR property in your \fBpackage.json\fR which will run before \fBinstall\fR, \fBci\fR, and \fBrun\fR commands. +.RS 0 +.P +Note: \fBengines\fR and \fBdevEngines\fR differ in object shape. They also function very differently. \fBengines\fR is designed to alert the user when a dependency uses a differening npm or node version that the project it's being used in, whereas \fBdevEngines\fR is used to alert people interacting with the source code of a project. +.RE 0 + +.P +The supported keys under the \fBdevEngines\fR property are \fBcpu\fR, \fBos\fR, \fBlibc\fR, \fBruntime\fR, and \fBpackageManager\fR. Each property can be an object or an array of objects. Objects must contain \fBname\fR, and optionally can specify \fBversion\fR, and \fBonFail\fR. \fBonFail\fR can be \fBwarn\fR, \fBerror\fR, or \fBignore\fR, and if left undefined is of the same value as \fBerror\fR. \fBnpm\fR will assume that you're running with \fBnode\fR. Here's an example of a project that will fail if the environment is not \fBnode\fR and \fBnpm\fR. If you set \fBruntime.name\fR or \fBpackageManager.name\fR to any other string, it will fail within the npm CLI. +.P +.RS 2 +.nf +{ + "devEngines": { + "runtime": { + "name": "node", + "onFail": "error" + }, + "packageManager": { + "name": "npm", + "onFail": "error" + } + } +} +.fi +.RE .SS "private" .P If you set \fB"private": true\fR in your package.json, then npm will refuse to publish it. 
diff --git a/deps/npm/man/man5/package-lock-json.5 b/deps/npm/man/man5/package-lock-json.5 index 183a8fbff85d75..df3dcca1c4fa75 100644 --- a/deps/npm/man/man5/package-lock-json.5 +++ b/deps/npm/man/man5/package-lock-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE-LOCK.JSON" "5" "August 2024" "NPM@10.8.3" "" +.TH "PACKAGE-LOCK.JSON" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBpackage-lock.json\fR - A manifestation of the manifest .SS "Description" diff --git a/deps/npm/man/man7/config.7 b/deps/npm/man/man7/config.7 index 0cd1e963107f62..ee166c9d4ccee9 100644 --- a/deps/npm/man/man7/config.7 +++ b/deps/npm/man/man7/config.7 @@ -1,4 +1,4 @@ -.TH "CONFIG" "7" "August 2024" "NPM@10.8.3" "" +.TH "CONFIG" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBconfig\fR - More than you probably want to know about npm configuration .SS "Description" diff --git a/deps/npm/man/man7/dependency-selectors.7 b/deps/npm/man/man7/dependency-selectors.7 index 92cbf923c1546a..54d80fee4dc58d 100644 --- a/deps/npm/man/man7/dependency-selectors.7 +++ b/deps/npm/man/man7/dependency-selectors.7 @@ -1,4 +1,4 @@ -.TH "QUERYING" "7" "August 2024" "NPM@10.8.3" "" +.TH "QUERYING" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBQuerying\fR - Dependency Selector Syntax & Querying .SS "Description" diff --git a/deps/npm/man/man7/developers.7 b/deps/npm/man/man7/developers.7 index fd7d1342f26052..aa907f3f51deda 100644 --- a/deps/npm/man/man7/developers.7 +++ b/deps/npm/man/man7/developers.7 @@ -1,4 +1,4 @@ -.TH "DEVELOPERS" "7" "August 2024" "NPM@10.8.3" "" +.TH "DEVELOPERS" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBdevelopers\fR - Developer Guide .SS "Description" diff --git a/deps/npm/man/man7/logging.7 b/deps/npm/man/man7/logging.7 index 3ab9f5c1ddf49e..f36ae6e4703048 100644 --- a/deps/npm/man/man7/logging.7 +++ b/deps/npm/man/man7/logging.7 @@ -1,4 +1,4 @@ -.TH "LOGGING" "7" "August 2024" "NPM@10.8.3" "" +.TH "LOGGING" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBLogging\fR - Why, What & How We Log .SS "Description" diff --git a/deps/npm/man/man7/orgs.7 b/deps/npm/man/man7/orgs.7 index c3718278543966..2866d7dd9800a5 100644 --- a/deps/npm/man/man7/orgs.7 +++ b/deps/npm/man/man7/orgs.7 @@ -1,4 +1,4 @@ -.TH "ORGS" "7" "August 2024" "NPM@10.8.3" "" +.TH "ORGS" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBorgs\fR - Working with Teams & Orgs .SS "Description" diff --git a/deps/npm/man/man7/package-spec.7 b/deps/npm/man/man7/package-spec.7 index 60cbfcd9ff6068..0d76a5f3017253 100644 --- a/deps/npm/man/man7/package-spec.7 +++ b/deps/npm/man/man7/package-spec.7 @@ -1,4 +1,4 @@ -.TH "PACKAGE-SPEC" "7" "August 2024" "NPM@10.8.3" "" +.TH "PACKAGE-SPEC" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBpackage-spec\fR - Package name specifier .SS "Description" diff --git a/deps/npm/man/man7/registry.7 b/deps/npm/man/man7/registry.7 index a798125ab8dd05..61ad2702f7b235 100644 --- a/deps/npm/man/man7/registry.7 +++ b/deps/npm/man/man7/registry.7 @@ -1,4 +1,4 @@ -.TH "REGISTRY" "7" "August 2024" "NPM@10.8.3" "" +.TH "REGISTRY" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBregistry\fR - The JavaScript Package Registry .SS "Description" diff --git a/deps/npm/man/man7/removal.7 b/deps/npm/man/man7/removal.7 index 9e792b51df71a3..a76b743553c304 100644 --- a/deps/npm/man/man7/removal.7 +++ b/deps/npm/man/man7/removal.7 @@ -1,4 +1,4 @@ -.TH "REMOVAL" "7" "August 2024" "NPM@10.8.3" "" +.TH "REMOVAL" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBremoval\fR - Cleaning the Slate .SS "Synopsis" diff --git a/deps/npm/man/man7/scope.7 
b/deps/npm/man/man7/scope.7 index 9313cb144366f1..94c22ccbd9f8fa 100644 --- a/deps/npm/man/man7/scope.7 +++ b/deps/npm/man/man7/scope.7 @@ -1,4 +1,4 @@ -.TH "SCOPE" "7" "August 2024" "NPM@10.8.3" "" +.TH "SCOPE" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBscope\fR - Scoped packages .SS "Description" diff --git a/deps/npm/man/man7/scripts.7 b/deps/npm/man/man7/scripts.7 index 843454ed9fd913..925d3181dc9174 100644 --- a/deps/npm/man/man7/scripts.7 +++ b/deps/npm/man/man7/scripts.7 @@ -1,4 +1,4 @@ -.TH "SCRIPTS" "7" "August 2024" "NPM@10.8.3" "" +.TH "SCRIPTS" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBscripts\fR - How npm handles the "scripts" field .SS "Description" diff --git a/deps/npm/man/man7/workspaces.7 b/deps/npm/man/man7/workspaces.7 index 7459be91ff0880..1812eb73eb7ece 100644 --- a/deps/npm/man/man7/workspaces.7 +++ b/deps/npm/man/man7/workspaces.7 @@ -1,4 +1,4 @@ -.TH "WORKSPACES" "7" "August 2024" "NPM@10.8.3" "" +.TH "WORKSPACES" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBworkspaces\fR - Working with workspaces .SS "Description" diff --git a/deps/npm/node_modules/@isaacs/fs-minipass/LICENSE b/deps/npm/node_modules/@isaacs/fs-minipass/LICENSE new file mode 100644 index 00000000000000..19129e315fe593 --- /dev/null +++ b/deps/npm/node_modules/@isaacs/fs-minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/index.js b/deps/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/index.js new file mode 100644 index 00000000000000..2b3178c5263b4c --- /dev/null +++ b/deps/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/index.js @@ -0,0 +1,430 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.WriteStreamSync = exports.WriteStream = exports.ReadStreamSync = exports.ReadStream = void 0; +const events_1 = __importDefault(require("events")); +const fs_1 = __importDefault(require("fs")); +const minipass_1 = require("minipass"); +const writev = fs_1.default.writev; +const _autoClose = Symbol('_autoClose'); +const _close = Symbol('_close'); +const _ended = Symbol('_ended'); +const _fd = Symbol('_fd'); +const _finished = Symbol('_finished'); +const _flags = Symbol('_flags'); +const _flush = Symbol('_flush'); +const _handleChunk = Symbol('_handleChunk'); +const _makeBuf = Symbol('_makeBuf'); +const _mode = Symbol('_mode'); +const _needDrain = Symbol('_needDrain'); +const _onerror = Symbol('_onerror'); +const _onopen = Symbol('_onopen'); +const _onread = Symbol('_onread'); +const _onwrite = Symbol('_onwrite'); +const _open = Symbol('_open'); +const _path = Symbol('_path'); +const _pos = Symbol('_pos'); +const _queue = Symbol('_queue'); +const _read = Symbol('_read'); +const _readSize = Symbol('_readSize'); +const _reading = Symbol('_reading'); +const _remain = Symbol('_remain'); +const _size = Symbol('_size'); +const _write = Symbol('_write'); +const _writing = Symbol('_writing'); +const _defaultFlag = Symbol('_defaultFlag'); +const _errored = Symbol('_errored'); +class ReadStream extends minipass_1.Minipass { + [_errored] = false; + [_fd]; + [_path]; + [_readSize]; + [_reading] = false; + [_size]; + [_remain]; + [_autoClose]; + constructor(path, opt) { + opt = opt || {}; + super(opt); + this.readable = true; + this.writable = false; + if (typeof path !== 'string') { + throw new TypeError('path must be a string'); + } + this[_errored] = false; + this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined; + this[_path] = path; + this[_readSize] = opt.readSize || 16 * 1024 * 1024; + this[_reading] = false; + this[_size] = typeof opt.size === 'number' ? opt.size : Infinity; + this[_remain] = this[_size]; + this[_autoClose] = + typeof opt.autoClose === 'boolean' ? opt.autoClose : true; + if (typeof this[_fd] === 'number') { + this[_read](); + } + else { + this[_open](); + } + } + get fd() { + return this[_fd]; + } + get path() { + return this[_path]; + } + //@ts-ignore + write() { + throw new TypeError('this is a readable stream'); + } + //@ts-ignore + end() { + throw new TypeError('this is a readable stream'); + } + [_open]() { + fs_1.default.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)); + } + [_onopen](er, fd) { + if (er) { + this[_onerror](er); + } + else { + this[_fd] = fd; + this.emit('open', fd); + this[_read](); + } + } + [_makeBuf]() { + return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])); + } + [_read]() { + if (!this[_reading]) { + this[_reading] = true; + const buf = this[_makeBuf](); + /* c8 ignore start */ + if (buf.length === 0) { + return process.nextTick(() => this[_onread](null, 0, buf)); + } + /* c8 ignore stop */ + fs_1.default.read(this[_fd], buf, 0, buf.length, null, (er, br, b) => this[_onread](er, br, b)); + } + } + [_onread](er, br, buf) { + this[_reading] = false; + if (er) { + this[_onerror](er); + } + else if (this[_handleChunk](br, buf)) { + this[_read](); + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs_1.default.close(fd, er => er ? 
this.emit('error', er) : this.emit('close')); + } + } + [_onerror](er) { + this[_reading] = true; + this[_close](); + this.emit('error', er); + } + [_handleChunk](br, buf) { + let ret = false; + // no effect if infinite + this[_remain] -= br; + if (br > 0) { + ret = super.write(br < buf.length ? buf.subarray(0, br) : buf); + } + if (br === 0 || this[_remain] <= 0) { + ret = false; + this[_close](); + super.end(); + } + return ret; + } + emit(ev, ...args) { + switch (ev) { + case 'prefinish': + case 'finish': + return false; + case 'drain': + if (typeof this[_fd] === 'number') { + this[_read](); + } + return false; + case 'error': + if (this[_errored]) { + return false; + } + this[_errored] = true; + return super.emit(ev, ...args); + default: + return super.emit(ev, ...args); + } + } +} +exports.ReadStream = ReadStream; +class ReadStreamSync extends ReadStream { + [_open]() { + let threw = true; + try { + this[_onopen](null, fs_1.default.openSync(this[_path], 'r')); + threw = false; + } + finally { + if (threw) { + this[_close](); + } + } + } + [_read]() { + let threw = true; + try { + if (!this[_reading]) { + this[_reading] = true; + do { + const buf = this[_makeBuf](); + /* c8 ignore start */ + const br = buf.length === 0 + ? 0 + : fs_1.default.readSync(this[_fd], buf, 0, buf.length, null); + /* c8 ignore stop */ + if (!this[_handleChunk](br, buf)) { + break; + } + } while (true); + this[_reading] = false; + } + threw = false; + } + finally { + if (threw) { + this[_close](); + } + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs_1.default.closeSync(fd); + this.emit('close'); + } + } +} +exports.ReadStreamSync = ReadStreamSync; +class WriteStream extends events_1.default { + readable = false; + writable = true; + [_errored] = false; + [_writing] = false; + [_ended] = false; + [_queue] = []; + [_needDrain] = false; + [_path]; + [_mode]; + [_autoClose]; + [_fd]; + [_defaultFlag]; + [_flags]; + [_finished] = false; + [_pos]; + constructor(path, opt) { + opt = opt || {}; + super(opt); + this[_path] = path; + this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined; + this[_mode] = opt.mode === undefined ? 0o666 : opt.mode; + this[_pos] = typeof opt.start === 'number' ? opt.start : undefined; + this[_autoClose] = + typeof opt.autoClose === 'boolean' ? opt.autoClose : true; + // truncating makes no sense when writing into the middle + const defaultFlag = this[_pos] !== undefined ? 'r+' : 'w'; + this[_defaultFlag] = opt.flags === undefined; + this[_flags] = opt.flags === undefined ? 
defaultFlag : opt.flags; + if (this[_fd] === undefined) { + this[_open](); + } + } + emit(ev, ...args) { + if (ev === 'error') { + if (this[_errored]) { + return false; + } + this[_errored] = true; + } + return super.emit(ev, ...args); + } + get fd() { + return this[_fd]; + } + get path() { + return this[_path]; + } + [_onerror](er) { + this[_close](); + this[_writing] = true; + this.emit('error', er); + } + [_open]() { + fs_1.default.open(this[_path], this[_flags], this[_mode], (er, fd) => this[_onopen](er, fd)); + } + [_onopen](er, fd) { + if (this[_defaultFlag] && + this[_flags] === 'r+' && + er && + er.code === 'ENOENT') { + this[_flags] = 'w'; + this[_open](); + } + else if (er) { + this[_onerror](er); + } + else { + this[_fd] = fd; + this.emit('open', fd); + if (!this[_writing]) { + this[_flush](); + } + } + } + end(buf, enc) { + if (buf) { + //@ts-ignore + this.write(buf, enc); + } + this[_ended] = true; + // synthetic after-write logic, where drain/finish live + if (!this[_writing] && + !this[_queue].length && + typeof this[_fd] === 'number') { + this[_onwrite](null, 0); + } + return this; + } + write(buf, enc) { + if (typeof buf === 'string') { + buf = Buffer.from(buf, enc); + } + if (this[_ended]) { + this.emit('error', new Error('write() after end()')); + return false; + } + if (this[_fd] === undefined || this[_writing] || this[_queue].length) { + this[_queue].push(buf); + this[_needDrain] = true; + return false; + } + this[_writing] = true; + this[_write](buf); + return true; + } + [_write](buf) { + fs_1.default.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => this[_onwrite](er, bw)); + } + [_onwrite](er, bw) { + if (er) { + this[_onerror](er); + } + else { + if (this[_pos] !== undefined && typeof bw === 'number') { + this[_pos] += bw; + } + if (this[_queue].length) { + this[_flush](); + } + else { + this[_writing] = false; + if (this[_ended] && !this[_finished]) { + this[_finished] = true; + this[_close](); + this.emit('finish'); + } + else if (this[_needDrain]) { + this[_needDrain] = false; + this.emit('drain'); + } + } + } + } + [_flush]() { + if (this[_queue].length === 0) { + if (this[_ended]) { + this[_onwrite](null, 0); + } + } + else if (this[_queue].length === 1) { + this[_write](this[_queue].pop()); + } + else { + const iovec = this[_queue]; + this[_queue] = []; + writev(this[_fd], iovec, this[_pos], (er, bw) => this[_onwrite](er, bw)); + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs_1.default.close(fd, er => er ? this.emit('error', er) : this.emit('close')); + } + } +} +exports.WriteStream = WriteStream; +class WriteStreamSync extends WriteStream { + [_open]() { + let fd; + // only wrap in a try{} block if we know we'll retry, to avoid + // the rethrow obscuring the error's source frame in most cases. 
+ if (this[_defaultFlag] && this[_flags] === 'r+') { + try { + fd = fs_1.default.openSync(this[_path], this[_flags], this[_mode]); + } + catch (er) { + if (er?.code === 'ENOENT') { + this[_flags] = 'w'; + return this[_open](); + } + else { + throw er; + } + } + } + else { + fd = fs_1.default.openSync(this[_path], this[_flags], this[_mode]); + } + this[_onopen](null, fd); + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs_1.default.closeSync(fd); + this.emit('close'); + } + } + [_write](buf) { + // throw the original, but try to close if it fails + let threw = true; + try { + this[_onwrite](null, fs_1.default.writeSync(this[_fd], buf, 0, buf.length, this[_pos])); + threw = false; + } + finally { + if (threw) { + try { + this[_close](); + } + catch { + // ok error + } + } + } + } +} +exports.WriteStreamSync = WriteStreamSync; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/package.json b/deps/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/@isaacs/fs-minipass/dist/esm/index.js b/deps/npm/node_modules/@isaacs/fs-minipass/dist/esm/index.js new file mode 100644 index 00000000000000..287a0f614dcc65 --- /dev/null +++ b/deps/npm/node_modules/@isaacs/fs-minipass/dist/esm/index.js @@ -0,0 +1,420 @@ +import EE from 'events'; +import fs from 'fs'; +import { Minipass } from 'minipass'; +const writev = fs.writev; +const _autoClose = Symbol('_autoClose'); +const _close = Symbol('_close'); +const _ended = Symbol('_ended'); +const _fd = Symbol('_fd'); +const _finished = Symbol('_finished'); +const _flags = Symbol('_flags'); +const _flush = Symbol('_flush'); +const _handleChunk = Symbol('_handleChunk'); +const _makeBuf = Symbol('_makeBuf'); +const _mode = Symbol('_mode'); +const _needDrain = Symbol('_needDrain'); +const _onerror = Symbol('_onerror'); +const _onopen = Symbol('_onopen'); +const _onread = Symbol('_onread'); +const _onwrite = Symbol('_onwrite'); +const _open = Symbol('_open'); +const _path = Symbol('_path'); +const _pos = Symbol('_pos'); +const _queue = Symbol('_queue'); +const _read = Symbol('_read'); +const _readSize = Symbol('_readSize'); +const _reading = Symbol('_reading'); +const _remain = Symbol('_remain'); +const _size = Symbol('_size'); +const _write = Symbol('_write'); +const _writing = Symbol('_writing'); +const _defaultFlag = Symbol('_defaultFlag'); +const _errored = Symbol('_errored'); +export class ReadStream extends Minipass { + [_errored] = false; + [_fd]; + [_path]; + [_readSize]; + [_reading] = false; + [_size]; + [_remain]; + [_autoClose]; + constructor(path, opt) { + opt = opt || {}; + super(opt); + this.readable = true; + this.writable = false; + if (typeof path !== 'string') { + throw new TypeError('path must be a string'); + } + this[_errored] = false; + this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined; + this[_path] = path; + this[_readSize] = opt.readSize || 16 * 1024 * 1024; + this[_reading] = false; + this[_size] = typeof opt.size === 'number' ? opt.size : Infinity; + this[_remain] = this[_size]; + this[_autoClose] = + typeof opt.autoClose === 'boolean' ? 
opt.autoClose : true; + if (typeof this[_fd] === 'number') { + this[_read](); + } + else { + this[_open](); + } + } + get fd() { + return this[_fd]; + } + get path() { + return this[_path]; + } + //@ts-ignore + write() { + throw new TypeError('this is a readable stream'); + } + //@ts-ignore + end() { + throw new TypeError('this is a readable stream'); + } + [_open]() { + fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)); + } + [_onopen](er, fd) { + if (er) { + this[_onerror](er); + } + else { + this[_fd] = fd; + this.emit('open', fd); + this[_read](); + } + } + [_makeBuf]() { + return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])); + } + [_read]() { + if (!this[_reading]) { + this[_reading] = true; + const buf = this[_makeBuf](); + /* c8 ignore start */ + if (buf.length === 0) { + return process.nextTick(() => this[_onread](null, 0, buf)); + } + /* c8 ignore stop */ + fs.read(this[_fd], buf, 0, buf.length, null, (er, br, b) => this[_onread](er, br, b)); + } + } + [_onread](er, br, buf) { + this[_reading] = false; + if (er) { + this[_onerror](er); + } + else if (this[_handleChunk](br, buf)) { + this[_read](); + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')); + } + } + [_onerror](er) { + this[_reading] = true; + this[_close](); + this.emit('error', er); + } + [_handleChunk](br, buf) { + let ret = false; + // no effect if infinite + this[_remain] -= br; + if (br > 0) { + ret = super.write(br < buf.length ? buf.subarray(0, br) : buf); + } + if (br === 0 || this[_remain] <= 0) { + ret = false; + this[_close](); + super.end(); + } + return ret; + } + emit(ev, ...args) { + switch (ev) { + case 'prefinish': + case 'finish': + return false; + case 'drain': + if (typeof this[_fd] === 'number') { + this[_read](); + } + return false; + case 'error': + if (this[_errored]) { + return false; + } + this[_errored] = true; + return super.emit(ev, ...args); + default: + return super.emit(ev, ...args); + } + } +} +export class ReadStreamSync extends ReadStream { + [_open]() { + let threw = true; + try { + this[_onopen](null, fs.openSync(this[_path], 'r')); + threw = false; + } + finally { + if (threw) { + this[_close](); + } + } + } + [_read]() { + let threw = true; + try { + if (!this[_reading]) { + this[_reading] = true; + do { + const buf = this[_makeBuf](); + /* c8 ignore start */ + const br = buf.length === 0 + ? 0 + : fs.readSync(this[_fd], buf, 0, buf.length, null); + /* c8 ignore stop */ + if (!this[_handleChunk](br, buf)) { + break; + } + } while (true); + this[_reading] = false; + } + threw = false; + } + finally { + if (threw) { + this[_close](); + } + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs.closeSync(fd); + this.emit('close'); + } + } +} +export class WriteStream extends EE { + readable = false; + writable = true; + [_errored] = false; + [_writing] = false; + [_ended] = false; + [_queue] = []; + [_needDrain] = false; + [_path]; + [_mode]; + [_autoClose]; + [_fd]; + [_defaultFlag]; + [_flags]; + [_finished] = false; + [_pos]; + constructor(path, opt) { + opt = opt || {}; + super(opt); + this[_path] = path; + this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined; + this[_mode] = opt.mode === undefined ? 0o666 : opt.mode; + this[_pos] = typeof opt.start === 'number' ? 
opt.start : undefined; + this[_autoClose] = + typeof opt.autoClose === 'boolean' ? opt.autoClose : true; + // truncating makes no sense when writing into the middle + const defaultFlag = this[_pos] !== undefined ? 'r+' : 'w'; + this[_defaultFlag] = opt.flags === undefined; + this[_flags] = opt.flags === undefined ? defaultFlag : opt.flags; + if (this[_fd] === undefined) { + this[_open](); + } + } + emit(ev, ...args) { + if (ev === 'error') { + if (this[_errored]) { + return false; + } + this[_errored] = true; + } + return super.emit(ev, ...args); + } + get fd() { + return this[_fd]; + } + get path() { + return this[_path]; + } + [_onerror](er) { + this[_close](); + this[_writing] = true; + this.emit('error', er); + } + [_open]() { + fs.open(this[_path], this[_flags], this[_mode], (er, fd) => this[_onopen](er, fd)); + } + [_onopen](er, fd) { + if (this[_defaultFlag] && + this[_flags] === 'r+' && + er && + er.code === 'ENOENT') { + this[_flags] = 'w'; + this[_open](); + } + else if (er) { + this[_onerror](er); + } + else { + this[_fd] = fd; + this.emit('open', fd); + if (!this[_writing]) { + this[_flush](); + } + } + } + end(buf, enc) { + if (buf) { + //@ts-ignore + this.write(buf, enc); + } + this[_ended] = true; + // synthetic after-write logic, where drain/finish live + if (!this[_writing] && + !this[_queue].length && + typeof this[_fd] === 'number') { + this[_onwrite](null, 0); + } + return this; + } + write(buf, enc) { + if (typeof buf === 'string') { + buf = Buffer.from(buf, enc); + } + if (this[_ended]) { + this.emit('error', new Error('write() after end()')); + return false; + } + if (this[_fd] === undefined || this[_writing] || this[_queue].length) { + this[_queue].push(buf); + this[_needDrain] = true; + return false; + } + this[_writing] = true; + this[_write](buf); + return true; + } + [_write](buf) { + fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => this[_onwrite](er, bw)); + } + [_onwrite](er, bw) { + if (er) { + this[_onerror](er); + } + else { + if (this[_pos] !== undefined && typeof bw === 'number') { + this[_pos] += bw; + } + if (this[_queue].length) { + this[_flush](); + } + else { + this[_writing] = false; + if (this[_ended] && !this[_finished]) { + this[_finished] = true; + this[_close](); + this.emit('finish'); + } + else if (this[_needDrain]) { + this[_needDrain] = false; + this.emit('drain'); + } + } + } + } + [_flush]() { + if (this[_queue].length === 0) { + if (this[_ended]) { + this[_onwrite](null, 0); + } + } + else if (this[_queue].length === 1) { + this[_write](this[_queue].pop()); + } + else { + const iovec = this[_queue]; + this[_queue] = []; + writev(this[_fd], iovec, this[_pos], (er, bw) => this[_onwrite](er, bw)); + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')); + } + } +} +export class WriteStreamSync extends WriteStream { + [_open]() { + let fd; + // only wrap in a try{} block if we know we'll retry, to avoid + // the rethrow obscuring the error's source frame in most cases. 
+ if (this[_defaultFlag] && this[_flags] === 'r+') { + try { + fd = fs.openSync(this[_path], this[_flags], this[_mode]); + } + catch (er) { + if (er?.code === 'ENOENT') { + this[_flags] = 'w'; + return this[_open](); + } + else { + throw er; + } + } + } + else { + fd = fs.openSync(this[_path], this[_flags], this[_mode]); + } + this[_onopen](null, fd); + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs.closeSync(fd); + this.emit('close'); + } + } + [_write](buf) { + // throw the original, but try to close if it fails + let threw = true; + try { + this[_onwrite](null, fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos])); + threw = false; + } + finally { + if (threw) { + try { + this[_close](); + } + catch { + // ok error + } + } + } + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/@isaacs/fs-minipass/dist/esm/package.json b/deps/npm/node_modules/@isaacs/fs-minipass/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/@isaacs/fs-minipass/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/@isaacs/fs-minipass/package.json b/deps/npm/node_modules/@isaacs/fs-minipass/package.json new file mode 100644 index 00000000000000..cc4576c4afe776 --- /dev/null +++ b/deps/npm/node_modules/@isaacs/fs-minipass/package.json @@ -0,0 +1,72 @@ +{ + "name": "@isaacs/fs-minipass", + "version": "4.0.1", + "main": "./dist/commonjs/index.js", + "scripts": { + "prepare": "tshy", + "pretest": "npm run prepare", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "keywords": [], + "author": "Isaac Z. 
Schlueter", + "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/npm/fs-minipass.git" + }, + "description": "fs read and write streams based on minipass", + "dependencies": { + "minipass": "^7.0.4" + }, + "devDependencies": { + "@types/node": "^20.11.30", + "mutate-fs": "^2.1.1", + "prettier": "^3.2.5", + "tap": "^18.7.1", + "tshy": "^1.12.0", + "typedoc": "^0.25.12" + }, + "files": [ + "dist" + ], + "engines": { + "node": ">=18.0.0" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "types": "./dist/commonjs/index.d.ts", + "type": "module", + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/deps/npm/node_modules/@npmcli/agent/package.json b/deps/npm/node_modules/@npmcli/agent/package.json index ef5b4e3228cc46..4d648fb5dfe052 100644 --- a/deps/npm/node_modules/@npmcli/agent/package.json +++ b/deps/npm/node_modules/@npmcli/agent/package.json @@ -1,17 +1,18 @@ { "name": "@npmcli/agent", - "version": "2.2.2", + "version": "3.0.0", "description": "the http/https agent used by the npm cli", "main": "lib/index.js", "scripts": { "gencerts": "bash scripts/create-cert.sh", "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "author": "GitHub Inc.", "license": "ISC", @@ -24,11 +25,11 @@ "lib/" ], "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.21.3", + "version": "4.23.1", "publish": "true" }, "dependencies": { @@ -39,17 +40,16 @@ "socks-proxy-agent": "^8.0.3" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.3", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.1", "minipass-fetch": "^3.0.3", "nock": "^13.2.7", - "semver": "^7.5.4", - "simple-socks": "^3.1.0", + "socksv5": "^0.0.6", "tap": "^16.3.0" }, "repository": { "type": "git", - "url": "https://github.com/npm/agent.git" + "url": "git+https://github.com/npm/agent.git" }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js index 06d03bbce7a32f..6bd4e9407e72d6 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js @@ -195,7 +195,10 @@ module.exports = cls => class IdealTreeBuilder extends cls { for (const node of this.idealTree.inventory.values()) { if (!node.optional) { try { - checkEngine(node.package, npmVersion, nodeVersion, this.options.force) + // if devEngines is present in the root node we ignore the engines check + if (!(node.isRoot && node.package.devEngines)) { + checkEngine(node.package, npmVersion, nodeVersion, this.options.force) + } } catch (err) { if (engineStrict) { throw err diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json index 7fec2c6bea339b..b1e2b21a254635 100644 --- a/deps/npm/node_modules/@npmcli/arborist/package.json +++ b/deps/npm/node_modules/@npmcli/arborist/package.json @@ -1,49 +1,49 @@ { "name": "@npmcli/arborist", - "version": "7.5.4", + "version": "8.0.0", "description": "Manage node_modules trees", "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", - "@npmcli/fs": "^3.1.1", - "@npmcli/installed-package-contents": "^2.1.0", - "@npmcli/map-workspaces": "^3.0.2", - "@npmcli/metavuln-calculator": "^7.1.1", - "@npmcli/name-from-folder": "^2.0.0", - "@npmcli/node-gyp": "^3.0.0", - "@npmcli/package-json": "^5.1.0", - "@npmcli/query": "^3.1.0", - "@npmcli/redact": "^2.0.0", - "@npmcli/run-script": "^8.1.0", - "bin-links": "^4.0.4", - "cacache": "^18.0.3", + "@npmcli/fs": "^4.0.0", + "@npmcli/installed-package-contents": "^3.0.0", + "@npmcli/map-workspaces": "^4.0.1", + "@npmcli/metavuln-calculator": "^8.0.0", + "@npmcli/name-from-folder": "^3.0.0", + "@npmcli/node-gyp": "^4.0.0", + "@npmcli/package-json": "^6.0.1", + "@npmcli/query": "^4.0.0", + "@npmcli/redact": "^3.0.0", + "@npmcli/run-script": "^9.0.1", + "bin-links": "^5.0.0", + "cacache": "^19.0.1", "common-ancestor-path": "^1.0.1", - "hosted-git-info": "^7.0.2", - "json-parse-even-better-errors": "^3.0.2", + "hosted-git-info": "^8.0.0", + "json-parse-even-better-errors": "^4.0.0", "json-stringify-nice": "^1.1.4", "lru-cache": "^10.2.2", "minimatch": "^9.0.4", - "nopt": "^7.2.1", - "npm-install-checks": "^6.2.0", - "npm-package-arg": "^11.0.2", - "npm-pick-manifest": "^9.0.1", - "npm-registry-fetch": "^17.0.1", - "pacote": "^18.0.6", - "parse-conflict-json": "^3.0.0", - "proc-log": "^4.2.0", - "proggy": "^2.0.0", + "nopt": "^8.0.0", + "npm-install-checks": "^7.1.0", + "npm-package-arg": "^12.0.0", + "npm-pick-manifest": "^10.0.0", + "npm-registry-fetch": "^18.0.1", + "pacote": "^19.0.0", + "parse-conflict-json": "^4.0.0", + "proc-log": "^5.0.0", + "proggy": "^3.0.0", 
"promise-all-reject-late": "^1.0.0", "promise-call-limit": "^3.0.1", - "read-package-json-fast": "^3.0.2", + "read-package-json-fast": "^4.0.0", "semver": "^7.3.7", - "ssri": "^10.0.6", + "ssri": "^12.0.0", "treeverse": "^3.0.0", "walk-up-path": "^3.0.1" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "benchmark": "^2.1.4", - "minify-registry-metadata": "^3.0.0", + "minify-registry-metadata": "^4.0.0", "nock": "^13.3.3", "tap": "^16.3.8", "tar-stream": "^3.0.0", @@ -89,7 +89,7 @@ ] }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", diff --git a/deps/npm/node_modules/@npmcli/config/package.json b/deps/npm/node_modules/@npmcli/config/package.json index 6cf2b85438c65a..18c677393b5ff3 100644 --- a/deps/npm/node_modules/@npmcli/config/package.json +++ b/deps/npm/node_modules/@npmcli/config/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/config", - "version": "8.3.4", + "version": "9.0.0", "files": [ "bin/", "lib/" @@ -31,23 +31,23 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/mock-globals": "^1.0.0", "@npmcli/template-oss": "4.23.3", "tap": "^16.3.8" }, "dependencies": { - "@npmcli/map-workspaces": "^3.0.2", - "@npmcli/package-json": "^5.1.1", + "@npmcli/map-workspaces": "^4.0.1", + "@npmcli/package-json": "^6.0.1", "ci-info": "^4.0.0", - "ini": "^4.1.2", - "nopt": "^7.2.1", - "proc-log": "^4.2.0", + "ini": "^5.0.0", + "nopt": "^8.0.0", + "proc-log": "^5.0.0", "semver": "^7.3.5", "walk-up-path": "^3.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", diff --git a/deps/npm/node_modules/@npmcli/fs/package.json b/deps/npm/node_modules/@npmcli/fs/package.json index 5261a11b78000e..e4063ec8752437 100644 --- a/deps/npm/node_modules/@npmcli/fs/package.json +++ b/deps/npm/node_modules/@npmcli/fs/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/fs", - "version": "3.1.1", + "version": "4.0.0", "description": "filesystem utilities for the npm cli", "main": "lib/index.js", "files": [ @@ -11,12 +11,13 @@ "snap": "tap", "test": "tap", "npmclilint": "npmcli-lint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", "postsnap": "npm run lintfix --", "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -29,19 +30,20 @@ "author": "GitHub Inc.", "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "dependencies": { "semver": "^7.3.5" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0" + "version": "4.23.3", + "publish": true }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/@npmcli/git/package.json b/deps/npm/node_modules/@npmcli/git/package.json index b6aa4a282cc0f1..2bc6730ba2151b 100644 --- a/deps/npm/node_modules/@npmcli/git/package.json +++ b/deps/npm/node_modules/@npmcli/git/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/git", - "version": "5.0.8", + "version": "6.0.1", "main": "lib/index.js", "files": [ "bin/", @@ -14,13 +14,14 @@ "author": "GitHub Inc.", "license": "ISC", "scripts": { - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "snap": "tap", "test": "tap", "posttest": "npm run lint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "template-oss-apply": "template-oss-apply --force" + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "timeout": 600, @@ -30,29 +31,29 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "npm-package-arg": "^11.0.0", "slash": "^3.0.0", "tap": "^16.0.1" }, "dependencies": { - "@npmcli/promise-spawn": "^7.0.0", - "ini": "^4.1.3", + "@npmcli/promise-spawn": "^8.0.0", + "ini": "^5.0.0", "lru-cache": "^10.0.1", - "npm-pick-manifest": "^9.0.0", - "proc-log": "^4.0.0", + "npm-pick-manifest": "^10.0.0", + "proc-log": "^5.0.0", "promise-inflight": "^1.0.1", "promise-retry": "^2.0.1", "semver": "^7.3.5", - "which": "^4.0.0" + "which": "^5.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/@npmcli/installed-package-contents/package.json b/deps/npm/node_modules/@npmcli/installed-package-contents/package.json index 132256430a6c18..d5b68a737daf49 100644 --- a/deps/npm/node_modules/@npmcli/installed-package-contents/package.json +++ b/deps/npm/node_modules/@npmcli/installed-package-contents/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/installed-package-contents", - "version": "2.1.0", + "version": "3.0.0", "description": "Get the list of files installed in a package in node_modules, including bundled dependencies", "author": "GitHub Inc.", "main": "lib/index.js", @@ -11,35 +11,36 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.4", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "dependencies": { - "npm-bundled": "^3.0.0", - "npm-normalize-package-bin": "^3.0.0" + "npm-bundled": "^4.0.0", + "npm-normalize-package-bin": "^4.0.0" }, "repository": { "type": "git", - "url": "https://github.com/npm/installed-package-contents.git" + "url": "git+https://github.com/npm/installed-package-contents.git" }, "files": [ "bin/", "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.4", + "version": "4.23.3", "publish": true }, "tap": { diff --git a/deps/npm/node_modules/@npmcli/map-workspaces/lib/index.js b/deps/npm/node_modules/@npmcli/map-workspaces/lib/index.js index b20bf5de5d631e..1b39d2e3f67e08 100644 --- a/deps/npm/node_modules/@npmcli/map-workspaces/lib/index.js +++ b/deps/npm/node_modules/@npmcli/map-workspaces/lib/index.js @@ -2,7 +2,7 @@ const path = require('path') const getName = require('@npmcli/name-from-folder') const { minimatch } = require('minimatch') -const rpj = require('read-package-json-fast') +const pkgJson = require('@npmcli/package-json') const { glob } = require('glob') function appendNegatedPatterns (allPatterns) { @@ -67,15 +67,7 @@ function getPatterns (workspaces) { } function getPackageName (pkg, pathname) { - const { name } = pkg - return name || getName(pathname) -} - -function pkgPathmame (opts) { - return (...args) => { - const cwd = opts.cwd ? 
opts.cwd : process.cwd() - return path.join.apply(null, [cwd, ...args]) - } + return pkg.name || getName(pathname) } // make sure glob pattern only matches folders @@ -101,16 +93,19 @@ async function mapWorkspaces (opts = {}) { code: 'EMAPWORKSPACESPKG', }) } + if (!opts.cwd) { + opts.cwd = process.cwd() + } const { workspaces = [] } = opts.pkg const { patterns, negatedPatterns } = getPatterns(workspaces) const results = new Map() - const seen = new Map() if (!patterns.length && !negatedPatterns.length) { return results } + const seen = new Map() const getGlobOpts = () => ({ ...opts, ignore: [ @@ -121,8 +116,6 @@ async function mapWorkspaces (opts = {}) { ], }) - const getPackagePathname = pkgPathmame(opts) - let matches = await glob(patterns.map((p) => getGlobPattern(p)), getGlobOpts()) // preserves glob@8 behavior matches = matches.sort((a, b) => a.localeCompare(b, 'en')) @@ -138,10 +131,8 @@ async function mapWorkspaces (opts = {}) { for (const match of orderedMatches) { let pkg - const packageJsonPathname = getPackagePathname(match, 'package.json') - try { - pkg = await rpj(packageJsonPathname) + pkg = await pkgJson.normalize(path.join(opts.cwd, match)) } catch (err) { if (err.code === 'ENOENT') { continue @@ -150,15 +141,14 @@ async function mapWorkspaces (opts = {}) { } } - const packagePathname = path.dirname(packageJsonPathname) - const name = getPackageName(pkg, packagePathname) + const name = getPackageName(pkg.content, pkg.path) let seenPackagePathnames = seen.get(name) if (!seenPackagePathnames) { seenPackagePathnames = new Set() seen.set(name, seenPackagePathnames) } - seenPackagePathnames.add(packagePathname) + seenPackagePathnames.add(pkg.path) } const errorMessageArray = ['must not have multiple workspaces with the same name'] @@ -200,6 +190,9 @@ mapWorkspaces.virtual = function (opts = {}) { code: 'EMAPWORKSPACESLOCKFILE', }) } + if (!opts.cwd) { + opts.cwd = process.cwd() + } const { packages = {} } = opts.lockfile const { workspaces = [] } = packages[''] || {} @@ -218,10 +211,9 @@ mapWorkspaces.virtual = function (opts = {}) { } } - const getPackagePathname = pkgPathmame(opts) for (const pattern of patterns) { for (const packageKey of minimatch.match(packageKeys, pattern)) { - const packagePathname = getPackagePathname(packageKey) + const packagePathname = path.join(opts.cwd, packageKey) const name = getPackageName(packages[packageKey], packagePathname) results.set(packagePathname, name) } diff --git a/deps/npm/node_modules/@npmcli/map-workspaces/package.json b/deps/npm/node_modules/@npmcli/map-workspaces/package.json index e6292b06bd2b43..f2667f25e9d5d2 100644 --- a/deps/npm/node_modules/@npmcli/map-workspaces/package.json +++ b/deps/npm/node_modules/@npmcli/map-workspaces/package.json @@ -1,18 +1,18 @@ { "name": "@npmcli/map-workspaces", - "version": "3.0.6", + "version": "4.0.1", "main": "lib/index.js", "files": [ "bin/", "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "description": "Retrieves a name:pathname Map for a given workspaces config", "repository": { "type": "git", - "url": "https://github.com/npm/map-workspaces.git" + "url": "git+https://github.com/npm/map-workspaces.git" }, "keywords": [ "npm", @@ -25,14 +25,15 @@ "author": "GitHub Inc.", "license": "ISC", "scripts": { - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "pretest": "npm run lint", "test": "tap", "snap": "tap", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run 
eslint -- --fix", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "check-coverage": true, @@ -42,19 +43,19 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.3", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "dependencies": { - "@npmcli/name-from-folder": "^2.0.0", + "@npmcli/name-from-folder": "^3.0.0", + "@npmcli/package-json": "^6.0.0", "glob": "^10.2.2", - "minimatch": "^9.0.0", - "read-package-json-fast": "^3.0.0" + "minimatch": "^9.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.3", + "version": "4.23.3", "publish": "true" } } diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json b/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json index a7ec02d2ee72b2..d4c3cf54d83ea7 100644 --- a/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json +++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/metavuln-calculator", - "version": "7.1.1", + "version": "8.0.0", "main": "lib/index.js", "files": [ "bin/", @@ -18,9 +18,9 @@ "posttest": "npm run lint", "snap": "tap", "postsnap": "npm run lint", - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force" }, @@ -33,24 +33,24 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "require-inject": "^1.4.4", "tap": "^16.0.1" }, "dependencies": { - "cacache": "^18.0.0", - "json-parse-even-better-errors": "^3.0.0", - "pacote": "^18.0.0", - "proc-log": "^4.1.0", + "cacache": "^19.0.0", + "json-parse-even-better-errors": "^4.0.0", + "pacote": "^19.0.0", + "proc-log": "^5.0.0", "semver": "^7.3.5" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true", "ciVersions": [ "16.14.0", diff --git a/deps/npm/node_modules/@npmcli/name-from-folder/package.json b/deps/npm/node_modules/@npmcli/name-from-folder/package.json index f0aa5b16dba1aa..323edd81d22fb4 100644 --- a/deps/npm/node_modules/@npmcli/name-from-folder/package.json +++ b/deps/npm/node_modules/@npmcli/name-from-folder/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/name-from-folder", - "version": "2.0.0", + "version": "3.0.0", "files": [ "bin/", "lib/" @@ -9,30 +9,32 @@ "description": "Get the package name from a folder path", "repository": { "type": "git", - "url": "https://github.com/npm/name-from-folder.git" + "url": "git+https://github.com/npm/name-from-folder.git" }, "author": "GitHub Inc.", "license": "ISC", "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.1", - "@npmcli/template-oss": "4.11.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.2" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.0" + "version": "4.23.3", + "publish": true }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/@npmcli/node-gyp/LICENSE b/deps/npm/node_modules/@npmcli/node-gyp/LICENSE new file mode 100644 index 00000000000000..3609cabca45350 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/node-gyp/LICENSE @@ -0,0 +1,7 @@ +ISC License: + +Copyright (c) 2023 by GitHub Inc. + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/deps/npm/node_modules/@npmcli/node-gyp/package.json b/deps/npm/node_modules/@npmcli/node-gyp/package.json index 999572bc5883a2..3be9663a39de04 100644 --- a/deps/npm/node_modules/@npmcli/node-gyp/package.json +++ b/deps/npm/node_modules/@npmcli/node-gyp/package.json @@ -1,19 +1,20 @@ { "name": "@npmcli/node-gyp", - "version": "3.0.0", + "version": "4.0.0", "description": "Tools for dealing with node-gyp packages", "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", - "url": "https://github.com/npm/node-gyp.git" + "url": "git+https://github.com/npm/node-gyp.git" }, "keywords": [ "npm", @@ -28,16 +29,17 @@ "author": "GitHub Inc.", "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "4.5.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.5.1" + "version": "4.23.3", + "publish": true }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js b/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js index 682d234825de94..3adec0143f445a 100644 --- a/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js +++ b/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js @@ -127,13 +127,9 @@ function unixifyPath (ref) { return ref.replace(/\\|:/g, '/') } -function securePath (ref) { - const secured = path.join('.', path.join('/', unixifyPath(ref))) - return secured.startsWith('.') ? '' : secured -} - function secureAndUnixifyPath (ref) { - return unixifyPath(securePath(ref)) + const secured = unixifyPath(path.join('.', path.join('/', unixifyPath(ref)))) + return secured.startsWith('./') ? 
'' : secured } // We don't want the `changes` array in here by default because this is a hot @@ -376,7 +372,7 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) // expand "directories.bin" if (steps.includes('binDir') && data.directories?.bin && !data.bin) { - const binsDir = path.resolve(pkg.path, securePath(data.directories.bin)) + const binsDir = path.resolve(pkg.path, secureAndUnixifyPath(data.directories.bin)) const bins = await lazyLoadGlob()('**', { cwd: binsDir }) data.bin = bins.reduce((acc, binFile) => { if (binFile && !binFile.startsWith('.')) { diff --git a/deps/npm/node_modules/@npmcli/package-json/package.json b/deps/npm/node_modules/@npmcli/package-json/package.json index a5ea22bdbb340f..e766e8ccb4bbf5 100644 --- a/deps/npm/node_modules/@npmcli/package-json/package.json +++ b/deps/npm/node_modules/@npmcli/package-json/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/package-json", - "version": "5.2.0", + "version": "6.0.1", "description": "Programmatic API to update package.json", "main": "lib/index.js", "files": [ @@ -10,12 +10,13 @@ "scripts": { "snap": "tap", "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", "postsnap": "npm run lintfix --", "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "keywords": [ "npm", @@ -24,19 +25,19 @@ "author": "GitHub Inc.", "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "read-package-json": "^7.0.0", - "read-package-json-fast": "^3.0.2", + "read-package-json-fast": "^4.0.0", "tap": "^16.0.1" }, "dependencies": { - "@npmcli/git": "^5.0.0", + "@npmcli/git": "^6.0.0", "glob": "^10.2.2", - "hosted-git-info": "^7.0.0", - "json-parse-even-better-errors": "^3.0.0", - "normalize-package-data": "^6.0.0", - "proc-log": "^4.0.0", + "hosted-git-info": "^8.0.0", + "json-parse-even-better-errors": "^4.0.0", + "normalize-package-data": "^7.0.0", + "proc-log": "^5.0.0", "semver": "^7.5.3" }, "repository": { @@ -44,11 +45,11 @@ "url": "git+https://github.com/npm/package-json.git" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true" }, "tap": { diff --git a/deps/npm/node_modules/@npmcli/promise-spawn/package.json b/deps/npm/node_modules/@npmcli/promise-spawn/package.json index 1b633f84596d20..9914063f85156d 100644 --- a/deps/npm/node_modules/@npmcli/promise-spawn/package.json +++ b/deps/npm/node_modules/@npmcli/promise-spawn/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/promise-spawn", - "version": "7.0.2", + "version": "8.0.1", "files": [ "bin/", "lib/" @@ -16,12 +16,13 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", "postsnap": "npm run lintfix --", "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "check-coverage": true, @@ -31,20 +32,20 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "spawk": "^1.7.1", "tap": "^16.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true }, "dependencies": { - "which": "^4.0.0" + "which": "^5.0.0" } } diff --git a/deps/npm/node_modules/@npmcli/query/package.json b/deps/npm/node_modules/@npmcli/query/package.json index ad45c18c44cd64..14f7fbfaac0168 100644 --- a/deps/npm/node_modules/@npmcli/query/package.json +++ b/deps/npm/node_modules/@npmcli/query/package.json @@ -1,16 +1,17 @@ { "name": "@npmcli/query", - "version": "3.1.0", + "version": "4.0.0", "description": "npm query parser and tools", "main": "lib/index.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "contributors": [ { @@ -35,24 +36,24 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.21.3", + "version": "4.23.3", "publish": true }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.3", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.2.0" }, "dependencies": { - "postcss-selector-parser": "^6.0.10" + "postcss-selector-parser": "^6.1.2" }, "repository": { "type": "git", - "url": "https://github.com/npm/query.git" + "url": "git+https://github.com/npm/query.git" }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/@npmcli/redact/package.json b/deps/npm/node_modules/@npmcli/redact/package.json index 831387ca541061..649f82ef5ca89b 100644 --- a/deps/npm/node_modules/@npmcli/redact/package.json +++ b/deps/npm/node_modules/@npmcli/redact/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/redact", - "version": "2.0.1", + "version": "3.0.0", "description": "Redact sensitive npm information from output", "main": "lib/index.js", "exports": { @@ -10,12 +10,13 @@ }, "scripts": { "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "keywords": [], "author": "GitHub Inc.", @@ -26,11 +27,11 @@ ], "repository": { "type": "git", - "url": "https://github.com/npm/redact.git" + "url": "git+https://github.com/npm/redact.git" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.3", + "version": "4.23.3", "publish": true }, "tap": { @@ -41,11 +42,11 @@ "timeout": 120 }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.2", - "@npmcli/template-oss": "4.21.3", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.10" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } } diff --git a/deps/npm/node_modules/@npmcli/run-script/package.json b/deps/npm/node_modules/@npmcli/run-script/package.json index 8a83e726fbeb2c..e5fd2fef62e5ce 100644 --- a/deps/npm/node_modules/@npmcli/run-script/package.json +++ b/deps/npm/node_modules/@npmcli/run-script/package.json @@ -1,32 +1,32 @@ { "name": "@npmcli/run-script", - "version": "8.1.0", + "version": "9.0.1", "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)", "author": "GitHub Inc.", "license": "ISC", "scripts": { "test": "tap", - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "postlint": "template-oss-check", "snap": "tap", "posttest": "npm run lint", "template-oss-apply": "template-oss-apply --force" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.4", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "spawk": "^1.8.1", "tap": "^16.0.1" }, "dependencies": { - "@npmcli/node-gyp": "^3.0.0", - "@npmcli/package-json": "^5.0.0", - "@npmcli/promise-spawn": "^7.0.0", + "@npmcli/node-gyp": "^4.0.0", + "@npmcli/package-json": "^6.0.0", + "@npmcli/promise-spawn": "^8.0.0", "node-gyp": "^10.0.0", - "proc-log": "^4.0.0", - "which": "^4.0.0" + "proc-log": "^5.0.0", + "which": 
"^5.0.0" }, "files": [ "bin/", @@ -35,14 +35,14 @@ "main": "lib/run-script.js", "repository": { "type": "git", - "url": "https://github.com/npm/run-script.git" + "url": "git+https://github.com/npm/run-script.git" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.4", + "version": "4.23.3", "publish": "true" }, "tap": { diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js new file mode 100644 index 00000000000000..c541b93001517e --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js @@ -0,0 +1,206 @@ +'use strict' + +const net = require('net') +const tls = require('tls') +const { once } = require('events') +const timers = require('timers/promises') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js') +const Errors = require('./errors.js') +const { Agent: AgentBase } = require('agent-base') + +module.exports = class Agent extends AgentBase { + #options + #timeouts + #proxy + #noProxy + #ProxyAgent + + constructor (options = {}) { + const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options) + + super(normalizedOptions) + + this.#options = normalizedOptions + this.#timeouts = timeouts + + if (proxy) { + this.#proxy = new URL(proxy) + this.#noProxy = noProxy + this.#ProxyAgent = getProxyAgent(proxy) + } + } + + get proxy () { + return this.#proxy ? { url: this.#proxy } : {} + } + + #getProxy (options) { + if (!this.#proxy) { + return + } + + const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, { + proxy: this.#proxy, + noProxy: this.#noProxy, + }) + + if (!proxy) { + return + } + + const cacheKey = cacheOptions({ + ...options, + ...this.#options, + timeouts: this.#timeouts, + proxy, + }) + + if (proxyCache.has(cacheKey)) { + return proxyCache.get(cacheKey) + } + + let ProxyAgent = this.#ProxyAgent + if (Array.isArray(ProxyAgent)) { + ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0] + } + + const proxyAgent = new ProxyAgent(proxy, { + ...this.#options, + socketOptions: { family: this.#options.family }, + }) + proxyCache.set(cacheKey, proxyAgent) + + return proxyAgent + } + + // takes an array of promises and races them against the connection timeout + // which will throw the necessary error if it is hit. This will return the + // result of the promise race. 
+ async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) { + if (timeout) { + const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal }) + .then(() => { + throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`) + }).catch((err) => { + if (err.name === 'AbortError') { + return + } + throw err + }) + promises.push(connectionTimeout) + } + + let result + try { + result = await Promise.race(promises) + ac.abort() + } catch (err) { + ac.abort() + throw err + } + return result + } + + async connect (request, options) { + // if the connection does not have its own lookup function + // set, then use the one from our options + options.lookup ??= this.#options.lookup + + let socket + let timeout = this.#timeouts.connection + const isSecureEndpoint = this.isSecureEndpoint(options) + + const proxy = this.#getProxy(options) + if (proxy) { + // some of the proxies will wait for the socket to fully connect before + // returning so we have to await this while also racing it against the + // connection timeout. + const start = Date.now() + socket = await this.#timeoutConnection({ + options, + timeout, + promises: [proxy.connect(request, options)], + }) + // see how much time proxy.connect took and subtract it from + // the timeout + if (timeout) { + timeout = timeout - (Date.now() - start) + } + } else { + socket = (isSecureEndpoint ? tls : net).connect(options) + } + + socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs) + socket.setNoDelay(this.keepAlive) + + const abortController = new AbortController() + const { signal } = abortController + + const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting'] + ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal }) + : Promise.resolve() + + await this.#timeoutConnection({ + options, + timeout, + promises: [ + connectPromise, + once(socket, 'error', { signal }).then((err) => { + throw err[0] + }), + ], + }, abortController) + + if (this.#timeouts.idle) { + socket.setTimeout(this.#timeouts.idle, () => { + socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`)) + }) + } + + return socket + } + + addRequest (request, options) { + const proxy = this.#getProxy(options) + // it would be better to call proxy.addRequest here but this causes the + // http-proxy-agent to call its super.addRequest which causes the request + // to be added to the agent twice. since we only support 3 agents + // currently (see the required agents in proxy.js) we have manually + // checked that the only public methods we need to call are called in the + // next block. this could change in the future and presumably we would get + // failing tests until we have properly called the necessary methods on + // each of our proxy agents + if (proxy?.setRequestProps) { + proxy.setRequestProps(request, options) + } + + request.setHeader('connection', this.keepAlive ? 
'keep-alive' : 'close')
+
+    if (this.#timeouts.response) {
+      let responseTimeout
+      request.once('finish', () => {
+        responseTimeout = setTimeout(() => {
+          request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy))
+        }, this.#timeouts.response)
+      })
+      request.once('response', () => {
+        clearTimeout(responseTimeout)
+      })
+    }
+
+    if (this.#timeouts.transfer) {
+      let transferTimeout
+      request.once('response', (res) => {
+        transferTimeout = setTimeout(() => {
+          res.destroy(new Errors.TransferTimeoutError(request, this.#proxy))
+        }, this.#timeouts.transfer)
+        res.once('close', () => {
+          clearTimeout(transferTimeout)
+        })
+      })
+    }
+
+    return super.addRequest(request, options)
+  }
+}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js
new file mode 100644
index 00000000000000..3c6946c566d736
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js
@@ -0,0 +1,53 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+const dns = require('dns')
+
+// this is a factory so that each request can have its own opts (i.e. ttl)
+// while still sharing the cache across all requests
+const cache = new LRUCache({ max: 50 })
+
+const getOptions = ({
+  family = 0,
+  hints = dns.ADDRCONFIG,
+  all = false,
+  verbatim = undefined,
+  ttl = 5 * 60 * 1000,
+  lookup = dns.lookup,
+}) => ({
+  // hints and lookup are returned since both are top level properties to (net|tls).connect
+  hints,
+  lookup: (hostname, ...args) => {
+    const callback = args.pop() // callback is always last arg
+    const lookupOptions = args[0] ?? {}
+
+    const options = {
+      family,
+      hints,
+      all,
+      verbatim,
+      ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions),
+    }
+
+    const key = JSON.stringify({ hostname, ...options })
+
+    if (cache.has(key)) {
+      const cached = cache.get(key)
+      return process.nextTick(callback, null, ...cached)
+    }
+
+    lookup(hostname, options, (err, ...result) => {
+      if (err) {
+        return callback(err)
+      }
+
+      cache.set(key, result, { ttl })
+      return callback(null, ...result)
+    })
+  },
+})
+
+module.exports = {
+  cache,
+  getOptions,
+}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js
new file mode 100644
index 00000000000000..70475aec8eb357
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js
@@ -0,0 +1,61 @@
+'use strict'
+
+class InvalidProxyProtocolError extends Error {
+  constructor (url) {
+    super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``)
+    this.code = 'EINVALIDPROXY'
+    this.proxy = url
+  }
+}
+
+class ConnectionTimeoutError extends Error {
+  constructor (host) {
+    super(`Timeout connecting to host \`${host}\``)
+    this.code = 'ECONNECTIONTIMEOUT'
+    this.host = host
+  }
+}
+
+class IdleTimeoutError extends Error {
+  constructor (host) {
+    super(`Idle timeout reached for host \`${host}\``)
+    this.code = 'EIDLETIMEOUT'
+    this.host = host
+  }
+}
+
+class ResponseTimeoutError extends Error {
+  constructor (request, proxy) {
+    let msg = 'Response timeout '
+    if (proxy) {
+      msg += `from proxy \`${proxy.host}\` `
+    }
+    msg += `connecting to host \`${request.host}\``
+    super(msg)
+    this.code = 'ERESPONSETIMEOUT'
+    this.proxy = proxy
+    this.request = request
+  }
+}
+
+class TransferTimeoutError extends Error {
+  constructor (request, proxy) {
+    let msg = 'Transfer timeout '
+ if (proxy) { + msg += `from proxy \`${proxy.host}\` ` + } + msg += `for \`${request.host}\`` + super(msg) + this.code = 'ETRANSFERTIMEOUT' + this.proxy = proxy + this.request = request + } +} + +module.exports = { + InvalidProxyProtocolError, + ConnectionTimeoutError, + IdleTimeoutError, + ResponseTimeoutError, + TransferTimeoutError, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js new file mode 100644 index 00000000000000..b33d6eaef07a21 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js @@ -0,0 +1,56 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, proxyCache } = require('./proxy.js') +const dns = require('./dns.js') +const Agent = require('./agents.js') + +const agentCache = new LRUCache({ max: 20 }) + +const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => { + // false has meaning so this can't be a simple truthiness check + if (agent != null) { + return agent + } + + url = new URL(url) + + const proxyForUrl = getProxy(url, { proxy, noProxy }) + const normalizedOptions = { + ...normalizeOptions(options), + proxy: proxyForUrl, + } + + const cacheKey = cacheOptions({ + ...normalizedOptions, + secureEndpoint: url.protocol === 'https:', + }) + + if (agentCache.has(cacheKey)) { + return agentCache.get(cacheKey) + } + + const newAgent = new Agent(normalizedOptions) + agentCache.set(cacheKey, newAgent) + + return newAgent +} + +module.exports = { + getAgent, + Agent, + // these are exported for backwards compatibility + HttpAgent: Agent, + HttpsAgent: Agent, + cache: { + proxy: proxyCache, + agent: agentCache, + dns: dns.cache, + clear: () => { + proxyCache.clear() + agentCache.clear() + dns.cache.clear() + }, + }, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js new file mode 100644 index 00000000000000..0bf53f725f0846 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js @@ -0,0 +1,86 @@ +'use strict' + +const dns = require('./dns') + +const normalizeOptions = (opts) => { + const family = parseInt(opts.family ?? '0', 10) + const keepAlive = opts.keepAlive ?? true + + const normalized = { + // nodejs http agent options. these are all the defaults + // but kept here to increase the likelihood of cache hits + // https://nodejs.org/api/http.html#new-agentoptions + keepAliveMsecs: keepAlive ? 1000 : undefined, + maxSockets: opts.maxSockets ?? 15, + maxTotalSockets: Infinity, + maxFreeSockets: keepAlive ? 256 : undefined, + scheduling: 'fifo', + // then spread the rest of the options + ...opts, + // we already set these to their defaults that we want + family, + keepAlive, + // our custom timeout options + timeouts: { + // the standard timeout option is mapped to our idle timeout + // and then deleted below + idle: opts.timeout ??
0, + connection: 0, + response: 0, + transfer: 0, + ...opts.timeouts, + }, + // get the dns options that go at the top level of socket connection + ...dns.getOptions({ family, ...opts.dns }), + } + + // remove timeout since we already used it to set our own idle timeout + delete normalized.timeout + + return normalized +} + +const createKey = (obj) => { + let key = '' + const sorted = Object.entries(obj).sort((a, b) => a[0].localeCompare(b[0])) + for (let [k, v] of sorted) { + if (v == null) { + v = 'null' + } else if (v instanceof URL) { + v = v.toString() + } else if (typeof v === 'object') { + v = createKey(v) + } + key += `${k}:${v}:` + } + return key +} + +const cacheOptions = ({ secureEndpoint, ...options }) => createKey({ + secureEndpoint: !!secureEndpoint, + // socket connect options + family: options.family, + hints: options.hints, + localAddress: options.localAddress, + // tls specific connect options + strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false, + ca: secureEndpoint ? options.ca : null, + cert: secureEndpoint ? options.cert : null, + key: secureEndpoint ? options.key : null, + // http agent options + keepAlive: options.keepAlive, + keepAliveMsecs: options.keepAliveMsecs, + maxSockets: options.maxSockets, + maxTotalSockets: options.maxTotalSockets, + maxFreeSockets: options.maxFreeSockets, + scheduling: options.scheduling, + // timeout options + timeouts: options.timeouts, + // proxy + proxy: options.proxy, +}) + +module.exports = { + normalizeOptions, + cacheOptions, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js new file mode 100644 index 00000000000000..6272e929e57bcf --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js @@ -0,0 +1,88 @@ +'use strict' + +const { HttpProxyAgent } = require('http-proxy-agent') +const { HttpsProxyAgent } = require('https-proxy-agent') +const { SocksProxyAgent } = require('socks-proxy-agent') +const { LRUCache } = require('lru-cache') +const { InvalidProxyProtocolError } = require('./errors.js') + +const PROXY_CACHE = new LRUCache({ max: 20 }) + +const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols) + +const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy']) + +const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => { + key = key.toLowerCase() + if (PROXY_ENV_KEYS.has(key)) { + acc[key] = value + } + return acc +}, {}) + +const getProxyAgent = (url) => { + url = new URL(url) + + const protocol = url.protocol.slice(0, -1) + if (SOCKS_PROTOCOLS.has(protocol)) { + return SocksProxyAgent + } + if (protocol === 'https' || protocol === 'http') { + return [HttpProxyAgent, HttpsProxyAgent] + } + + throw new InvalidProxyProtocolError(url) +} + +const isNoProxy = (url, noProxy) => { + if (typeof noProxy === 'string') { + noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean) + } + + if (!noProxy || !noProxy.length) { + return false + } + + const hostSegments = url.hostname.split('.').reverse() + + return noProxy.some((no) => { + const noSegments = no.split('.').filter(Boolean).reverse() + if (!noSegments.length) { + return false + } + + for (let i = 0; i < noSegments.length; i++) { + if (hostSegments[i] !== noSegments[i]) { + return false + } + } + + return true + }) +} + +const getProxy = (url, { proxy, noProxy }) => { + url = new URL(url) + + if (!proxy) { + proxy = url.protocol === 'https:' + ?
PROXY_ENV.https_proxy + : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy + } + + if (!noProxy) { + noProxy = PROXY_ENV.no_proxy + } + + if (!proxy || isNoProxy(url, noProxy)) { + return null + } + + return new URL(proxy) +} + +module.exports = { + getProxyAgent, + getProxy, + proxyCache: PROXY_CACHE, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json new file mode 100644 index 00000000000000..ef5b4e3228cc46 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json @@ -0,0 +1,60 @@ +{ + "name": "@npmcli/agent", + "version": "2.2.2", + "description": "the http/https agent used by the npm cli", + "main": "lib/index.js", + "scripts": { + "gencerts": "bash scripts/create-cert.sh", + "test": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/agent/issues" + }, + "homepage": "https://github.com/npm/agent#readme", + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.3", + "publish": "true" + }, + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.3", + "minipass-fetch": "^3.0.3", + "nock": "^13.2.7", + "semver": "^7.5.4", + "simple-socks": "^3.1.0", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/agent.git" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md new file mode 100644 index 00000000000000..5fc208ff122e08 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
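A quick usage sketch for the proxy helpers in lib/proxy.js above; the hosts, proxy URL, and noProxy list here are hypothetical, and getProxy is reached through the package's lib path since it is not re-exported from the index:

const { getProxy } = require('@npmcli/agent/lib/proxy.js')

// 'npmjs.org' matches registry.npmjs.org segment-by-segment from the right,
// so the host is exempted and no proxy URL is returned
getProxy(new URL('https://registry.npmjs.org'), {
  proxy: 'http://proxy.local:8080',
  noProxy: 'npmjs.org,localhost',
}) // -> null

// example.com shares no suffix segments with any noProxy entry, so the
// configured proxy comes back as a URL instance
getProxy(new URL('https://example.com'), {
  proxy: 'http://proxy.local:8080',
  noProxy: 'npmjs.org,localhost',
}) // -> URL { href: 'http://proxy.local:8080/' }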
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js new file mode 100644 index 00000000000000..cb5982f79077ac --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js @@ -0,0 +1,20 @@ +// given an input that may or may not be an object, return an object that has +// a copy of every defined property listed in 'copy'. if the input is not an +// object, assign it to the property named by 'wrap' +const getOptions = (input, { copy, wrap }) => { + const result = {} + + if (input && typeof input === 'object') { + for (const prop of copy) { + if (input[prop] !== undefined) { + result[prop] = input[prop] + } + } + } else { + result[wrap] = input + } + + return result +} + +module.exports = getOptions diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js new file mode 100644 index 00000000000000..4d13bc037359d7 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js @@ -0,0 +1,9 @@ +const semver = require('semver') + +const satisfies = (range) => { + return semver.satisfies(process.version, range, { includePrerelease: true }) +} + +module.exports = { + satisfies, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE new file mode 100644 index 00000000000000..93546dfb7655bf --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE @@ -0,0 +1,15 @@ +(The MIT License) + +Copyright (c) 2011-2017 JP Richardson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files +(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, + merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS +OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js new file mode 100644 index 00000000000000..1cd1e05d0c533d --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js @@ -0,0 +1,129 @@ +'use strict' +const { inspect } = require('util') + +// adapted from node's internal/errors +// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js + +// close copy of node's internal SystemError class. 
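+// a usage sketch, with hypothetical values: the E() factory at the bottom of
+// this file wraps SystemError into one class per code, so the polyfill throws
+// e.g.
+//   new ERR_FS_CP_EINVAL({
+//     message: 'src and dest cannot be the same',
+//     path: '/tmp/a',
+//     syscall: 'cp',
+//     errno: EINVAL,
+//   })
+// whose toString() reads 'SystemError [ERR_FS_CP_EINVAL]: Invalid src or
+// dest: cp returned undefined (src and dest cannot be the same) /tmp/a',
+// where the 'undefined' comes from the unset context.code noted in the XXX
+// comment below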
+class SystemError { + constructor (code, prefix, context) { + // XXX context.code is undefined in all constructors used in cp/polyfill + // that may be a bug copied from node, maybe the constructor should use + // `code` not `errno`? nodejs/node#41104 + let message = `${prefix}: ${context.syscall} returned ` + + `${context.code} (${context.message})` + + if (context.path !== undefined) { + message += ` ${context.path}` + } + if (context.dest !== undefined) { + message += ` => ${context.dest}` + } + + this.code = code + Object.defineProperties(this, { + name: { + value: 'SystemError', + enumerable: false, + writable: true, + configurable: true, + }, + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + info: { + value: context, + enumerable: true, + configurable: true, + writable: false, + }, + errno: { + get () { + return context.errno + }, + set (value) { + context.errno = value + }, + enumerable: true, + configurable: true, + }, + syscall: { + get () { + return context.syscall + }, + set (value) { + context.syscall = value + }, + enumerable: true, + configurable: true, + }, + }) + + if (context.path !== undefined) { + Object.defineProperty(this, 'path', { + get () { + return context.path + }, + set (value) { + context.path = value + }, + enumerable: true, + configurable: true, + }) + } + + if (context.dest !== undefined) { + Object.defineProperty(this, 'dest', { + get () { + return context.dest + }, + set (value) { + context.dest = value + }, + enumerable: true, + configurable: true, + }) + } + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } + + [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { + return inspect(this, { + ...ctx, + getters: true, + customInspect: false, + }) + } +} + +function E (code, message) { + module.exports[code] = class NodeError extends SystemError { + constructor (ctx) { + super(code, message, ctx) + } + } +} + +E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') +E('ERR_FS_CP_EEXIST', 'Target already exists') +E('ERR_FS_CP_EINVAL', 'Invalid src or dest') +E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') +E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') +E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') +E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') +E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') +E('ERR_FS_EISDIR', 'Path is a directory') + +module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { + constructor (name, expected, actual) { + super() + this.code = 'ERR_INVALID_ARG_TYPE' + this.message = `The ${name} argument must be ${expected}. 
Received ${typeof actual}` + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js new file mode 100644 index 00000000000000..972ce7aa12abef --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js @@ -0,0 +1,22 @@ +const fs = require('fs/promises') +const getOptions = require('../common/get-options.js') +const node = require('../common/node.js') +const polyfill = require('./polyfill.js') + +// node 16.7.0 added fs.cp +const useNative = node.satisfies('>=16.7.0') + +const cp = async (src, dest, opts) => { + const options = getOptions(opts, { + copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], + }) + + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + return useNative + ? fs.cp(src, dest, options) + : polyfill(src, dest, options) +} + +module.exports = cp diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js new file mode 100644 index 00000000000000..80eb10de971918 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js @@ -0,0 +1,428 @@ +// this file is a modified version of the code in node 17.2.0 +// which is, in turn, a modified version of the fs-extra module on npm +// node core changes: +// - Use of the assert module has been replaced with core's error system. +// - All code related to the glob dependency has been removed. +// - Bring your own custom fs module is not currently supported. +// - Some basic code cleanup. +// changes here: +// - remove all callback related code +// - drop sync support +// - change assertions back to non-internal methods (see options.js) +// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows +'use strict' + +const { + ERR_FS_CP_DIR_TO_NON_DIR, + ERR_FS_CP_EEXIST, + ERR_FS_CP_EINVAL, + ERR_FS_CP_FIFO_PIPE, + ERR_FS_CP_NON_DIR_TO_DIR, + ERR_FS_CP_SOCKET, + ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, + ERR_FS_CP_UNKNOWN, + ERR_FS_EISDIR, + ERR_INVALID_ARG_TYPE, +} = require('./errors.js') +const { + constants: { + errno: { + EEXIST, + EISDIR, + EINVAL, + ENOTDIR, + }, + }, +} = require('os') +const { + chmod, + copyFile, + lstat, + mkdir, + readdir, + readlink, + stat, + symlink, + unlink, + utimes, +} = require('fs/promises') +const { + dirname, + isAbsolute, + join, + parse, + resolve, + sep, + toNamespacedPath, +} = require('path') +const { fileURLToPath } = require('url') + +const defaultOptions = { + dereference: false, + errorOnExist: false, + filter: undefined, + force: true, + preserveTimestamps: false, + recursive: false, +} + +async function cp (src, dest, opts) { + if (opts != null && typeof opts !== 'object') { + throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) + } + return cpFn( + toNamespacedPath(getValidatedPath(src)), + toNamespacedPath(getValidatedPath(dest)), + { ...defaultOptions, ...opts }) +} + +function getValidatedPath (fileURLOrPath) { + const path = fileURLOrPath != null && fileURLOrPath.href + && fileURLOrPath.origin + ? 
fileURLToPath(fileURLOrPath) + : fileURLOrPath + return path +} + +async function cpFn (src, dest, opts) { + // Warn about using preserveTimestamps on 32-bit node + // istanbul ignore next + if (opts.preserveTimestamps && process.arch === 'ia32') { + const warning = 'Using the preserveTimestamps option in 32-bit ' + + 'node is not recommended' + process.emitWarning(warning, 'TimestampPrecisionWarning') + } + const stats = await checkPaths(src, dest, opts) + const { srcStat, destStat } = stats + await checkParentPaths(src, srcStat, dest) + if (opts.filter) { + return handleFilter(checkParentDir, destStat, src, dest, opts) + } + return checkParentDir(destStat, src, dest, opts) +} + +async function checkPaths (src, dest, opts) { + const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) + if (destStat) { + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: 'src and dest cannot be the same', + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + if (srcStat.isDirectory() && !destStat.isDirectory()) { + throw new ERR_FS_CP_DIR_TO_NON_DIR({ + message: `cannot overwrite directory ${src} ` + + `with non-directory ${dest}`, + path: dest, + syscall: 'cp', + errno: EISDIR, + }) + } + if (!srcStat.isDirectory() && destStat.isDirectory()) { + throw new ERR_FS_CP_NON_DIR_TO_DIR({ + message: `cannot overwrite non-directory ${src} ` + + `with directory ${dest}`, + path: dest, + syscall: 'cp', + errno: ENOTDIR, + }) + } + } + + if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return { srcStat, destStat } +} + +function areIdentical (srcStat, destStat) { + return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && + destStat.dev === srcStat.dev +} + +function getStats (src, dest, opts) { + const statFunc = opts.dereference ? + (file) => stat(file, { bigint: true }) : + (file) => lstat(file, { bigint: true }) + return Promise.all([ + statFunc(src), + statFunc(dest).catch((err) => { + // istanbul ignore next: unsure how to cover. + if (err.code === 'ENOENT') { + return null + } + // istanbul ignore next: unsure how to cover. + throw err + }), + ]) +} + +async function checkParentDir (destStat, src, dest, opts) { + const destParent = dirname(dest) + const dirExists = await pathExists(destParent) + if (dirExists) { + return getStatsForCopy(destStat, src, dest, opts) + } + await mkdir(destParent, { recursive: true }) + return getStatsForCopy(destStat, src, dest, opts) +} + +function pathExists (dest) { + return stat(dest).then( + () => true, + // istanbul ignore next: not sure when this would occur + (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) +} + +// Recursively check if dest parent is a subdirectory of src. +// It works for all file types including symlinks since it +// checks the src and dest inodes. It starts from the deepest +// parent and stops once it reaches the src parent or the root path. 
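+// e.g. with hypothetical paths, cp('/a', '/a/b/c') stats '/a/b' first (no
+// match), then '/a', whose { dev, ino } pair is identical to srcStat, so
+// ERR_FS_CP_EINVAL is thrown; comparing inodes rather than path strings also
+// catches routes into src that pass through symlinks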
+async function checkParentPaths (src, srcStat, dest) { + const srcParent = resolve(dirname(src)) + const destParent = resolve(dirname(dest)) + if (destParent === srcParent || destParent === parse(destParent).root) { + return + } + let destStat + try { + destStat = await stat(destParent, { bigint: true }) + } catch (err) { + // istanbul ignore else: not sure when this would occur + if (err.code === 'ENOENT') { + return + } + // istanbul ignore next: not sure when this would occur + throw err + } + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return checkParentPaths(src, srcStat, destParent) +} + +const normalizePathToArray = (path) => + resolve(path).split(sep).filter(Boolean) + +// Return true if dest is a subdir of src, otherwise false. +// It only checks the path strings. +function isSrcSubdir (src, dest) { + const srcArr = normalizePathToArray(src) + const destArr = normalizePathToArray(dest) + return srcArr.every((cur, i) => destArr[i] === cur) +} + +async function handleFilter (onInclude, destStat, src, dest, opts, cb) { + const include = await opts.filter(src, dest) + if (include) { + return onInclude(destStat, src, dest, opts, cb) + } +} + +function startCopy (destStat, src, dest, opts) { + if (opts.filter) { + return handleFilter(getStatsForCopy, destStat, src, dest, opts) + } + return getStatsForCopy(destStat, src, dest, opts) +} + +async function getStatsForCopy (destStat, src, dest, opts) { + const statFn = opts.dereference ? stat : lstat + const srcStat = await statFn(src) + // istanbul ignore else: can't portably test FIFO + if (srcStat.isDirectory() && opts.recursive) { + return onDir(srcStat, destStat, src, dest, opts) + } else if (srcStat.isDirectory()) { + throw new ERR_FS_EISDIR({ + message: `${src} is a directory (not copied)`, + path: src, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) { + return onFile(srcStat, destStat, src, dest, opts) + } else if (srcStat.isSymbolicLink()) { + return onLink(destStat, src, dest) + } else if (srcStat.isSocket()) { + throw new ERR_FS_CP_SOCKET({ + message: `cannot copy a socket file: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFIFO()) { + throw new ERR_FS_CP_FIFO_PIPE({ + message: `cannot copy a FIFO pipe: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // istanbul ignore next: should be unreachable + throw new ERR_FS_CP_UNKNOWN({ + message: `cannot copy an unknown file type: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) +} + +function onFile (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return _copyFile(srcStat, src, dest, opts) + } + return mayCopyFile(srcStat, src, dest, opts) +} + +async function mayCopyFile (srcStat, src, dest, opts) { + if (opts.force) { + await unlink(dest) + return _copyFile(srcStat, src, dest, opts) + } else if (opts.errorOnExist) { + throw new ERR_FS_CP_EEXIST({ + message: `${dest} already exists`, + path: dest, + syscall: 'cp', + errno: EEXIST, + }) + } +} + +async function _copyFile (srcStat, src, dest, opts) { + await copyFile(src, dest) + if (opts.preserveTimestamps) { + return handleTimestampsAndMode(srcStat.mode, src, dest) + } + return setDestMode(dest, srcStat.mode) +} + +async function handleTimestampsAndMode (srcMode, src, dest) { + // Make sure the file is writable before 
setting the timestamp + // otherwise open fails with EPERM when invoked with 'r+' + // (through utimes call) + if (fileIsNotWritable(srcMode)) { + await makeFileWritable(dest, srcMode) + return setDestTimestampsAndMode(srcMode, src, dest) + } + return setDestTimestampsAndMode(srcMode, src, dest) +} + +function fileIsNotWritable (srcMode) { + return (srcMode & 0o200) === 0 +} + +function makeFileWritable (dest, srcMode) { + return setDestMode(dest, srcMode | 0o200) +} + +async function setDestTimestampsAndMode (srcMode, src, dest) { + await setDestTimestamps(src, dest) + return setDestMode(dest, srcMode) +} + +function setDestMode (dest, srcMode) { + return chmod(dest, srcMode) +} + +async function setDestTimestamps (src, dest) { + // The initial srcStat.atime cannot be trusted + // because it is modified by the read(2) system call + // (See https://nodejs.org/api/fs.html#fs_stat_time_values) + const updatedSrcStat = await stat(src) + return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) +} + +function onDir (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return mkDirAndCopy(srcStat.mode, src, dest, opts) + } + return copyDir(src, dest, opts) +} + +async function mkDirAndCopy (srcMode, src, dest, opts) { + await mkdir(dest) + await copyDir(src, dest, opts) + return setDestMode(dest, srcMode) +} + +async function copyDir (src, dest, opts) { + const dir = await readdir(src) + for (let i = 0; i < dir.length; i++) { + const item = dir[i] + const srcItem = join(src, item) + const destItem = join(dest, item) + const { destStat } = await checkPaths(srcItem, destItem, opts) + await startCopy(destStat, srcItem, destItem, opts) + } +} + +async function onLink (destStat, src, dest) { + let resolvedSrc = await readlink(src) + if (!isAbsolute(resolvedSrc)) { + resolvedSrc = resolve(dirname(src), resolvedSrc) + } + if (!destStat) { + return symlink(resolvedSrc, dest) + } + let resolvedDest + try { + resolvedDest = await readlink(dest) + } catch (err) { + // Dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. If dest already exists, + // fs throws error anyway, so no need to guard against it here. + // istanbul ignore next: can only test on windows + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { + return symlink(resolvedSrc, dest) + } + // istanbul ignore next: should not be possible + throw err + } + if (!isAbsolute(resolvedDest)) { + resolvedDest = resolve(dirname(dest), resolvedDest) + } + if (isSrcSubdir(resolvedSrc, resolvedDest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + + `${resolvedDest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // Do not copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. 
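+ // e.g. with hypothetical links: if dest currently resolves to '/d' and src
+ // resolves to '/d/sub', the tree the new link must point into sits behind
+ // dest itself, so the overwrite is refused instead of risking a dangling
+ // link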
+ const srcStat = await stat(src) + if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { + throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ + message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return copyLink(resolvedSrc, dest) +} + +async function copyLink (resolvedSrc, dest) { + await unlink(dest) + return symlink(resolvedSrc, dest) +} + +module.exports = cp diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js new file mode 100644 index 00000000000000..81c746304cc428 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js @@ -0,0 +1,13 @@ +'use strict' + +const cp = require('./cp/index.js') +const withTempDir = require('./with-temp-dir.js') +const readdirScoped = require('./readdir-scoped.js') +const moveFile = require('./move-file.js') + +module.exports = { + cp, + withTempDir, + readdirScoped, + moveFile, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js new file mode 100644 index 00000000000000..d56e06d384659a --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js @@ -0,0 +1,78 @@ +const { dirname, join, resolve, relative, isAbsolute } = require('path') +const fs = require('fs/promises') + +const pathExists = async path => { + try { + await fs.access(path) + return true + } catch (er) { + return er.code !== 'ENOENT' + } +} + +const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { + if (!source || !destination) { + throw new TypeError('`source` and `destination` file required') + } + + options = { + overwrite: true, + ...options, + } + + if (!options.overwrite && await pathExists(destination)) { + throw new Error(`The destination file exists: ${destination}`) + } + + await fs.mkdir(dirname(destination), { recursive: true }) + + try { + await fs.rename(source, destination) + } catch (error) { + if (error.code === 'EXDEV' || error.code === 'EPERM') { + const sourceStat = await fs.lstat(source) + if (sourceStat.isDirectory()) { + const files = await fs.readdir(source) + await Promise.all(files.map((file) => + moveFile(join(source, file), join(destination, file), options, false, symlinks) + )) + } else if (sourceStat.isSymbolicLink()) { + symlinks.push({ source, destination }) + } else { + await fs.copyFile(source, destination) + } + } else { + throw error + } + } + + if (root) { + await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { + let target = await fs.readlink(symSource) + // junction symlinks in windows will be absolute paths, so we need to + // make sure they point to the symlink destination + if (isAbsolute(target)) { + target = resolve(symDestination, relative(symSource, target)) + } + // try to determine what the actual file is so we can create the correct + // type of symlink in windows + let targetStat = 'file' + try { + targetStat = await fs.stat(resolve(dirname(symSource), target)) + if (targetStat.isDirectory()) { + targetStat = 'junction' + } + } catch { + // targetStat remains 'file' + } + await fs.symlink( + target, + symDestination, + targetStat + ) + })) + await fs.rm(source, { recursive: true, force: true }) + } +} + +module.exports = moveFile diff --git 
a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js new file mode 100644 index 00000000000000..cd601dfbe7486b --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js @@ -0,0 +1,20 @@ +const { readdir } = require('fs/promises') +const { join } = require('path') + +const readdirScoped = async (dir) => { + const results = [] + + for (const item of await readdir(dir)) { + if (item.startsWith('@')) { + for (const scopedItem of await readdir(join(dir, item))) { + results.push(join(item, scopedItem)) + } + } else { + results.push(item) + } + } + + return results +} + +module.exports = readdirScoped diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js new file mode 100644 index 00000000000000..0738ac4f29e1be --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js @@ -0,0 +1,39 @@ +const { join, sep } = require('path') + +const getOptions = require('./common/get-options.js') +const { mkdir, mkdtemp, rm } = require('fs/promises') + +// create a temp directory, ensure its permissions match its parent, then call +// the supplied function passing it the path to the directory. clean up after +// the function finishes, whether it throws or not +const withTempDir = async (root, fn, opts) => { + const options = getOptions(opts, { + copy: ['tmpPrefix'], + }) + // create the directory + await mkdir(root, { recursive: true }) + + const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || '')) + let err + let result + + try { + result = await fn(target) + } catch (_err) { + err = _err + } + + try { + await rm(target, { force: true, recursive: true }) + } catch { + // ignore errors + } + + if (err) { + throw err + } + + return result +} + +module.exports = withTempDir diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json new file mode 100644 index 00000000000000..5261a11b78000e --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json @@ -0,0 +1,52 @@ +{ + "name": "@npmcli/fs", + "version": "3.1.1", + "description": "filesystem utilities for the npm cli", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "snap": "tap", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/fs.git" + }, + "keywords": [ + "npm", + "oss" + ], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.1" + }, + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.22.0" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md new file mode 100644 index 00000000000000..8d28acf866d932 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js new file mode 100644 index 00000000000000..ad5a76a4f73f26 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js new file mode 100644 index 00000000000000..5f6192c3cec566 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js @@ -0,0 +1,165 @@ +'use strict' + +const fs = require('fs/promises') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +async function read (cache, integrity, opts = {}) { + const { size } = opts + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = size ? 
{ size } : await fs.stat(cpath) + return { stat, cpath, sri } + }) + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + const data = await fs.readFile(cpath, { encoding: null }) + + if (stat.size !== data.length) { + throw sizeError(stat.size, data.length) + } + + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = size ? { size } : await fs.stat(cpath) + return { stat, cpath, sri } + }) + + return readPipeline(cpath, stat.size, sri, stream) + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.copy = copy + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath) => { + return fs.copyFile(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +async function hasContent (cache, integrity) { + if (!integrity) { + return false + } + + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.stat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } +} + +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. 
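+ // e.g. for a multi-entry integrity string (digests hypothetical) like
+ //   'sha512-aaa... sha512-bbb...'
+ // every digest of the picked algorithm is tried through the recursive call
+ // below, and the first one whose content file actually exists on disk wins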
+ const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js new file mode 100644 index 00000000000000..ce58d679e4cb25 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,18 @@ +'use strict' + +const fs = require('fs/promises') +const contentPath = require('./path') +const { hasContent } = require('./read') + +module.exports = rm + +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) + return true + } else { + return false + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js new file mode 100644 index 00000000000000..e7187abca8788a --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js @@ -0,0 +1,206 @@ +'use strict' + +const events = require('events') + +const contentPath = require('./path') +const fs = require('fs/promises') +const { moveFile } = require('@npmcli/fs') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const fsm = require('fs-minipass') + +module.exports = write + +// Cache of move operations in process so we don't duplicate +const moveOperations = new Map() + +async function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + + if (typeof size === 'number' && data.length !== size) { + throw sizeError(size, data.length) + } + + const sri = ssri.fromData(data, algorithms ? 
{ algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + throw checksumError(integrity, sri) + } + + for (const algo in sri) { + const tmp = await makeTmp(cache, opts) + const hash = sri[algo].toString() + try { + await fs.writeFile(tmp.target, data, { flag: 'wx' }) + await moveToDestination(tmp, cache, hash, opts) + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } + } + return { integrity: sri, size: data.length } +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + this.handleContentP.catch(error => this.emit('error', error)) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. + return Promise.reject(e).catch(cb) + } + // eslint-disable-next-line promise/catch-or-return + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + // eslint-disable-next-line promise/always-return + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +async function handleContent (inputStream, cache, opts) { + const tmp = await makeTmp(cache, opts) + try { + const res = await pipeToTmp(inputStream, cache, tmp.target, opts) + await moveToDestination( + tmp, + cache, + res.integrity, + opts + ) + return res + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } +} + +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } +} + +async function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) + return { + target: tmpTarget, + moved: false, + } +} + +async function moveToDestination (tmp, cache, sri) { + const destination = 
contentPath(cache, sri) + const destDir = path.dirname(destination) + if (moveOperations.has(destination)) { + return moveOperations.get(destination) + } + moveOperations.set( + destination, + fs.mkdir(destDir, { recursive: true }) + .then(async () => { + await moveFile(tmp.target, destination, { overwrite: false }) + tmp.moved = true + return tmp.moved + }) + .catch(err => { + if (!err.message.startsWith('The destination file exists')) { + throw Object.assign(err, { code: 'EEXIST' }) + } + }).finally(() => { + moveOperations.delete(destination) + }) + + ) + return moveOperations.get(destination) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js new file mode 100644 index 00000000000000..89c28f2f257d48 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,336 @@ +'use strict' + +const crypto = require('crypto') +const { + appendFile, + mkdir, + readFile, + readdir, + rm, + writeFile, +} = require('fs/promises') +const { Minipass } = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const contentPath = require('./content/path') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const { moveFile } = require('@npmcli/fs') + +const pMap = require('p-map') +const lsStreamConcurrency = 5 + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. + // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. 
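+ // e.g. a deletion appended via insert(cache, key, null) becomes the last
+ // line of the bucket; since this loop walks backwards it is seen first,
+ // and without a validateEntry hook we stop right here, dropping every
+ // older entry recorded for the key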
+ if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await mkdir(path.dirname(target), { recursive: true }) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rm(tmp.target, { recursive: true, force: true }) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await mkdir(path.dirname(bucket), { recursive: true }) + // we use moveFile from @npmcli/fs directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + } + + // write the file atomically + const tmp = await setup() + try { + await write(tmp) + } finally { + await teardown(tmp) + } + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +async function insert (cache, key, integrity, opts = {}) { + const { metadata, size, time } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: time || Date.now(), + size, + metadata, + } + try { + await mkdir(path.dirname(bucket), { recursive: true }) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still producing the same hash of the JSON in + // question. So, we just slap the hash in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this.
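+ // a bucket line therefore looks like (hash and payload hypothetical):
+ //   0a1b2c...<tab>{"key":"my-key","integrity":"sha512-...","time":1234}
+ // and bucketEntries() further down recomputes the sha1 of the JSON payload,
+ // silently discarding any line whose prefix no longer matches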
+ await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +async function find (cache, key) { + const bucket = bucketPath(cache, key) + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rm(bucket, { recursive: true, force: true }) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await pMap(buckets, async (bucket) => { + const bucketPath = path.join(indexDir, bucket) + const subbuckets = await readdirOrEmpty(bucketPath) + await pMap(subbuckets, async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await pMap(subbucketEntries, async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess? + const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + }, + { concurrency: lsStreamConcurrency }) + }, + { concurrency: lsStreamConcurrency }) + }, + { concurrency: lsStreamConcurrency }) + stream.end() + return stream + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.ls = ls + +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) +} + +module.exports.bucketEntries = bucketEntries + +async function bucketEntries (bucket, filter) { + const data = await readFile(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter!
+ // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (_) { + // eslint-ignore-next-line no-empty-block + } + // coverage disabled here, no need to test with an entry that parses to something falsey + // istanbul ignore else + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/get.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/get.js new file mode 100644 index 00000000000000..80ec206c7ecaaa --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/get.js @@ -0,0 +1,170 @@ +'use strict' + +const Collect = require('minipass-collect') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +async function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} +module.exports = getData + +async function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return memoized + } + + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res +} +module.exports.byDigest = getDataByDigest + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream 
(cache, key, opts = {}) { + const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + return stream + }).catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} + +module.exports.copy = copy + +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key +} + +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/index.js new file mode 100644 index 00000000000000..c9b0da5f3a271b --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/index.js @@ -0,0 +1,42 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.stream = get.stream 
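+// Illustrative usage sketch of the API assembled here; '/tmp/my-cache', the
+// key 'registry:foo', and `someBuffer` are placeholder assumptions, not part
+// of this module:
+//
+//   const cacache = require('cacache')
+//   const sri = await cacache.put('/tmp/my-cache', 'registry:foo', someBuffer)
+//   const { data, metadata } = await cacache.get('/tmp/my-cache', 'registry:foo')
+//   const raw = await cacache.get.byDigest('/tmp/my-cache', sri)
+//
+// put() resolves to the ssri integrity string of the written content; get()
+// resolves the key through the index, while get.byDigest() skips the index
+// lookup and reads the content store directly.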
+module.exports.get.stream.byDigest = get.stream.byDigest +module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js new file mode 100644 index 00000000000000..2ecc60912e4563 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js @@ -0,0 +1,72 @@ +'use strict' + +const { LRUCache } = require('lru-cache') + +const MEMOIZED = new LRUCache({ + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.clear() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/put.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/put.js new file mode 100644 index 00000000000000..9fc932d5f6dec5 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/put.js @@ -0,0 +1,80 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +async function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return 
res.integrity +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + let error + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + .on('error', (err) => { + error = err + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + async flush () { + if (!error) { + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) + } + }, + })) + + return pipeline +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js new file mode 100644 index 00000000000000..a94760c7cf2430 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const { rm } = require('fs/promises') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +async function all (cache) { + memo.clearMemoized() + const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) + return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js new file mode 100644 index 00000000000000..8500c1c16a429f --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js @@ -0,0 +1,7 @@ +'use strict' + +const { glob } = require('glob') +const path = require('path') + +const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) +module.exports = (path, options) => glob(globify(path), options) diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 00000000000000..445599b5038088 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js 
b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 00000000000000..0bf5302136ebeb --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,26 @@ +'use strict' + +const { withTempDir } = require('@npmcli/fs') +const fs = require('fs/promises') +const path = require('path') + +module.exports.mkdir = mktmpdir + +async function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return withTempDir(path.join(cache, 'tmp'), cb, opts) +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js new file mode 100644 index 00000000000000..d7423da1295b68 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js @@ -0,0 +1,257 @@ +'use strict' + +const { + mkdir, + readFile, + rm, + stat, + truncate, + writeFile, +} = require('fs/promises') +const pMap = require('p-map') +const contentPath = require('./content/path') +const fsm = require('fs-minipass') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const path = require('path') +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +async function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats +} + +async function markStartTime () { + return { startTime: new Date() } +} + +async function markEndTime () { + return { endTime: new Date() } +} + +async function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + await mkdir(cache, { recursive: true }) + return null +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rm it. 
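+// For illustration (hypothetical digest, and assuming the content-v2 layout
+// from this package's cache-version): an index entry whose integrity parses
+// to sha512 with hex digest 'abcd12…' marks the file at
+//   content-v2/sha512/ab/cd/12…
+// as live (the hex digest is split 2/2/rest, as in hashToSegments); any file
+// under the content dir whose reconstructed integrity is not in the live set
+// is unreferenced and gets removed in step 5.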
+// +async function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + // integrity is stringified, re-parse it so we can get each hash + const integrity = ssri.parse(entry.integrity) + for (const algo in integrity) { + liveContent.add(integrity[algo].toString()) + } + }) + await new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }) + const contentDir = contentPath.contentDir(cache) + const files = await glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await stat(f) + await rm(f, { recursive: true, force: true }) + stats.reclaimedSize += s.size + } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { + throw err + } + + await rm(filepath, { recursive: true, force: true }) + contentInfo.valid = false + } + return contentInfo +} + +async function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function rebuildBucket (cache, bucket, stats) { + await truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
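+  // For illustration: each index.insert() below appends one line of the form
+  //   <sha1 hex of the JSON>\t{"key":…,"integrity":…,"time":…,"size":…}
+  // (the same shape bucketEntries() validates on read), so entries are
+  // re-written one at a time into the truncated bucket file.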
+ for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + time: entry.time, + }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) +} + +async function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + return writeFile(verifile, `${Date.now()}`) +} + +module.exports.lastRun = lastRun + +async function lastRun (cache) { + const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/package.json new file mode 100644 index 00000000000000..6e6219158ed759 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/package.json @@ -0,0 +1,82 @@ +{ + "name": "cacache", + "version": "18.0.4", + "cache-version": { + "content": "2", + "index": "5" + }, + "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "coverage": "tap", + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/cacache.git" + }, + "keywords": [ + "cache", + "caching", + "content-addressable", + "sri", + "sri hash", + "subresource integrity", + "cache", + "storage", + "store", + "file store", + "filesystem", + "disk cache", + "disk storage" + ], + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", + "minipass-collect": "^2.0.1", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "windowsCI": false, + "version": "4.22.0", + "publish": "true" + }, + "author": "GitHub Inc.", + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE new file mode 100644 index 00000000000000..1808eb2844231c --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE @@ -0,0 +1,16 @@ +ISC License + +Copyright 2017-2022 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js new file mode 100644 index 00000000000000..bfcfacbcc95e18 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js @@ -0,0 +1,471 @@ +const { Request, Response } = require('minipass-fetch') +const { Minipass } = require('minipass') +const MinipassFlush = require('minipass-flush') +const cacache = require('cacache') +const url = require('url') + +const CachingMinipassPipeline = require('../pipeline.js') +const CachePolicy = require('./policy.js') +const cacheKey = require('./key.js') +const remote = require('../remote.js') + +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) + +// allow list for request headers that will be written to the cache index +// note: we will also store any request headers +// that are named in a response's vary header +const KEEP_REQUEST_HEADERS = [ + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept', + 'cache-control', +] + +// allow list for response headers that will be written to the cache index +// note: we must not store the real response's age header, or when we load +// a cache policy based on the metadata it will think the cached response +// is always stale +const KEEP_RESPONSE_HEADERS = [ + 'cache-control', + 'content-encoding', + 'content-language', + 'content-type', + 'date', + 'etag', + 'expires', + 'last-modified', + 'link', + 'location', + 'pragma', + 'vary', +] + +// return an object containing all metadata to be written to the index +const getMetadata = (request, response, options) => { + const metadata = { + time: Date.now(), + url: request.url, + reqHeaders: {}, + resHeaders: {}, + + // options on which we must match the request and vary the response + options: { + compress: options.compress != null ? 
options.compress : request.compress, + }, + } + + // only save the status if it's not a 200 or 304 + if (response.status !== 200 && response.status !== 304) { + metadata.status = response.status + } + + for (const name of KEEP_REQUEST_HEADERS) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + + // if the request's host header differs from the host in the url + // we need to keep it, otherwise it's just noise and we ignore it + const host = request.headers.get('host') + const parsedUrl = new url.URL(request.url) + if (host && parsedUrl.host !== host) { + metadata.reqHeaders.host = host + } + + // if the response has a vary header, make sure + // we store the relevant request headers too + if (response.headers.has('vary')) { + const vary = response.headers.get('vary') + // a vary of "*" means every header causes a different response. + // in that scenario, we do not include any additional headers + // as the freshness check will always fail anyway and we don't + // want to bloat the cache indexes + if (vary !== '*') { + // copy any other request headers that will vary the response + const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) + for (const name of varyHeaders) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + } + } + + for (const name of KEEP_RESPONSE_HEADERS) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + for (const name of options.cacheAdditionalHeaders) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + return metadata +} + +// symbols used to hide objects that may be lazily evaluated in a getter +const _request = Symbol('request') +const _response = Symbol('response') +const _policy = Symbol('policy') + +class CacheEntry { + constructor ({ entry, request, response, options }) { + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. 
we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else { + this.key = cacheKey(request) + } + + this.options = options + + // these properties are behind getters that lazily evaluate + this[_request] = request + this[_response] = response + this[_policy] = null + } + + // returns a CacheEntry instance that satisfies the given request + // or undefined if no existing entry satisfies + static async find (request, options) { + try { + // compacts the index and returns an array of unique entries + var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { + const entryA = new CacheEntry({ entry: A, options }) + const entryB = new CacheEntry({ entry: B, options }) + return entryA.policy.satisfies(entryB.request) + }, { + validateEntry: (entry) => { + // clean out entries with a buggy content-encoding value + if (entry.metadata && + entry.metadata.resHeaders && + entry.metadata.resHeaders['content-encoding'] === null) { + return false + } + + // if an integrity is null, it needs to have a status specified + if (entry.integrity === null) { + return !!(entry.metadata && entry.metadata.status) + } + + return true + }, + }) + } catch (err) { + // if the compact request fails, ignore the error and return + return + } + + // a cache mode of 'reload' means to behave as though we have no cache + // on the way to the network. return undefined to allow cacheFetch to + // create a brand new request no matter what. + if (options.cache === 'reload') { + return + } + + // find the specific entry that satisfies the request + let match + for (const entry of matches) { + const _entry = new CacheEntry({ + entry, + options, + }) + + if (_entry.policy.satisfies(request)) { + match = _entry + break + } + } + + return match + } + + // if the user made a PUT/POST/PATCH then we invalidate our + // cache for the same url by deleting the index entirely + static async invalidate (request, options) { + const key = cacheKey(request) + try { + await cacache.rm.entry(options.cachePath, key, { removeFully: true }) + } catch (err) { + // ignore errors + } + } + + get request () { + if (!this[_request]) { + this[_request] = new Request(this.entry.metadata.url, { + method: 'GET', + headers: this.entry.metadata.reqHeaders, + ...this.entry.metadata.options, + }) + } + + return this[_request] + } + + get response () { + if (!this[_response]) { + this[_response] = new Response(null, { + url: this.entry.metadata.url, + counter: this.options.counter, + status: this.entry.metadata.status || 200, + headers: { + ...this.entry.metadata.resHeaders, + 'content-length': this.entry.size, + }, + }) + } + + return this[_response] + } + + get policy () { + if (!this[_policy]) { + this[_policy] = new CachePolicy({ + entry: this.entry, + request: this.request, + response: this.response, + options: this.options, + }) + } + + return this[_policy] + } + + // wraps the response in a pipeline that stores the data + // in the cache while the user consumes it + async store (status) { + // if we got a status other than 200, 301, or 308, + // or the CachePolicy forbid storage, append the + // cache status header and return it untouched + if ( + this.request.method !== 'GET' || + ![200, 301, 308].includes(this.response.status) || + !this.policy.storable() + ) { + this.response.headers.set('x-local-cache-status', 'skip') + return this.response + } + + const size = 
this.response.headers.get('content-length') + const cacheOpts = { + algorithms: this.options.algorithms, + metadata: getMetadata(this.request, this.response, this.options), + size, + integrity: this.options.integrity, + integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, + } + + let body = null + // we only set a body if the status is a 200, redirects are + // stored as metadata only + if (this.response.status === 200) { + let cacheWriteResolve, cacheWriteReject + const cacheWritePromise = new Promise((resolve, reject) => { + cacheWriteResolve = resolve + cacheWriteReject = reject + }).catch((err) => { + body.emit('error', err) + }) + + body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ + flush () { + return cacheWritePromise + }, + })) + // this is always true since if we aren't reusing the one from the remote fetch, we + // are using the one from cacache + body.hasIntegrityEmitter = true + + const onResume = () => { + const tee = new Minipass() + const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) + // re-emit the integrity and size events on our new response body so they can be reused + cacheStream.on('integrity', i => body.emit('integrity', i)) + cacheStream.on('size', s => body.emit('size', s)) + // stick a flag on here so downstream users will know if they can expect integrity events + tee.pipe(cacheStream) + // TODO if the cache write fails, log a warning but return the response anyway + // eslint-disable-next-line promise/catch-or-return + cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) + body.unshift(tee) + body.unshift(this.response.body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + } else { + await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + } + + // note: we do not set the x-local-cache-hash header because we do not know + // the hash value until after the write to the cache completes, which doesn't + // happen until after the response has been sent and it's too late to write + // the header anyway + this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + this.response.headers.set('x-local-cache-mode', 'stream') + this.response.headers.set('x-local-cache-status', status) + this.response.headers.set('x-local-cache-time', new Date().toISOString()) + const newResponse = new Response(body, { + url: this.response.url, + status: this.response.status, + headers: this.response.headers, + counter: this.options.counter, + }) + return newResponse + } + + // use the cached data to create a response and return it + async respond (method, options, status) { + let response + if (method === 'HEAD' || [301, 308].includes(this.response.status)) { + // if the request is a HEAD, or the response is a redirect, + // then the metadata in the entry already includes everything + // we need to build a response + response = this.response + } else { + // we're responding with a full cached response, so create a body + // that reads from cacache and attach it to a new Response + const body = new Minipass() + const headers = { ...this.policy.responseHeaders() } + + const onResume = () => { + const cacheStream = cacache.get.stream.byDigest( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if 
(err.code === 'EINTEGRITY') { + await cacache.rm.content( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + } + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { + await CacheEntry.invalidate(this.request, this.options) + } + body.emit('error', err) + cacheStream.resume() + }) + // emit the integrity and size events based on our metadata so we're consistent + body.emit('integrity', this.entry.integrity) + body.emit('size', Number(headers['content-length'])) + cacheStream.pipe(body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + response = new Response(body, { + url: this.entry.metadata.url, + counter: options.counter, + status: 200, + headers, + }) + } + + response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) + response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + response.headers.set('x-local-cache-mode', 'stream') + response.headers.set('x-local-cache-status', status) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) + return response + } + + // use the provided request along with this cache entry to + // revalidate the stored response. returns a response, either + // from the cache or from the update + async revalidate (request, options) { + const revalidateRequest = new Request(request, { + headers: this.policy.revalidationHeaders(request), + }) + + try { + // NOTE: be sure to remove the headers property from the + // user supplied options, since we have already defined + // them on the new request object. if they're still in the + // options then those will overwrite the ones from the policy + var response = await remote(revalidateRequest, { + ...options, + headers: undefined, + }) + } catch (err) { + // if the network fetch fails, return the stale + // cached response unless it has a cache-control + // of 'must-revalidate' + if (!this.policy.mustRevalidate) { + return this.respond(request.method, options, 'stale') + } + + throw err + } + + if (this.policy.revalidated(revalidateRequest, response)) { + // we got a 304, write a new index to the cache and respond from cache + const metadata = getMetadata(request, response, options) + // 304 responses do not include headers that are specific to the response data + // since they do not include a body, so we copy values for headers that were + // in the old cache entry to the new one, if the new metadata does not already + // include that header + for (const name of KEEP_RESPONSE_HEADERS) { + if ( + !hasOwnProperty(metadata.resHeaders, name) && + hasOwnProperty(this.entry.metadata.resHeaders, name) + ) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + } + + for (const name of options.cacheAdditionalHeaders) { + const inMeta = hasOwnProperty(metadata.resHeaders, name) + const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) + const inPolicy = hasOwnProperty(this.policy.response.headers, name) + + // if the header is in the existing entry, but it is not in the metadata + // then we need to write it to the metadata as this will refresh the on-disk cache + if (!inMeta && inEntry) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + // if the header is in the metadata, but not in the policy, then we need to set + // it in the policy so that it's included in the immediate response. 
future + // responses will load a new cache entry, so we don't need to change that + if (!inPolicy && inMeta) { + this.policy.response.headers[name] = metadata.resHeaders[name] + } + } + + try { + await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { + size: this.entry.size, + metadata, + }) + } catch (err) { + // if updating the cache index fails, we ignore it and + // respond anyway + } + return this.respond(request.method, options, 'revalidated') + } + + // if we got a modified response, create a new entry based on it + const newEntry = new CacheEntry({ + request, + response, + options, + }) + + // respond with the new entry while writing it to the cache + return newEntry.store('updated') + } +} + +module.exports = CacheEntry diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js new file mode 100644 index 00000000000000..67a66573bebe66 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js @@ -0,0 +1,11 @@ +class NotCachedError extends Error { + constructor (url) { + /* eslint-disable-next-line max-len */ + super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) + this.code = 'ENOTCACHED' + } +} + +module.exports = { + NotCachedError, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js new file mode 100644 index 00000000000000..0de49d23fb9336 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js @@ -0,0 +1,49 @@ +const { NotCachedError } = require('./errors.js') +const CacheEntry = require('./entry.js') +const remote = require('../remote.js') + +// do whatever is necessary to get a Response and return it +const cacheFetch = async (request, options) => { + // try to find a cached entry that satisfies this request + const entry = await CacheEntry.find(request, options) + if (!entry) { + // no cached result, if the cache mode is 'only-if-cached' that's a failure + if (options.cache === 'only-if-cached') { + throw new NotCachedError(request.url) + } + + // otherwise, we make a request, store it and return it + const response = await remote(request, options) + const newEntry = new CacheEntry({ request, response, options }) + return newEntry.store('miss') + } + + // we have a cached response that satisfies this request, however if the cache + // mode is 'no-cache' then we send the revalidation request no matter what + if (options.cache === 'no-cache') { + return entry.revalidate(request, options) + } + + // if the cached entry is not stale, or if the cache mode is 'force-cache' or + // 'only-if-cached' we can respond with the cached entry. set the status + // based on the result of needsRevalidation and respond + const _needsRevalidation = entry.policy.needsRevalidation(request) + if (options.cache === 'force-cache' || + options.cache === 'only-if-cached' || + !_needsRevalidation) { + return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') + } + + // if we got here, the cache entry is stale so revalidate it + return entry.revalidate(request, options) +} + +cacheFetch.invalidate = async (request, options) => { + if (!options.cachePath) { + return + } + + return CacheEntry.invalidate(request, options) +} + +module.exports = cacheFetch diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js new file mode 100644 index 00000000000000..f7684d562b7fae --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js @@ -0,0 +1,17 @@ +const { URL, format } = require('url') + +// options passed to url.format() when generating a key +const formatOptions = { + auth: false, + fragment: false, + search: true, + unicode: false, +} + +// returns a string to be used as the cache key for the Request +const cacheKey = (request) => { + const parsed = new URL(request.url) + return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` +} + +module.exports = cacheKey diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js new file mode 100644 index 00000000000000..ada3c8600dae92 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js @@ -0,0 +1,161 @@ +const CacheSemantics = require('http-cache-semantics') +const Negotiator = require('negotiator') +const ssri = require('ssri') + +// options passed to http-cache-semantics constructor +const policyOptions = { + shared: false, + ignoreCargoCult: true, +} + +// a fake empty response, used when only testing the +// request for storability +const emptyResponse = { status: 200, headers: {} } + +// returns a plain object representation of the Request +const requestObject = (request) => { + const _obj = { + method: request.method, + url: request.url, + headers: {}, + compress: request.compress, + } + + request.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +// returns a plain object representation of the Response +const responseObject = (response) => { + const _obj = { + status: response.status, + headers: {}, + } + + response.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +class CachePolicy { + constructor ({ entry, request, response, options }) { + this.entry = entry + this.request = requestObject(request) + this.response = responseObject(response) + this.options = options + this.policy = new CacheSemantics(this.request, this.response, policyOptions) + + if (this.entry) { + // if we have an entry, copy the timestamp to the _responseTime + // this is necessary because the CacheSemantics constructor forces + // the value to Date.now() which means a policy created from a + // cache entry is likely to always identify itself as stale + this.policy._responseTime = this.entry.metadata.time + } + } + + // static method to quickly determine if a request alone is storable + static storable (request, options) { + // no cachePath means no caching + if (!options.cachePath) { + return false + } + + // user explicitly asked not to cache + if (options.cache === 'no-store') { + return false + } + + // we only cache GET and HEAD requests + if (!['GET', 'HEAD'].includes(request.method)) { + return false + } + + // otherwise, let http-cache-semantics make the 
decision + // based on the request's headers + const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) + return policy.storable() + } + + // returns true if the policy satisfies the request + satisfies (request) { + const _req = requestObject(request) + if (this.request.headers.host !== _req.headers.host) { + return false + } + + if (this.request.compress !== _req.compress) { + return false + } + + const negotiatorA = new Negotiator(this.request) + const negotiatorB = new Negotiator(_req) + + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { + return false + } + + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { + return false + } + + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { + return false + } + + if (this.options.integrity) { + return ssri.parse(this.options.integrity).match(this.entry.integrity) + } + + return true + } + + // returns true if the request and response allow caching + storable () { + return this.policy.storable() + } + + // NOTE: this is a hack to avoid parsing the cache-control + // header ourselves, it returns true if the response's + // cache-control contains must-revalidate + get mustRevalidate () { + return !!this.policy._rescc['must-revalidate'] + } + + // returns true if the cached response requires revalidation + // for the given request + needsRevalidation (request) { + const _req = requestObject(request) + // force method to GET because we only cache GETs + // but can serve a HEAD from a cached GET + _req.method = 'GET' + return !this.policy.satisfiesWithoutRevalidation(_req) + } + + responseHeaders () { + return this.policy.responseHeaders() + } + + // returns a new object containing the appropriate headers + // to send a revalidation request + revalidationHeaders (request) { + const _req = requestObject(request) + return this.policy.revalidationHeaders(_req) + } + + // returns true if the request/response was revalidated + // successfully. returns false if a new response was received + revalidated (request, response) { + const _req = requestObject(request) + const _res = responseObject(response) + const policy = this.policy.revalidatedPolicy(_req, _res) + return !policy.modified + } +} + +module.exports = CachePolicy diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js new file mode 100644 index 00000000000000..233ba67e165502 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js @@ -0,0 +1,118 @@ +'use strict' + +const { FetchError, Request, isRedirect } = require('minipass-fetch') +const url = require('url') + +const CachePolicy = require('./cache/policy.js') +const cache = require('./cache/index.js') +const remote = require('./remote.js') + +// given a Request, a Response and user options +// return true if the response is a redirect that +// can be followed. 
we throw errors that will result +// in the fetch being rejected if the redirect is +// possible but invalid for some reason +const canFollowRedirect = (request, response, options) => { + if (!isRedirect(response.status)) { + return false + } + + if (options.redirect === 'manual') { + return false + } + + if (options.redirect === 'error') { + throw new FetchError(`redirect mode is set to error: ${request.url}`, + 'no-redirect', { code: 'ENOREDIRECT' }) + } + + if (!response.headers.has('location')) { + throw new FetchError(`redirect location header missing for: ${request.url}`, + 'no-location', { code: 'EINVALIDREDIRECT' }) + } + + if (request.counter >= request.follow) { + throw new FetchError(`maximum redirect reached at: ${request.url}`, + 'max-redirect', { code: 'EMAXREDIRECT' }) + } + + return true +} + +// given a Request, a Response, and the user's options return an object +// with a new Request and a new options object that will be used for +// following the redirect +const getRedirect = (request, response, options) => { + const _opts = { ...options } + const location = response.headers.get('location') + const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) + // Comment below is used under the following license: + /** + * @license + * Copyright (c) 2010-2012 Mikeal Rogers + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an "AS + * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (new url.URL(request.url).hostname !== redirectUrl.hostname) { + request.headers.delete('authorization') + request.headers.delete('cookie') + } + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if ( + response.status === 303 || + (request.method === 'POST' && [301, 302].includes(response.status)) + ) { + _opts.method = 'GET' + _opts.body = null + request.headers.delete('content-length') + } + + _opts.headers = {} + request.headers.forEach((value, key) => { + _opts.headers[key] = value + }) + + _opts.counter = ++request.counter + const redirectReq = new Request(url.format(redirectUrl), _opts) + return { + request: redirectReq, + options: _opts, + } +} + +const fetch = async (request, options) => { + const response = CachePolicy.storable(request, options) + ? 
await cache(request, options) + : await remote(request, options) + + // if the request wasn't a GET or HEAD, and the response + // status is between 200 and 399 inclusive, invalidate the + // request url + if (!['GET', 'HEAD'].includes(request.method) && + response.status >= 200 && + response.status <= 399) { + await cache.invalidate(request, options) + } + + if (!canFollowRedirect(request, response, options)) { + return response + } + + const redirect = getRedirect(request, response, options) + return fetch(redirect.request, redirect.options) +} + +module.exports = fetch diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js new file mode 100644 index 00000000000000..2f12e8e1b61131 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js @@ -0,0 +1,41 @@ +const { FetchError, Headers, Request, Response } = require('minipass-fetch') + +const configureOptions = require('./options.js') +const fetch = require('./fetch.js') + +const makeFetchHappen = (url, opts) => { + const options = configureOptions(opts) + + const request = new Request(url, options) + return fetch(request, options) +} + +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { + if (typeof defaultUrl === 'object') { + defaultOptions = defaultUrl + defaultUrl = null + } + + const defaultedFetch = (url, options = {}) => { + const finalUrl = url || defaultUrl + const finalOptions = { + ...defaultOptions, + ...options, + headers: { + ...defaultOptions.headers, + ...options.headers, + }, + } + return wrappedFetch(finalUrl, finalOptions) + } + + defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => + makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) + return defaultedFetch +} + +module.exports = makeFetchHappen +module.exports.FetchError = FetchError +module.exports.Headers = Headers +module.exports.Request = Request +module.exports.Response = Response diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js new file mode 100644 index 00000000000000..f77511279f831d --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js @@ -0,0 +1,54 @@ +const dns = require('dns') + +const conditionalHeaders = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 'if-match', + 'if-range', +] + +const configureOptions = (opts) => { + const { strictSSL, ...options } = { ...opts } + options.method = options.method ? 
options.method.toUpperCase() : 'GET' + options.rejectUnauthorized = strictSSL !== false + + if (!options.retry) { + options.retry = { retries: 0 } + } else if (typeof options.retry === 'string') { + const retries = parseInt(options.retry, 10) + if (isFinite(retries)) { + options.retry = { retries } + } else { + options.retry = { retries: 0 } + } + } else if (typeof options.retry === 'number') { + options.retry = { retries: options.retry } + } else { + options.retry = { retries: 0, ...options.retry } + } + + options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } + + options.cache = options.cache || 'default' + if (options.cache === 'default') { + const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { + return conditionalHeaders.includes(name.toLowerCase()) + }) + if (hasConditionalHeader) { + options.cache = 'no-store' + } + } + + options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] + + // cacheManager is deprecated, but if it's set and + // cachePath is not we should copy it to the new field + if (options.cacheManager && !options.cachePath) { + options.cachePath = options.cacheManager + } + + return options +} + +module.exports = configureOptions diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js new file mode 100644 index 00000000000000..b1d221b2d0ce31 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js @@ -0,0 +1,41 @@ +'use strict' + +const MinipassPipeline = require('minipass-pipeline') + +class CachingMinipassPipeline extends MinipassPipeline { + #events = [] + #data = new Map() + + constructor (opts, ...streams) { + // CRITICAL: do NOT pass the streams to the call to super(), this will start + // the flow of data and potentially cause the events we need to catch to emit + // before we've finished our own setup. 
instead we call super() with no args, + // finish our setup, and then push the streams into ourselves to start the + // data flow + super() + this.#events = opts.events + + /* istanbul ignore next - coverage disabled because this is pointless to test here */ + if (streams.length) { + this.push(...streams) + } + } + + on (event, handler) { + if (this.#events.includes(event) && this.#data.has(event)) { + return handler(...this.#data.get(event)) + } + + return super.on(event, handler) + } + + emit (event, ...data) { + if (this.#events.includes(event)) { + this.#data.set(event, data) + } + + return super.emit(event, ...data) + } +} + +module.exports = CachingMinipassPipeline diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js new file mode 100644 index 00000000000000..8554564074de6e --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js @@ -0,0 +1,131 @@ +const { Minipass } = require('minipass') +const fetch = require('minipass-fetch') +const promiseRetry = require('promise-retry') +const ssri = require('ssri') +const { log } = require('proc-log') + +const CachingMinipassPipeline = require('./pipeline.js') +const { getAgent } = require('@npmcli/agent') +const pkg = require('../package.json') + +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) + 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + // from @npmcli/agent + 'ECONNECTIONTIMEOUT', + 'EIDLETIMEOUT', + 'ERESPONSETIMEOUT', + 'ETRANSFERTIMEOUT', + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) + // EINVALIDPROXY // invalid protocol from @npmcli/agent + // EINVALIDRESPONSE // invalid status code from @npmcli/agent +] + +const RETRY_TYPES = [ + 'request-timeout', +] + +// make a request directly to the remote source, +// retrying certain classes of errors as well as +// following redirects (through the cache if necessary) +// and verifying response integrity +const remoteFetch = (request, options) => { + const agent = getAgent(request.url, options) + if (!request.headers.has('connection')) { + request.headers.set('connection', agent ? 
'keep-alive' : 'close') + } + + if (!request.headers.has('user-agent')) { + request.headers.set('user-agent', USER_AGENT) + } + + // keep our own options since we're overriding the agent + // and the redirect mode + const _opts = { + ...options, + agent, + redirect: 'manual', + } + + return promiseRetry(async (retryHandler, attemptNum) => { + const req = new fetch.Request(request, _opts) + try { + let res = await fetch(req, _opts) + if (_opts.integrity && res.status === 200) { + // we got a 200 response and the user has specified an expected + // integrity value, so wrap the response in an ssri stream to verify it + const integrityStream = ssri.integrityStream({ + algorithms: _opts.algorithms, + integrity: _opts.integrity, + size: _opts.size, + }) + const pipeline = new CachingMinipassPipeline({ + events: ['integrity', 'size'], + }, res.body, integrityStream) + // we also propagate the integrity and size events out to the pipeline so we can use + // this new response body as an integrityEmitter for cacache + integrityStream.on('integrity', i => pipeline.emit('integrity', i)) + integrityStream.on('size', s => pipeline.emit('size', s)) + res = new fetch.Response(pipeline, res) + // set an explicit flag so we know if our response body will emit integrity and size + res.body.hasIntegrityEmitter = true + } + + res.headers.set('x-fetch-attempts', attemptNum) + + // do not retry POST requests, or requests with a streaming body + // do retry requests with a 408, 420, 429 or 500+ status in the response + const isStream = Minipass.isStream(req.body) + const isRetriable = req.method !== 'POST' && + !isStream && + ([408, 420, 429].includes(res.status) || res.status >= 500) + + if (isRetriable) { + if (typeof options.onRetry === 'function') { + options.onRetry(res) + } + + /* eslint-disable-next-line max-len */ + log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`) + return retryHandler(res) + } + + return res + } catch (err) { + const code = (err.code === 'EPROMISERETRY') + ? 
err.retried.code + : err.code + + // err.retried will be the thing that was thrown from above + // if it's a response, we just got a bad status code and we + // can re-throw to allow the retry + const isRetryError = err.retried instanceof fetch.Response || + (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) + + if (req.method === 'POST' || isRetryError) { + throw err + } + + if (typeof options.onRetry === 'function') { + options.onRetry(err) + } + + log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`) + return retryHandler(err) + } + }, options.retry).catch((err) => { + // don't reject for http errors, just return them + if (err.status >= 400 && err.type !== 'system') { + return err + } + + throw err + }) +} + +module.exports = remoteFetch diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json new file mode 100644 index 00000000000000..7adb4d1e7f9719 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json @@ -0,0 +1,75 @@ +{ + "name": "make-fetch-happen", + "version": "13.0.1", + "description": "Opinionated, caching, retrying fetch client", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "posttest": "npm run lint", + "eslint": "eslint", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/make-fetch-happen.git" + }, + "keywords": [ + "http", + "request", + "fetch", + "mean girls", + "caching", + "cache", + "subresource integrity" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.4", + "nock": "^13.2.4", + "safe-buffer": "^5.2.1", + "standard-version": "^9.3.2", + "tap": "^16.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "tap": { + "color": 1, + "files": "test/*.js", + "check-coverage": true, + "timeout": 60, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.4", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE new file mode 100644 index 00000000000000..3c3410cdc12ee3 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE @@ -0,0 +1,28 @@ +The MIT License (MIT) + +Copyright (c) Isaac Z. 
Schlueter and Contributors +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +--- + +Note: This is a derivative work based on "node-fetch" by David Frank, +modified and distributed under the terms of the MIT license above. +https://github.com/bitinn/node-fetch diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js new file mode 100644 index 00000000000000..b18f643269e375 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js @@ -0,0 +1,17 @@ +'use strict' +class AbortError extends Error { + constructor (message) { + super(message) + this.code = 'FETCH_ABORTED' + this.type = 'aborted' + Error.captureStackTrace(this, this.constructor) + } + + get name () { + return 'AbortError' + } + + // don't allow name to be overridden, but don't throw either + set name (s) {} +} +module.exports = AbortError diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js new file mode 100644 index 00000000000000..121b1730102e72 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js @@ -0,0 +1,97 @@ +'use strict' +const { Minipass } = require('minipass') +const TYPE = Symbol('type') +const BUFFER = Symbol('buffer') + +class Blob { + constructor (blobParts, options) { + this[TYPE] = '' + + const buffers = [] + let size = 0 + + if (blobParts) { + const a = blobParts + const length = Number(a.length) + for (let i = 0; i < length; i++) { + const element = a[i] + const buffer = element instanceof Buffer ? element + : ArrayBuffer.isView(element) + ? Buffer.from(element.buffer, element.byteOffset, element.byteLength) + : element instanceof ArrayBuffer ? Buffer.from(element) + : element instanceof Blob ? element[BUFFER] + : typeof element === 'string' ? 
Buffer.from(element) + : Buffer.from(String(element)) + size += buffer.length + buffers.push(buffer) + } + } + + this[BUFFER] = Buffer.concat(buffers, size) + + const type = options && options.type !== undefined + && String(options.type).toLowerCase() + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type + } + } + + get size () { + return this[BUFFER].length + } + + get type () { + return this[TYPE] + } + + text () { + return Promise.resolve(this[BUFFER].toString()) + } + + arrayBuffer () { + const buf = this[BUFFER] + const off = buf.byteOffset + const len = buf.byteLength + const ab = buf.buffer.slice(off, off + len) + return Promise.resolve(ab) + } + + stream () { + return new Minipass().end(this[BUFFER]) + } + + slice (start, end, type) { + const size = this.size + const relativeStart = start === undefined ? 0 + : start < 0 ? Math.max(size + start, 0) + : Math.min(start, size) + const relativeEnd = end === undefined ? size + : end < 0 ? Math.max(size + end, 0) + : Math.min(end, size) + const span = Math.max(relativeEnd - relativeStart, 0) + + const buffer = this[BUFFER] + const slicedBuffer = buffer.slice( + relativeStart, + relativeStart + span + ) + const blob = new Blob([], { type }) + blob[BUFFER] = slicedBuffer + return blob + } + + get [Symbol.toStringTag] () { + return 'Blob' + } + + static get BUFFER () { + return BUFFER + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, +}) + +module.exports = Blob diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js new file mode 100644 index 00000000000000..62286bd1de0d91 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js @@ -0,0 +1,350 @@ +'use strict' +const { Minipass } = require('minipass') +const MinipassSized = require('minipass-sized') + +const Blob = require('./blob.js') +const { BUFFER } = Blob +const FetchError = require('./fetch-error.js') + +// optional dependency on 'encoding' +let convert +try { + convert = require('encoding').convert +} catch (e) { + // defer error until textConverted is called +} + +const INTERNALS = Symbol('Body internals') +const CONSUME_BODY = Symbol('consumeBody') + +class Body { + constructor (bodyArg, options = {}) { + const { size = 0, timeout = 0 } = options + const body = bodyArg === undefined || bodyArg === null ? null + : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString()) + : isBlob(bodyArg) ? bodyArg + : Buffer.isBuffer(bodyArg) ? bodyArg + : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]' + ? Buffer.from(bodyArg) + : ArrayBuffer.isView(bodyArg) + ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength) + : Minipass.isStream(bodyArg) ? bodyArg + : Buffer.from(String(bodyArg)) + + this[INTERNALS] = { + body, + disturbed: false, + error: null, + } + + this.size = size + this.timeout = timeout + + if (Minipass.isStream(body)) { + body.on('error', er => { + const error = er.name === 'AbortError' ? 
er + : new FetchError(`Invalid response while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + this[INTERNALS].error = error + }) + } + } + + get body () { + return this[INTERNALS].body + } + + get bodyUsed () { + return this[INTERNALS].disturbed + } + + arrayBuffer () { + return this[CONSUME_BODY]().then(buf => + buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)) + } + + blob () { + const ct = this.headers && this.headers.get('content-type') || '' + return this[CONSUME_BODY]().then(buf => Object.assign( + new Blob([], { type: ct.toLowerCase() }), + { [BUFFER]: buf } + )) + } + + async json () { + const buf = await this[CONSUME_BODY]() + try { + return JSON.parse(buf.toString()) + } catch (er) { + throw new FetchError( + `invalid json response body at ${this.url} reason: ${er.message}`, + 'invalid-json' + ) + } + } + + text () { + return this[CONSUME_BODY]().then(buf => buf.toString()) + } + + buffer () { + return this[CONSUME_BODY]() + } + + textConverted () { + return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers)) + } + + [CONSUME_BODY] () { + if (this[INTERNALS].disturbed) { + return Promise.reject(new TypeError(`body used already for: ${ + this.url}`)) + } + + this[INTERNALS].disturbed = true + + if (this[INTERNALS].error) { + return Promise.reject(this[INTERNALS].error) + } + + // body is null + if (this.body === null) { + return Promise.resolve(Buffer.alloc(0)) + } + + if (Buffer.isBuffer(this.body)) { + return Promise.resolve(this.body) + } + + const upstream = isBlob(this.body) ? this.body.stream() : this.body + + /* istanbul ignore if: should never happen */ + if (!Minipass.isStream(upstream)) { + return Promise.resolve(Buffer.alloc(0)) + } + + const stream = this.size && upstream instanceof MinipassSized ? upstream + : !this.size && upstream instanceof Minipass && + !(upstream instanceof MinipassSized) ? upstream + : this.size ? new MinipassSized({ size: this.size }) + : new Minipass() + + // allow timeout on slow response body, but only if the stream is still writable. this + // makes the timeout center on the socket stream from lib/index.js rather than the + // intermediary minipass stream we create to receive the data + const resTimeout = this.timeout && stream.writable ? setTimeout(() => { + stream.emit('error', new FetchError( + `Response timeout while trying to fetch ${ + this.url} (over ${this.timeout}ms)`, 'body-timeout')) + }, this.timeout) : null + + // do not keep the process open just for this timeout, even + // though we expect it'll get cleared eventually. + if (resTimeout && resTimeout.unref) { + resTimeout.unref() + } + + // do the pipe in the promise, because the pipe() can send too much + // data through right away and upset the MP Sized object + return new Promise((resolve) => { + // if the stream is some other kind of stream, then pipe through a MP + // so we can collect it more easily. 
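+ // (when the source is already the collector there is nothing to pipe; otherwise, errors on the source are forwarded so they surface on the collecting stream)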
+ if (stream !== upstream) { + upstream.on('error', er => stream.emit('error', er)) + upstream.pipe(stream) + } + resolve() + }).then(() => stream.concat()).then(buf => { + clearTimeout(resTimeout) + return buf + }).catch(er => { + clearTimeout(resTimeout) + // request was aborted, reject with this Error + if (er.name === 'AbortError' || er.name === 'FetchError') { + throw er + } else if (er.name === 'RangeError') { + throw new FetchError(`Could not create Buffer from response body for ${ + this.url}: ${er.message}`, 'system', er) + } else { + // other errors, such as incorrect content-encoding or content-length + throw new FetchError(`Invalid response body while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + } + }) + } + + static clone (instance) { + if (instance.bodyUsed) { + throw new Error('cannot clone body after it is used') + } + + const body = instance.body + + // check that body is a stream and not form-data object + // NB: can't clone the form-data object without having it as a dependency + if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') { + // create a dedicated tee stream so that we don't lose data + // potentially sitting in the body stream's buffer by writing it + // immediately to p1 and not having it for p2. + const tee = new Minipass() + const p1 = new Minipass() + const p2 = new Minipass() + tee.on('error', er => { + p1.emit('error', er) + p2.emit('error', er) + }) + body.on('error', er => tee.emit('error', er)) + tee.pipe(p1) + tee.pipe(p2) + body.pipe(tee) + // set instance body to one fork, return the other + instance[INTERNALS].body = p1 + return p2 + } else { + return instance.body + } + } + + static extractContentType (body) { + return body === null || body === undefined ? null + : typeof body === 'string' ? 'text/plain;charset=UTF-8' + : isURLSearchParams(body) + ? 'application/x-www-form-urlencoded;charset=UTF-8' + : isBlob(body) ? body.type || null + : Buffer.isBuffer(body) ? null + : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null + : ArrayBuffer.isView(body) ? null + : typeof body.getBoundary === 'function' + ? `multipart/form-data;boundary=${body.getBoundary()}` + : Minipass.isStream(body) ? null + : 'text/plain;charset=UTF-8' + } + + static getTotalBytes (instance) { + const { body } = instance + return (body === null || body === undefined) ? 0 + : isBlob(body) ? body.size + : Buffer.isBuffer(body) ? body.length + : body && typeof body.getLengthSync === 'function' && ( + // detect form data input from form-data module + body._lengthRetrievers && + /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x + body.hasKnownLength && body.hasKnownLength()) // 2.x + ? body.getLengthSync() + : null + } + + static writeToStream (dest, instance) { + const { body } = instance + + if (body === null || body === undefined) { + dest.end() + } else if (Buffer.isBuffer(body) || typeof body === 'string') { + dest.end(body) + } else { + // body is stream or blob + const stream = isBlob(body) ? body.stream() : body + stream.on('error', er => dest.emit('error', er)).pipe(dest) + } + + return dest + } +} + +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true }, +}) + +const isURLSearchParams = obj => + // Duck-typing as a necessary condition. 
+ (typeof obj !== 'object' || + typeof obj.append !== 'function' || + typeof obj.delete !== 'function' || + typeof obj.get !== 'function' || + typeof obj.getAll !== 'function' || + typeof obj.has !== 'function' || + typeof obj.set !== 'function') ? false + // Brand-checking and more duck-typing as optional condition. + : obj.constructor.name === 'URLSearchParams' || + Object.prototype.toString.call(obj) === '[object URLSearchParams]' || + typeof obj.sort === 'function' + +const isBlob = obj => + typeof obj === 'object' && + typeof obj.arrayBuffer === 'function' && + typeof obj.type === 'string' && + typeof obj.stream === 'function' && + typeof obj.constructor === 'function' && + typeof obj.constructor.name === 'string' && + /^(Blob|File)$/.test(obj.constructor.name) && + /^(Blob|File)$/.test(obj[Symbol.toStringTag]) + +const convertBody = (buffer, headers) => { + /* istanbul ignore if */ + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function') + } + + const ct = headers && headers.get('content-type') + let charset = 'utf-8' + let res + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct) + } + + // no charset in content type, peek at response body for at most 1024 bytes + const str = buffer.slice(0, 1024).toString() + + // html5 + if (!res && str) { + res = / this.expect + ? 'max-size' : type + this.message = message + Error.captureStackTrace(this, this.constructor) + } + + get name () { + return 'FetchError' + } + + // don't allow name to be overwritten + set name (n) {} + + get [Symbol.toStringTag] () { + return 'FetchError' + } +} +module.exports = FetchError diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js new file mode 100644 index 00000000000000..dd6e854d5ba399 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js @@ -0,0 +1,267 @@ +'use strict' +const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/ +const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/ + +const validateName = name => { + name = `${name}` + if (invalidTokenRegex.test(name) || name === '') { + throw new TypeError(`${name} is not a legal HTTP header name`) + } +} + +const validateValue = value => { + value = `${value}` + if (invalidHeaderCharRegex.test(value)) { + throw new TypeError(`${value} is not a legal HTTP header value`) + } +} + +const find = (map, name) => { + name = name.toLowerCase() + for (const key in map) { + if (key.toLowerCase() === name) { + return key + } + } + return undefined +} + +const MAP = Symbol('map') +class Headers { + constructor (init = undefined) { + this[MAP] = Object.create(null) + if (init instanceof Headers) { + const rawHeaders = init.raw() + const headerNames = Object.keys(rawHeaders) + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value) + } + } + return + } + + // no-op + if (init === undefined || init === null) { + return + } + + if (typeof init === 'object') { + const method = init[Symbol.iterator] + if (method !== null && method !== undefined) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable') + } + + // sequence<sequence<ByteString>> + // Note: per spec we have to first exhaust the lists then process them + const pairs = [] + for (const pair of init) { + if (typeof pair !== 'object' || + typeof pair[Symbol.iterator] !==
'function') { + throw new TypeError('Each header pair must be iterable') + } + const arrPair = Array.from(pair) + if (arrPair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple') + } + pairs.push(arrPair) + } + + for (const pair of pairs) { + this.append(pair[0], pair[1]) + } + } else { + // record + for (const key of Object.keys(init)) { + this.append(key, init[key]) + } + } + } else { + throw new TypeError('Provided initializer must be an object') + } + } + + get (name) { + name = `${name}` + validateName(name) + const key = find(this[MAP], name) + if (key === undefined) { + return null + } + + return this[MAP][key].join(', ') + } + + forEach (callback, thisArg = undefined) { + let pairs = getHeaders(this) + for (let i = 0; i < pairs.length; i++) { + const [name, value] = pairs[i] + callback.call(thisArg, value, name, this) + // refresh in case the callback added more headers + pairs = getHeaders(this) + } + } + + set (name, value) { + name = `${name}` + value = `${value}` + validateName(name) + validateValue(value) + const key = find(this[MAP], name) + this[MAP][key !== undefined ? key : name] = [value] + } + + append (name, value) { + name = `${name}` + value = `${value}` + validateName(name) + validateValue(value) + const key = find(this[MAP], name) + if (key !== undefined) { + this[MAP][key].push(value) + } else { + this[MAP][name] = [value] + } + } + + has (name) { + name = `${name}` + validateName(name) + return find(this[MAP], name) !== undefined + } + + delete (name) { + name = `${name}` + validateName(name) + const key = find(this[MAP], name) + if (key !== undefined) { + delete this[MAP][key] + } + } + + raw () { + return this[MAP] + } + + keys () { + return new HeadersIterator(this, 'key') + } + + values () { + return new HeadersIterator(this, 'value') + } + + [Symbol.iterator] () { + return new HeadersIterator(this, 'key+value') + } + + entries () { + return new HeadersIterator(this, 'key+value') + } + + get [Symbol.toStringTag] () { + return 'Headers' + } + + static exportNodeCompatibleHeaders (headers) { + const obj = Object.assign(Object.create(null), headers[MAP]) + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host') + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0] + } + + return obj + } + + static createHeadersLenient (obj) { + const headers = new Headers() + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue + } + + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue + } + + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val] + } else { + headers[MAP][name].push(val) + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]] + } + } + return headers + } +} + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true }, +}) + +const getHeaders = (headers, kind = 'key+value') => + Object.keys(headers[MAP]).sort().map( + kind === 'key' ? k => k.toLowerCase() + : kind === 'value' ? 
k => headers[MAP][k].join(', ') + : k => [k.toLowerCase(), headers[MAP][k].join(', ')] + ) + +const INTERNAL = Symbol('internal') + +class HeadersIterator { + constructor (target, kind) { + this[INTERNAL] = { + target, + kind, + index: 0, + } + } + + get [Symbol.toStringTag] () { + return 'HeadersIterator' + } + + next () { + /* istanbul ignore if: should be impossible */ + if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) { + throw new TypeError('Value of `this` is not a HeadersIterator') + } + + const { target, kind, index } = this[INTERNAL] + const values = getHeaders(target, kind) + const len = values.length + if (index >= len) { + return { + value: undefined, + done: true, + } + } + + this[INTERNAL].index++ + + return { value: values[index], done: false } + } +} + +// manually extend because 'extends' requires a ctor +Object.setPrototypeOf(HeadersIterator.prototype, + Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))) + +module.exports = Headers diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js new file mode 100644 index 00000000000000..da402161670e65 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js @@ -0,0 +1,377 @@ +'use strict' +const { URL } = require('url') +const http = require('http') +const https = require('https') +const zlib = require('minizlib') +const { Minipass } = require('minipass') + +const Body = require('./body.js') +const { writeToStream, getTotalBytes } = Body +const Response = require('./response.js') +const Headers = require('./headers.js') +const { createHeadersLenient } = Headers +const Request = require('./request.js') +const { getNodeRequestOptions } = Request +const FetchError = require('./fetch-error.js') +const AbortError = require('./abort-error.js') + +// XXX this should really be split up and unit-ized for easier testing +// and better DRY implementation of data/http request aborting +const fetch = async (url, opts) => { + if (/^data:/.test(url)) { + const request = new Request(url, opts) + // delay 1 promise tick so that the consumer can abort right away + return Promise.resolve().then(() => new Promise((resolve, reject) => { + let type, data + try { + const { pathname, search } = new URL(url) + const split = pathname.split(',') + if (split.length < 2) { + throw new Error('invalid data: URI') + } + const mime = split.shift() + const base64 = /;base64$/.test(mime) + type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime + const rawData = decodeURIComponent(split.join(',') + search) + data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData) + } catch (er) { + return reject(new FetchError(`[${request.method}] ${ + request.url} invalid URL, ${er.message}`, 'system', er)) + } + + const { signal } = request + if (signal && signal.aborted) { + return reject(new AbortError('The user aborted a request.')) + } + + const headers = { 'Content-Length': data.length } + if (type) { + headers['Content-Type'] = type + } + return resolve(new Response(data, { headers })) + })) + } + + return new Promise((resolve, reject) => { + // build request object + const request = new Request(url, opts) + let options + try { + options = getNodeRequestOptions(request) + } catch (er) { + return reject(er) + } + + const send = (options.protocol === 'https:' ? 
https : http).request + const { signal } = request + let response = null + const abort = () => { + const error = new AbortError('The user aborted a request.') + reject(error) + if (Minipass.isStream(request.body) && + typeof request.body.destroy === 'function') { + request.body.destroy(error) + } + if (response && response.body) { + response.body.emit('error', error) + } + } + + if (signal && signal.aborted) { + return abort() + } + + const abortAndFinalize = () => { + abort() + finalize() + } + + const finalize = () => { + req.abort() + if (signal) { + signal.removeEventListener('abort', abortAndFinalize) + } + clearTimeout(reqTimeout) + } + + // send request + const req = send(options) + + if (signal) { + signal.addEventListener('abort', abortAndFinalize) + } + + let reqTimeout = null + if (request.timeout) { + req.once('socket', () => { + reqTimeout = setTimeout(() => { + reject(new FetchError(`network timeout at: ${ + request.url}`, 'request-timeout')) + finalize() + }, request.timeout) + }) + } + + req.on('error', er => { + // if a 'response' event is emitted before the 'error' event, then by the + // time this handler is run it's too late to reject the Promise for the + // response. instead, we forward the error event to the response stream + // so that the error will surface to the user when they try to consume + // the body. this is done as a side effect of aborting the request except + // for in windows, where we must forward the event manually, otherwise + // there is no longer a ref'd socket attached to the request and the + // stream never ends so the event loop runs out of work and the process + // exits without warning. + // coverage skipped here due to the difficulty in testing + // istanbul ignore next + if (req.res) { + req.res.emit('error', er) + } + reject(new FetchError(`request to ${request.url} failed, reason: ${ + er.message}`, 'system', er)) + finalize() + }) + + req.on('response', res => { + clearTimeout(reqTimeout) + + const headers = createHeadersLenient(res.headers) + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location') + + // HTTP fetch step 5.3 + let locationURL = null + try { + locationURL = location === null ? null : new URL(location, request.url).toString() + } catch { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + /* eslint-disable-next-line max-len */ + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')) + finalize() + return + } + } + + // HTTP fetch step 5.5 + if (request.redirect === 'error') { + reject(new FetchError('uri requested responds with a redirect, ' + + `redirect mode is set to error: ${request.url}`, 'no-redirect')) + finalize() + return + } else if (request.redirect === 'manual') { + // node-fetch-specific step: make manual redirect a bit easier to + // use by setting the Location header value to the resolved URL. 
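+ // (headers.set() validates the header value, so a corrupted Location is rejected here rather than passed along silently)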
+ if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL) + } catch (err) { + /* istanbul ignore next: nodejs server prevent invalid + response headers, we can't test this through normal + request */ + reject(err) + } + } + } else if (request.redirect === 'follow' && locationURL !== null) { + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${ + request.url}`, 'max-redirect')) + finalize() + return + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && + request.body && + getTotalBytes(request) === null) { + reject(new FetchError( + 'Cannot follow redirect with body being a readable stream', + 'unsupported-redirect' + )) + finalize() + return + } + + // Update host due to redirection + request.headers.set('host', (new URL(locationURL)).host) + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + } + + // if the redirect is to a new hostname, strip the authorization and cookie headers + const parsedOriginal = new URL(request.url) + const parsedRedirect = new URL(locationURL) + if (parsedOriginal.hostname !== parsedRedirect.hostname) { + requestOpts.headers.delete('authorization') + requestOpts.headers.delete('cookie') + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || ( + (res.statusCode === 301 || res.statusCode === 302) && + request.method === 'POST' + )) { + requestOpts.method = 'GET' + requestOpts.body = undefined + requestOpts.headers.delete('content-length') + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))) + finalize() + return + } + } // end if(isRedirect) + + // prepare response + res.once('end', () => + signal && signal.removeEventListener('abort', abortAndFinalize)) + + const body = new Minipass() + // if an error occurs, either on the response stream itself, on one of the + // decoder streams, or a response length timeout from the Body class, we + // forward the error through to our internal body stream. If we see an + // error event on that, we call finalize to abort the request and ensure + // we don't leave a socket believing a request is in flight. + // this is difficult to test, so lacks specific coverage. + body.on('error', finalize) + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + res.on('error', /* istanbul ignore next */ er => body.emit('error', er)) + res.on('data', (chunk) => body.write(chunk)) + res.on('end', () => body.end()) + + const responseOptions = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter, + trailer: new Promise(resolveTrailer => + res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))), + } + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding') + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. 
no content response (204) + // 5. content not modified response (304) + if (!request.compress || + request.method === 'HEAD' || + codings === null || + res.statusCode === 204 || + res.statusCode === 304) { + response = new Response(body, responseOptions) + resolve(response) + return + } + + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH, + } + + // for gzip + if (codings === 'gzip' || codings === 'x-gzip') { + const unzip = new zlib.Gunzip(zlibOptions) + response = new Response( + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip), + responseOptions + ) + resolve(response) + return + } + + // for deflate + if (codings === 'deflate' || codings === 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new Minipass()) + raw.once('data', chunk => { + // see http://stackoverflow.com/questions/37519828 + const decoder = (chunk[0] & 0x0F) === 0x08 + ? new zlib.Inflate() + : new zlib.InflateRaw() + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + }) + return + } + + // for br + if (codings === 'br') { + // ignoring coverage so tests don't have to fake support (or lack of) for brotli + // istanbul ignore next + try { + var decoder = new zlib.BrotliDecompress() + } catch (err) { + reject(err) + finalize() + return + } + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. 
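+ // (brotli support can be absent from the underlying zlib build, which is why construction is wrapped in try/catch above)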
+ body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + return + } + + // otherwise, use response as-is + response = new Response(body, responseOptions) + resolve(response) + }) + + writeToStream(req, request) + }) +} + +module.exports = fetch + +fetch.isRedirect = code => + code === 301 || + code === 302 || + code === 303 || + code === 307 || + code === 308 + +fetch.Headers = Headers +fetch.Request = Request +fetch.Response = Response +fetch.FetchError = FetchError +fetch.AbortError = AbortError diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js new file mode 100644 index 00000000000000..054439e6699107 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js @@ -0,0 +1,282 @@ +'use strict' +const { URL } = require('url') +const { Minipass } = require('minipass') +const Headers = require('./headers.js') +const { exportNodeCompatibleHeaders } = Headers +const Body = require('./body.js') +const { clone, extractContentType, getTotalBytes } = Body + +const version = require('../package.json').version +const defaultUserAgent = + `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)` + +const INTERNALS = Symbol('Request internals') + +const isRequest = input => + typeof input === 'object' && typeof input[INTERNALS] === 'object' + +const isAbortSignal = signal => { + const proto = ( + signal + && typeof signal === 'object' + && Object.getPrototypeOf(signal) + ) + return !!(proto && proto.constructor.name === 'AbortSignal') +} + +class Request extends Body { + constructor (input, init = {}) { + const parsedURL = isRequest(input) ? new URL(input.url) + : input && input.href ? new URL(input.href) + : new URL(`${input}`) + + if (isRequest(input)) { + init = { ...input[INTERNALS], ...init } + } else if (!input || typeof input === 'string') { + input = {} + } + + const method = (init.method || input.method || 'GET').toUpperCase() + const isGETHEAD = method === 'GET' || method === 'HEAD' + + if ((init.body !== null && init.body !== undefined || + isRequest(input) && input.body !== null) && isGETHEAD) { + throw new TypeError('Request with GET/HEAD method cannot have body') + } + + const inputBody = init.body !== null && init.body !== undefined ? init.body + : isRequest(input) && input.body !== null ? clone(input) + : null + + super(inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0, + }) + + const headers = new Headers(init.headers || input.headers || {}) + + if (inputBody !== null && inputBody !== undefined && + !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody) + if (contentType) { + headers.append('Content-Type', contentType) + } + } + + const signal = 'signal' in init ? 
init.signal + : null + + if (signal !== null && signal !== undefined && !isAbortSignal(signal)) { + throw new TypeError('Expected signal must be an instanceof AbortSignal') + } + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0', + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = init + + this[INTERNALS] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow + : input.follow !== undefined ? input.follow + : 20 + this.compress = init.compress !== undefined ? init.compress + : input.compress !== undefined ? input.compress + : true + this.counter = init.counter || input.counter || 0 + this.agent = init.agent || input.agent + } + + get method () { + return this[INTERNALS].method + } + + get url () { + return this[INTERNALS].parsedURL.toString() + } + + get headers () { + return this[INTERNALS].headers + } + + get redirect () { + return this[INTERNALS].redirect + } + + get signal () { + return this[INTERNALS].signal + } + + clone () { + return new Request(this) + } + + get [Symbol.toStringTag] () { + return 'Request' + } + + static getNodeRequestOptions (request) { + const parsedURL = request[INTERNALS].parsedURL + const headers = new Headers(request[INTERNALS].headers) + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*') + } + + // Basic fetch + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported') + } + + if (request.signal && + Minipass.isStream(request.body) && + typeof request.body.destroy !== 'function') { + throw new Error( + 'Cancellation of streamed requests with AbortSignal is not supported') + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + const contentLengthValue = + (request.body === null || request.body === undefined) && + /^(POST|PUT)$/i.test(request.method) ? '0' + : request.body !== null && request.body !== undefined + ? getTotalBytes(request) + : null + + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue + '') + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', defaultUserAgent) + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate') + } + + const agent = typeof request.agent === 'function' + ? 
request.agent(parsedURL) + : request.agent + + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close') + } + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = request[INTERNALS] + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + // we cannot spread parsedURL directly, so we have to read each property one-by-one + // and map them to the equivalent https?.request() method options + const urlProps = { + auth: parsedURL.username || parsedURL.password + ? `${parsedURL.username}:${parsedURL.password}` + : '', + host: parsedURL.host, + hostname: parsedURL.hostname, + path: `${parsedURL.pathname}${parsedURL.search}`, + port: parsedURL.port, + protocol: parsedURL.protocol, + } + + return { + ...urlProps, + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + timeout: request.timeout, + } + } +} + +module.exports = Request + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true }, +}) diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js new file mode 100644 index 00000000000000..54cb52db3594a7 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js @@ -0,0 +1,90 @@ +'use strict' +const http = require('http') +const { STATUS_CODES } = http + +const Headers = require('./headers.js') +const Body = require('./body.js') +const { clone, extractContentType } = Body + +const INTERNALS = Symbol('Response internals') + +class Response extends Body { + constructor (body = null, opts = {}) { + super(body, opts) + + const status = opts.status || 200 + const headers = new Headers(opts.headers) + + if (body !== null && body !== undefined && !headers.has('Content-Type')) { + const contentType = extractContentType(body) + if (contentType) { + headers.append('Content-Type', contentType) + } + } + + this[INTERNALS] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter, + trailer: Promise.resolve(opts.trailer || new Headers()), + } + } + + get trailer () { + return this[INTERNALS].trailer + } + + get url () { + return this[INTERNALS].url || '' + } + + get status () { + return this[INTERNALS].status + } + + get ok () { + return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300 + } + + get redirected () { + return this[INTERNALS].counter > 0 + } + + get statusText () { + return this[INTERNALS].statusText + } + + get headers () { + return this[INTERNALS].headers + } + + clone () { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected, + trailer: this.trailer, + }) + } + + get [Symbol.toStringTag] () { + return 'Response' + } +} + 
+module.exports = Response + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true }, +}) diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json new file mode 100644 index 00000000000000..d491a7fba126d0 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json @@ -0,0 +1,69 @@ +{ + "name": "minipass-fetch", + "version": "3.0.5", + "description": "An implementation of window.fetch in Node.js using Minipass streams", + "license": "MIT", + "main": "lib/index.js", + "scripts": { + "test:tls-fixtures": "./test/fixtures/tls/setup.sh", + "test": "tap", + "snap": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "tap": { + "coverage-map": "map.js", + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "@ungap/url-search-params": "^0.2.2", + "abort-controller": "^3.0.0", + "abortcontroller-polyfill": "~1.7.3", + "encoding": "^0.1.13", + "form-data": "^4.0.0", + "nock": "^13.2.4", + "parted": "^0.1.1", + "string-to-arraybuffer": "^1.0.2", + "tap": "^16.0.0" + }, + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/minipass-fetch.git" + }, + "keywords": [ + "fetch", + "minipass", + "node-fetch", + "window.fetch" + ], + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.22.0", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE new file mode 100644 index 00000000000000..83837797202b70 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) GitHub, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js new file mode 100644 index 00000000000000..86d90861078dab --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js @@ -0,0 +1,153 @@ +const META = Symbol('proc-log.meta') +module.exports = { + META: META, + output: { + LEVELS: [ + 'standard', + 'error', + 'buffer', + 'flush', + ], + KEYS: { + standard: 'standard', + error: 'error', + buffer: 'buffer', + flush: 'flush', + }, + standard: function (...args) { + return process.emit('output', 'standard', ...args) + }, + error: function (...args) { + return process.emit('output', 'error', ...args) + }, + buffer: function (...args) { + return process.emit('output', 'buffer', ...args) + }, + flush: function (...args) { + return process.emit('output', 'flush', ...args) + }, + }, + log: { + LEVELS: [ + 'notice', + 'error', + 'warn', + 'info', + 'verbose', + 'http', + 'silly', + 'timing', + 'pause', + 'resume', + ], + KEYS: { + notice: 'notice', + error: 'error', + warn: 'warn', + info: 'info', + verbose: 'verbose', + http: 'http', + silly: 'silly', + timing: 'timing', + pause: 'pause', + resume: 'resume', + }, + error: function (...args) { + return process.emit('log', 'error', ...args) + }, + notice: function (...args) { + return process.emit('log', 'notice', ...args) + }, + warn: function (...args) { + return process.emit('log', 'warn', ...args) + }, + info: function (...args) { + return process.emit('log', 'info', ...args) + }, + verbose: function (...args) { + return process.emit('log', 'verbose', ...args) + }, + http: function (...args) { + return process.emit('log', 'http', ...args) + }, + silly: function (...args) { + return process.emit('log', 'silly', ...args) + }, + timing: function (...args) { + return process.emit('log', 'timing', ...args) + }, + pause: function () { + return process.emit('log', 'pause') + }, + resume: function () { + return process.emit('log', 'resume') + }, + }, + time: { + LEVELS: [ + 'start', + 'end', + ], + KEYS: { + start: 'start', + end: 'end', + }, + start: function (name, fn) { + process.emit('time', 'start', name) + function end () { + return process.emit('time', 'end', name) + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return res.finally(end) + } + end() + return res + } + return end + }, + end: function (name) { + return process.emit('time', 'end', name) + }, + }, + input: { + LEVELS: [ + 'start', + 'end', + 'read', + ], + KEYS: { + start: 'start', + end: 'end', + read: 'read', + }, + start: function (fn) { + process.emit('input', 'start') + function end () { + return process.emit('input', 'end') + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return res.finally(end) + } + end() + return res + } + return end + }, + end: function () { + return process.emit('input', 'end') + }, + read: function (...args) { + let resolve, reject + const promise = new Promise((_resolve, _reject) => { + resolve = _resolve + reject = _reject + }) + process.emit('input', 'read', resolve, reject, ...args) + return promise + }, + }, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/package.json new file mode 100644 index 00000000000000..4ab89102ecc9b5 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/package.json @@ -0,0 +1,45 @@ +{ + 
"name": "proc-log", + "version": "4.2.0", + "files": [ + "bin/", + "lib/" + ], + "main": "lib/index.js", + "description": "just emit 'log' events on the process object", + "repository": { + "type": "git", + "url": "https://github.com/npm/proc-log.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "posttest": "npm run lint", + "postsnap": "eslint index.js test/*.js --fix", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "template-oss-apply": "template-oss-apply --force" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.3", + "tap": "^16.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md new file mode 100644 index 00000000000000..e335388869f50f --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright 2021 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/lib/index.js new file mode 100644 index 00000000000000..7d749ed480fb98 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/lib/index.js @@ -0,0 +1,580 @@ +'use strict' + +const crypto = require('crypto') +const { Minipass } = require('minipass') + +const SPEC_ALGORITHMS = ['sha512', 'sha384', 'sha256'] +const DEFAULT_ALGORITHMS = ['sha512'] + +// TODO: this should really be a hardcoded list of algorithms we support, +// rather than [a-z0-9]. +const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i +const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/ +const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/ +const VCHAR_REGEX = /^[\x21-\x7E]+$/ + +const getOptString = options => options?.length ? `?${options.join('?')}` : '' + +class IntegrityStream extends Minipass { + #emittedIntegrity + #emittedSize + #emittedVerified + + constructor (opts) { + super() + this.size = 0 + this.opts = opts + + // may be overridden later, but set now for class consistency + this.#getOptions() + + // options used for calculating stream. can't be changed. 
+ if (opts?.algorithms) { + this.algorithms = [...opts.algorithms] + } else { + this.algorithms = [...DEFAULT_ALGORITHMS] + } + if (this.algorithm !== null && !this.algorithms.includes(this.algorithm)) { + this.algorithms.push(this.algorithm) + } + + this.hashes = this.algorithms.map(crypto.createHash) + } + + #getOptions () { + // For verification + this.sri = this.opts?.integrity ? parse(this.opts?.integrity, this.opts) : null + this.expectedSize = this.opts?.size + + if (!this.sri) { + this.algorithm = null + } else if (this.sri.isHash) { + this.goodSri = true + this.algorithm = this.sri.algorithm + } else { + this.goodSri = !this.sri.isEmpty() + this.algorithm = this.sri.pickAlgorithm(this.opts) + } + + this.digests = this.goodSri ? this.sri[this.algorithm] : null + this.optString = getOptString(this.opts?.options) + } + + on (ev, handler) { + if (ev === 'size' && this.#emittedSize) { + return handler(this.#emittedSize) + } + + if (ev === 'integrity' && this.#emittedIntegrity) { + return handler(this.#emittedIntegrity) + } + + if (ev === 'verified' && this.#emittedVerified) { + return handler(this.#emittedVerified) + } + + return super.on(ev, handler) + } + + emit (ev, data) { + if (ev === 'end') { + this.#onEnd() + } + return super.emit(ev, data) + } + + write (data) { + this.size += data.length + this.hashes.forEach(h => h.update(data)) + return super.write(data) + } + + #onEnd () { + if (!this.goodSri) { + this.#getOptions() + } + const newSri = parse(this.hashes.map((h, i) => { + return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}` + }).join(' '), this.opts) + // Integrity verification mode + const match = this.goodSri && newSri.match(this.sri, this.opts) + if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) { + /* eslint-disable-next-line max-len */ + const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`) + err.code = 'EBADSIZE' + err.found = this.size + err.expected = this.expectedSize + err.sri = this.sri + this.emit('error', err) + } else if (this.sri && !match) { + /* eslint-disable-next-line max-len */ + const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = this.digests + err.algorithm = this.algorithm + err.sri = this.sri + this.emit('error', err) + } else { + this.#emittedSize = this.size + this.emit('size', this.size) + this.#emittedIntegrity = newSri + this.emit('integrity', newSri) + if (match) { + this.#emittedVerified = match + this.emit('verified', match) + } + } + } +} + +class Hash { + get isHash () { + return true + } + + constructor (hash, opts) { + const strict = opts?.strict + this.source = hash.trim() + + // set default values so that we make V8 happy to + // always see a familiar object template. + this.digest = '' + this.algorithm = '' + this.options = [] + + // 3.1. Integrity metadata (called "Hash" by ssri) + // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description + const match = this.source.match( + strict + ? 
STRICT_SRI_REGEX + : SRI_REGEX + ) + if (!match) { + return + } + if (strict && !SPEC_ALGORITHMS.includes(match[1])) { + return + } + this.algorithm = match[1] + this.digest = match[2] + + const rawOpts = match[3] + if (rawOpts) { + this.options = rawOpts.slice(1).split('?') + } + } + + hexDigest () { + return this.digest && Buffer.from(this.digest, 'base64').toString('hex') + } + + toJSON () { + return this.toString() + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + if (other.isIntegrity) { + const algo = other.pickAlgorithm(opts, [this.algorithm]) + + if (!algo) { + return false + } + + const foundHash = other[algo].find(hash => hash.digest === this.digest) + + if (foundHash) { + return foundHash + } + + return false + } + return other.digest === this.digest ? other : false + } + + toString (opts) { + if (opts?.strict) { + // Strict mode enforces the standard as close to the foot of the + // letter as it can. + if (!( + // The spec has very restricted productions for algorithms. + // https://www.w3.org/TR/CSP2/#source-list-syntax + SPEC_ALGORITHMS.includes(this.algorithm) && + // Usually, if someone insists on using a "different" base64, we + // leave it as-is, since there's multiple standards, and the + // specified is not a URL-safe variant. + // https://www.w3.org/TR/CSP2/#base64_value + this.digest.match(BASE64_REGEX) && + // Option syntax is strictly visual chars. + // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression + // https://tools.ietf.org/html/rfc5234#appendix-B.1 + this.options.every(opt => opt.match(VCHAR_REGEX)) + )) { + return '' + } + } + return `${this.algorithm}-${this.digest}${getOptString(this.options)}` + } +} + +function integrityHashToString (toString, sep, opts, hashes) { + const toStringIsNotEmpty = toString !== '' + + let shouldAddFirstSep = false + let complement = '' + + const lastIndex = hashes.length - 1 + + for (let i = 0; i < lastIndex; i++) { + const hashString = Hash.prototype.toString.call(hashes[i], opts) + + if (hashString) { + shouldAddFirstSep = true + + complement += hashString + complement += sep + } + } + + const finalHashString = Hash.prototype.toString.call(hashes[lastIndex], opts) + + if (finalHashString) { + shouldAddFirstSep = true + complement += finalHashString + } + + if (toStringIsNotEmpty && shouldAddFirstSep) { + return toString + sep + complement + } + + return toString + complement +} + +class Integrity { + get isIntegrity () { + return true + } + + toJSON () { + return this.toString() + } + + isEmpty () { + return Object.keys(this).length === 0 + } + + toString (opts) { + let sep = opts?.sep || ' ' + let toString = '' + + if (opts?.strict) { + // Entries must be separated by whitespace, according to spec. + sep = sep.replace(/\S+/g, ' ') + + for (const hash of SPEC_ALGORITHMS) { + if (this[hash]) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + } else { + for (const hash of Object.keys(this)) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + + return toString + } + + concat (integrity, opts) { + const other = typeof integrity === 'string' + ? integrity + : stringify(integrity, opts) + return parse(`${this.toString(opts)} ${other}`, opts) + } + + hexDigest () { + return parse(this, { single: true }).hexDigest() + } + + // add additional hashes to an integrity value, but prevent + // *changing* an existing integrity hash. 
+ merge (integrity, opts) { + const other = parse(integrity, opts) + for (const algo in other) { + if (this[algo]) { + if (!this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest))) { + throw new Error('hashes do not match, cannot update integrity') + } + } else { + this[algo] = other[algo] + } + } + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + const algo = other.pickAlgorithm(opts, Object.keys(this)) + return ( + !!algo && + this[algo] && + other[algo] && + this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest + ) + ) + ) || false + } + + // Pick the highest priority algorithm present, optionally also limited to a + // set of hashes found in another integrity. When limiting it may return + // nothing. + pickAlgorithm (opts, hashes) { + const pickAlgorithm = opts?.pickAlgorithm || getPrioritizedHash + const keys = Object.keys(this).filter(k => { + if (hashes?.length) { + return hashes.includes(k) + } + return true + }) + if (keys.length) { + return keys.reduce((acc, algo) => pickAlgorithm(acc, algo) || acc) + } + // no intersection between this and hashes, + return null + } +} + +module.exports.parse = parse +function parse (sri, opts) { + if (!sri) { + return null + } + if (typeof sri === 'string') { + return _parse(sri, opts) + } else if (sri.algorithm && sri.digest) { + const fullSri = new Integrity() + fullSri[sri.algorithm] = [sri] + return _parse(stringify(fullSri, opts), opts) + } else { + return _parse(stringify(sri, opts), opts) + } +} + +function _parse (integrity, opts) { + // 3.4.3. Parse metadata + // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata + if (opts?.single) { + return new Hash(integrity, opts) + } + const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => { + const hash = new Hash(string, opts) + if (hash.algorithm && hash.digest) { + const algo = hash.algorithm + if (!acc[algo]) { + acc[algo] = [] + } + acc[algo].push(hash) + } + return acc + }, new Integrity()) + return hashes.isEmpty() ? null : hashes +} + +module.exports.stringify = stringify +function stringify (obj, opts) { + if (obj.algorithm && obj.digest) { + return Hash.prototype.toString.call(obj, opts) + } else if (typeof obj === 'string') { + return stringify(parse(obj, opts), opts) + } else { + return Integrity.prototype.toString.call(obj, opts) + } +} + +module.exports.fromHex = fromHex +function fromHex (hexDigest, algorithm, opts) { + const optString = getOptString(opts?.options) + return parse( + `${algorithm}-${ + Buffer.from(hexDigest, 'hex').toString('base64') + }${optString}`, opts + ) +} + +module.exports.fromData = fromData +function fromData (data, opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + return algorithms.reduce((acc, algo) => { + const digest = crypto.createHash(algo).update(data).digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the string we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) +} + +module.exports.fromStream = fromStream +function fromStream (stream, opts) { + const istream = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(istream) + stream.on('error', reject) + istream.on('error', reject) + let sri + istream.on('integrity', s => { + sri = s + }) + istream.on('end', () => resolve(sri)) + istream.resume() + }) +} + +module.exports.checkData = checkData +function checkData (data, sri, opts) { + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + if (opts?.error) { + throw Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + ) + } else { + return false + } + } + const algorithm = sri.pickAlgorithm(opts) + const digest = crypto.createHash(algorithm).update(data).digest('base64') + const newSri = parse({ algorithm, digest }) + const match = newSri.match(sri, opts) + opts = opts || {} + if (match || !(opts.error)) { + return match + } else if (typeof opts.size === 'number' && (data.length !== opts.size)) { + /* eslint-disable-next-line max-len */ + const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`) + err.code = 'EBADSIZE' + err.found = data.length + err.expected = opts.size + err.sri = sri + throw err + } else { + /* eslint-disable-next-line max-len */ + const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = sri + err.algorithm = algorithm + err.sri = sri + throw err + } +} + +module.exports.checkStream = checkStream +function checkStream (stream, sri, opts) { + opts = opts || Object.create(null) + opts.integrity = sri + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + return Promise.reject(Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + )) + } + const checker = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(checker) + stream.on('error', reject) + checker.on('error', reject) + let verified + checker.on('verified', s => { + verified = s + }) + checker.on('end', () => resolve(verified)) + checker.resume() + }) +} + +module.exports.integrityStream = integrityStream +function integrityStream (opts = Object.create(null)) { + return new IntegrityStream(opts) +} + +module.exports.create = createIntegrity +function createIntegrity (opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + + const hashes = algorithms.map(crypto.createHash) + + return { + update: function (chunk, enc) { + hashes.forEach(h => h.update(chunk, enc)) + return this + }, + digest: function () { + const integrity = algorithms.reduce((acc, algo) => { + const digest = hashes.shift().digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the hash we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) + + return integrity + }, + } +} + +const NODE_HASHES = crypto.getHashes() + +// This is a Best Effort™ at a reasonable priority for hash algos +const DEFAULT_PRIORITY = [ + 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512', + // TODO - it's unclear _which_ of these Node will actually use as its name + // for the algorithm, so we guesswork it based on the OpenSSL names. + 'sha3', + 'sha3-256', 'sha3-384', 'sha3-512', + 'sha3_256', 'sha3_384', 'sha3_512', +].filter(algo => NODE_HASHES.includes(algo)) + +function getPrioritizedHash (algo1, algo2) { + /* eslint-disable-next-line max-len */ + return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase()) + ? algo1 + : algo2 +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/package.json new file mode 100644 index 00000000000000..28395414e4643c --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/package.json @@ -0,0 +1,65 @@ +{ + "name": "ssri", + "version": "10.0.6", + "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "prerelease": "npm t", + "postrelease": "npm publish", + "posttest": "npm run lint", + "test": "tap", + "coverage": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap" + }, + "tap": { + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/ssri.git" + }, + "keywords": [ + "w3c", + "web", + "security", + "integrity", + "checksum", + "hashing", + "subresource integrity", + "sri", + "sri hash", + "sri string", + "sri generator", + "html" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.22.0", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE new file mode 100644 index 00000000000000..69619c125ea7ef --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE @@ -0,0 +1,5 @@ +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js new file mode 100644 index 00000000000000..d067d2e709809a --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js @@ -0,0 +1,7 @@ +var path = require('path') + +var uniqueSlug = require('unique-slug') + +module.exports = function (filepath, prefix, uniq) { + return path.join(filepath, (prefix ? prefix + '-' : '') + uniqueSlug(uniq)) +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/package.json new file mode 100644 index 00000000000000..b2fbf0666489a6 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/package.json @@ -0,0 +1,51 @@ +{ + "name": "unique-filename", + "version": "3.0.0", + "description": "Generate a unique filename for use in temporary directories or caches.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/unique-filename.git" + }, + "keywords": [], + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/unique-filename/issues" + }, + "homepage": "https://github.com/iarna/unique-filename", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.5.1", + "tap": "^16.3.0" + }, + "dependencies": { + "unique-slug": "^4.0.0" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.5.1" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE new file mode 100644 index 00000000000000..7953647e7760b8 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
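For orientation, the vendored ssri module added above exposes fromData, parse, stringify, checkData, checkStream, and integrityStream. A minimal usage sketch, assuming only the exports defined in lib/index.js above (the input strings are illustrative):

'use strict'
const ssri = require('ssri')

// fromData() hashes data with the default sha512 algorithm and returns an
// Integrity object keyed by algorithm name.
const integrity = ssri.fromData('hello world')

// Integrity objects serialize to standard SRI strings ("sha512-<base64>"),
// and parse() round-trips the string form back into an Integrity object.
const sriString = integrity.toString()
const parsed = ssri.parse(sriString)

// checkData() returns the matching Hash entry on success and false on
// mismatch; passing { error: true } makes it throw with code EINTEGRITY.
console.log(Boolean(ssri.checkData('hello world', parsed))) // true
console.log(ssri.checkData('tampered data', parsed)) // false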
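Similarly, the unique-filename helper added above joins a directory, an optional prefix, and an eight-hex-character slug: with a uniq argument the slug is a deterministic MurmurHash3 of that string (see the unique-slug implementation that follows), and without one it is random. A small sketch, with an illustrative temp directory and prefix:

'use strict'
const os = require('os')
const uniqueFilename = require('unique-filename')

// Random slug: a different path on each call, e.g. /tmp/my-cache-f4c21a09.
const randomTmp = uniqueFilename(os.tmpdir(), 'my-cache')

// Deterministic slug: the same uniq input always yields the same filename,
// which lets callers re-derive a cache path without storing it.
const stablePath = uniqueFilename(os.tmpdir(), 'my-cache', '/some/input/path')

console.log(randomTmp, stablePath)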
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js new file mode 100644 index 00000000000000..1bac84d95d7307 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js @@ -0,0 +1,11 @@ +'use strict' +var MurmurHash3 = require('imurmurhash') + +module.exports = function (uniq) { + if (uniq) { + var hash = new MurmurHash3(uniq) + return ('00000000' + hash.result().toString(16)).slice(-8) + } else { + return (Math.random().toString(16) + '0000000').slice(2, 10) + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/package.json new file mode 100644 index 00000000000000..33732cdbb42859 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/package.json @@ -0,0 +1,47 @@ +{ + "name": "unique-slug", + "version": "4.0.0", + "description": "Generate a unique character string suitable for use in files and URLs.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "keywords": [], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^3.1.0", + "@npmcli/template-oss": "4.5.1", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/unique-slug.git" + }, + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.5.1" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/abbrev/package.json b/deps/npm/node_modules/abbrev/package.json index e26400445631ad..9dfcc0095d7dc6 100644 --- a/deps/npm/node_modules/abbrev/package.json +++ b/deps/npm/node_modules/abbrev/package.json @@ -1,26 +1,27 @@ { "name": "abbrev", - "version": "2.0.0", + "version": "3.0.0", "description": "Like ruby's abbrev module, but in js", "author": "GitHub Inc.", "main": "lib/index.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", - "url": "https://github.com/npm/abbrev-js.git" + "url": "git+https://github.com/npm/abbrev-js.git" }, "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.8.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "tap": { @@ -34,10 +35,11 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss.
Edits may be overwritten.", - "version": "4.8.0" + "version": "4.23.3", + "publish": true } } diff --git a/deps/npm/node_modules/bin-links/lib/link-gently.js b/deps/npm/node_modules/bin-links/lib/link-gently.js index d1e955ec99b029..a39d3bced57b13 100644 --- a/deps/npm/node_modules/bin-links/lib/link-gently.js +++ b/deps/npm/node_modules/bin-links/lib/link-gently.js @@ -6,10 +6,16 @@ const { resolve, dirname } = require('path') const { lstat, mkdir, readlink, rm, symlink } = require('fs/promises') -const throwNonEnoent = er => { - if (er.code !== 'ENOENT') { - throw er +const { log } = require('proc-log') +const throwSignificant = er => { + if (er.code === 'ENOENT') { + return } + if (er.code === 'EACCES') { + log.warn('error adding file', er.message) + return + } + throw er } const rmOpts = { @@ -37,8 +43,8 @@ const linkGently = async ({ path, to, from, absFrom, force }) => { // or at least a warning, but npm has always behaved this // way in the past, so it'd be a breaking change return Promise.all([ - lstat(absFrom).catch(throwNonEnoent), - lstat(to).catch(throwNonEnoent), + lstat(absFrom).catch(throwSignificant), + lstat(to).catch(throwSignificant), ]).then(([stFrom, stTo]) => { // not present in package, skip it if (!stFrom) { diff --git a/deps/npm/node_modules/bin-links/package.json b/deps/npm/node_modules/bin-links/package.json index 1872756bb4b48c..22858d660ae0b9 100644 --- a/deps/npm/node_modules/bin-links/package.json +++ b/deps/npm/node_modules/bin-links/package.json @@ -1,16 +1,17 @@ { "name": "bin-links", - "version": "4.0.4", + "version": "5.0.0", "description": "JavaScript package binary linker", "main": "./lib/index.js", "scripts": { "snap": "tap", "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -23,14 +24,15 @@ ], "license": "ISC", "dependencies": { - "cmd-shim": "^6.0.0", - "npm-normalize-package-bin": "^3.0.0", - "read-cmd-shim": "^4.0.0", - "write-file-atomic": "^5.0.0" + "cmd-shim": "^7.0.0", + "npm-normalize-package-bin": "^4.0.0", + "proc-log": "^5.0.0", + "read-cmd-shim": "^5.0.0", + "write-file-atomic": "^6.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "require-inject": "^1.4.4", "tap": "^16.0.1" }, @@ -47,13 +49,13 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "windowsCI": false, - "version": "4.22.0", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/cacache/lib/entry-index.js index 89c28f2f257d48..0e09b10818d097 100644 --- a/deps/npm/node_modules/cacache/lib/entry-index.js +++ b/deps/npm/node_modules/cacache/lib/entry-index.js @@ -19,7 +19,6 @@ const hashToSegments = require('./util/hash-to-segments') const indexV = require('../package.json')['cache-version'].index const { moveFile } = require('@npmcli/fs') -const pMap = require('p-map') const lsStreamConcurrency = 5 module.exports.NotFoundError = class NotFoundError extends Error { @@ -184,6 +183,7 @@ function lsStream (cache) { // Set all this up to run on the stream and then just return the stream Promise.resolve().then(async () => { + const { default: pMap } = await import('p-map') const buckets = await readdirOrEmpty(indexDir) await pMap(buckets, async (bucket) => { const bucketPath = path.join(indexDir, bucket) diff --git a/deps/npm/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/cacache/lib/verify.js index d7423da1295b68..dcff3aa73f3173 100644 --- a/deps/npm/node_modules/cacache/lib/verify.js +++ b/deps/npm/node_modules/cacache/lib/verify.js @@ -8,7 +8,6 @@ const { truncate, writeFile, } = require('fs/promises') -const pMap = require('p-map') const contentPath = require('./content/path') const fsm = require('fs-minipass') const glob = require('./util/glob.js') @@ -93,6 +92,7 @@ async function fixPerms (cache, opts) { // async function garbageCollect (cache, opts) { opts.log.silly('verify', 'garbage collecting content') + const { default: pMap } = await import('p-map') const indexStream = index.lsStream(cache) const liveContent = new Set() indexStream.on('data', (entry) => { @@ -176,6 +176,7 @@ async function verifyContent (filepath, sri) { async function rebuildIndex (cache, opts) { opts.log.silly('verify', 'rebuilding index') + const { default: pMap } = await import('p-map') const entries = await index.ls(cache) const stats = { missingContent: 0, diff --git a/deps/npm/node_modules/cacache/node_modules/chownr/LICENSE.md b/deps/npm/node_modules/cacache/node_modules/chownr/LICENSE.md new file mode 100644 index 00000000000000..881248b6d7f0ca --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/chownr/LICENSE.md @@ -0,0 +1,63 @@ +All packages under `src/` are licensed according to the terms in +their respective `LICENSE` or `LICENSE.md` files. + +The remainder of this project is licensed under the Blue Oak +Model License, as follows: + +----- + +# Blue Oak Model License + +Version 1.0.0 + +## Purpose + +This license gives everyone as much permission to work with +this software as possible, while protecting contributors +from liability. + +## Acceptance + +In order to receive this license, you must agree to its +rules. The rules of this license are both obligations +under that agreement and conditions to your license. +You must not do anything with this software that triggers +a rule that you cannot or will not follow. + +## Copyright + +Each contributor licenses you to do everything with this +software that would otherwise infringe that contributor's +copyright in it. + +## Notices + +You must ensure that everyone who gets a copy of +any part of this software from you, with or without +changes, also gets the text of this license or a link to +<https://blueoakcouncil.org/license/1.0.0>.
+ +## Excuse + +If anyone notifies you in writing that you have not +complied with [Notices](#notices), you can keep your +license by taking all practical steps to comply within 30 +days after the notice. If you do not do so, your license +ends immediately. + +## Patent + +Each contributor licenses you to do everything with this +software that would otherwise infringe any patent claims +they can license or become able to license. + +## Reliability + +No contributor can revoke this license. + +## No Liability + +***As far as the law allows, this software comes as is, +without any warranty or condition, and no contributor +will be liable to anyone for any damages related to this +software or this license, under any kind of legal claim.*** diff --git a/deps/npm/node_modules/cacache/node_modules/chownr/dist/commonjs/index.js b/deps/npm/node_modules/cacache/node_modules/chownr/dist/commonjs/index.js new file mode 100644 index 00000000000000..6a7b68d5eac26e --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/chownr/dist/commonjs/index.js @@ -0,0 +1,93 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.chownrSync = exports.chownr = void 0; +const node_fs_1 = __importDefault(require("node:fs")); +const node_path_1 = __importDefault(require("node:path")); +const lchownSync = (path, uid, gid) => { + try { + return node_fs_1.default.lchownSync(path, uid, gid); + } + catch (er) { + if (er?.code !== 'ENOENT') + throw er; + } +}; +const chown = (cpath, uid, gid, cb) => { + node_fs_1.default.lchown(cpath, uid, gid, er => { + // Skip ENOENT error + cb(er && er?.code !== 'ENOENT' ? er : null); + }); +}; +const chownrKid = (p, child, uid, gid, cb) => { + if (child.isDirectory()) { + (0, exports.chownr)(node_path_1.default.resolve(p, child.name), uid, gid, (er) => { + if (er) + return cb(er); + const cpath = node_path_1.default.resolve(p, child.name); + chown(cpath, uid, gid, cb); + }); + } + else { + const cpath = node_path_1.default.resolve(p, child.name); + chown(cpath, uid, gid, cb); + } +}; +const chownr = (p, uid, gid, cb) => { + node_fs_1.default.readdir(p, { withFileTypes: true }, (er, children) => { + // any error other than ENOTDIR or ENOTSUP means it's not readable, + // or doesn't exist. give up. 
+ if (er) { + if (er.code === 'ENOENT') + return cb(); + else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP') + return cb(er); + } + if (er || !children.length) + return chown(p, uid, gid, cb); + let len = children.length; + let errState = null; + const then = (er) => { + /* c8 ignore start */ + if (errState) + return; + /* c8 ignore stop */ + if (er) + return cb((errState = er)); + if (--len === 0) + return chown(p, uid, gid, cb); + }; + for (const child of children) { + chownrKid(p, child, uid, gid, then); + } + }); +}; +exports.chownr = chownr; +const chownrKidSync = (p, child, uid, gid) => { + if (child.isDirectory()) + (0, exports.chownrSync)(node_path_1.default.resolve(p, child.name), uid, gid); + lchownSync(node_path_1.default.resolve(p, child.name), uid, gid); +}; +const chownrSync = (p, uid, gid) => { + let children; + try { + children = node_fs_1.default.readdirSync(p, { withFileTypes: true }); + } + catch (er) { + const e = er; + if (e?.code === 'ENOENT') + return; + else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP') + return lchownSync(p, uid, gid); + else + throw e; + } + for (const child of children) { + chownrKidSync(p, child, uid, gid); + } + return lchownSync(p, uid, gid); +}; +exports.chownrSync = chownrSync; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/chownr/dist/commonjs/package.json b/deps/npm/node_modules/cacache/node_modules/chownr/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/chownr/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/cacache/node_modules/chownr/dist/esm/index.js b/deps/npm/node_modules/cacache/node_modules/chownr/dist/esm/index.js new file mode 100644 index 00000000000000..5c2815297a67cb --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/chownr/dist/esm/index.js @@ -0,0 +1,85 @@ +import fs from 'node:fs'; +import path from 'node:path'; +const lchownSync = (path, uid, gid) => { + try { + return fs.lchownSync(path, uid, gid); + } + catch (er) { + if (er?.code !== 'ENOENT') + throw er; + } +}; +const chown = (cpath, uid, gid, cb) => { + fs.lchown(cpath, uid, gid, er => { + // Skip ENOENT error + cb(er && er?.code !== 'ENOENT' ? er : null); + }); +}; +const chownrKid = (p, child, uid, gid, cb) => { + if (child.isDirectory()) { + chownr(path.resolve(p, child.name), uid, gid, (er) => { + if (er) + return cb(er); + const cpath = path.resolve(p, child.name); + chown(cpath, uid, gid, cb); + }); + } + else { + const cpath = path.resolve(p, child.name); + chown(cpath, uid, gid, cb); + } +}; +export const chownr = (p, uid, gid, cb) => { + fs.readdir(p, { withFileTypes: true }, (er, children) => { + // any error other than ENOTDIR or ENOTSUP means it's not readable, + // or doesn't exist. give up. 
+ if (er) { + if (er.code === 'ENOENT') + return cb(); + else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP') + return cb(er); + } + if (er || !children.length) + return chown(p, uid, gid, cb); + let len = children.length; + let errState = null; + const then = (er) => { + /* c8 ignore start */ + if (errState) + return; + /* c8 ignore stop */ + if (er) + return cb((errState = er)); + if (--len === 0) + return chown(p, uid, gid, cb); + }; + for (const child of children) { + chownrKid(p, child, uid, gid, then); + } + }); +}; +const chownrKidSync = (p, child, uid, gid) => { + if (child.isDirectory()) + chownrSync(path.resolve(p, child.name), uid, gid); + lchownSync(path.resolve(p, child.name), uid, gid); +}; +export const chownrSync = (p, uid, gid) => { + let children; + try { + children = fs.readdirSync(p, { withFileTypes: true }); + } + catch (er) { + const e = er; + if (e?.code === 'ENOENT') + return; + else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP') + return lchownSync(p, uid, gid); + else + throw e; + } + for (const child of children) { + chownrKidSync(p, child, uid, gid); + } + return lchownSync(p, uid, gid); +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/chownr/dist/esm/package.json b/deps/npm/node_modules/cacache/node_modules/chownr/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/chownr/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/cacache/node_modules/chownr/package.json b/deps/npm/node_modules/cacache/node_modules/chownr/package.json new file mode 100644 index 00000000000000..09aa6b2e2e576d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/chownr/package.json @@ -0,0 +1,69 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "name": "chownr", + "description": "like `chown -R`", + "version": "3.0.0", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/chownr.git" + }, + "files": [ + "dist" + ], + "devDependencies": { + "@types/node": "^20.12.5", + "mkdirp": "^3.0.1", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tap": "^18.7.2", + "tshy": "^1.13.1", + "typedoc": "^0.25.12" + }, + "scripts": { + "prepare": "tshy", + "pretest": "npm run prepare", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . 
--loglevel warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "type": "module", + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/LICENSE b/deps/npm/node_modules/cacache/node_modules/minizlib/LICENSE new file mode 100644 index 00000000000000..49f7efe431c9ea --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/LICENSE @@ -0,0 +1,26 @@ +Minizlib was created by Isaac Z. Schlueter. +It is a derivative work of the Node.js project. + +""" +Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors +Copyright (c) 2017-2023 Node.js contributors. All rights reserved. +Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +""" diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/constants.js b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/constants.js new file mode 100644 index 00000000000000..dfc2c1957bfc99 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/constants.js @@ -0,0 +1,123 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.constants = void 0; +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. 
+const zlib_1 = __importDefault(require("zlib")); +/* c8 ignore start */ +const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 }; +/* c8 ignore stop */ +exports.constants = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)); +//# 
sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/index.js b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/index.js new file mode 100644 index 00000000000000..ad65eef0495076 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/index.js @@ -0,0 +1,352 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0; +const assert_1 = __importDefault(require("assert")); +const buffer_1 = require("buffer"); +const minipass_1 = require("minipass"); +const zlib_1 = __importDefault(require("zlib")); +const constants_js_1 = require("./constants.js"); +var constants_js_2 = require("./constants.js"); +Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } }); +const OriginalBufferConcat = buffer_1.Buffer.concat; +const _superWrite = Symbol('_superWrite'); +class ZlibError extends Error { + code; + errno; + constructor(err) { + super('zlib: ' + err.message); + this.code = err.code; + this.errno = err.errno; + /* c8 ignore next */ + if (!this.code) + this.code = 'ZLIB_ERROR'; + this.message = 'zlib: ' + err.message; + Error.captureStackTrace(this, this.constructor); + } + get name() { + return 'ZlibError'; + } +} +exports.ZlibError = ZlibError; +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. +const _flushFlag = Symbol('flushFlag'); +class ZlibBase extends minipass_1.Minipass { + #sawError = false; + #ended = false; + #flushFlag; + #finishFlushFlag; + #fullFlushFlag; + #handle; + #onError; + get sawError() { + return this.#sawError; + } + get handle() { + return this.#handle; + } + /* c8 ignore start */ + get flushFlag() { + return this.#flushFlag; + } + /* c8 ignore stop */ + constructor(opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor'); + //@ts-ignore + super(opts); + /* c8 ignore start */ + this.#flushFlag = opts.flush ?? 0; + this.#finishFlushFlag = opts.finishFlush ?? 0; + this.#fullFlushFlag = opts.fullFlushFlag ?? 0; + /* c8 ignore stop */ + // this will throw if any options are invalid for the class selected + try { + // @types/node doesn't know that it exports the classes, but they're there + //@ts-ignore + this.#handle = new zlib_1.default[mode](opts); + } + catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er); + } + this.#onError = err => { + // no sense raising multiple errors, since we abort on the first one. + if (this.#sawError) + return; + this.#sawError = true; + // there is no way to cleanly recover. + // continuing only obscures problems. 
+ this.close(); + this.emit('error', err); + }; + this.#handle?.on('error', er => this.#onError(new ZlibError(er))); + this.once('end', () => this.close); + } + close() { + if (this.#handle) { + this.#handle.close(); + this.#handle = undefined; + this.emit('close'); + } + } + reset() { + if (!this.#sawError) { + (0, assert_1.default)(this.#handle, 'zlib binding closed'); + //@ts-ignore + return this.#handle.reset?.(); + } + } + flush(flushFlag) { + if (this.ended) + return; + if (typeof flushFlag !== 'number') + flushFlag = this.#fullFlushFlag; + this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag })); + } + end(chunk, encoding, cb) { + /* c8 ignore start */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (chunk) { + if (encoding) + this.write(chunk, encoding); + else + this.write(chunk); + } + this.flush(this.#finishFlushFlag); + this.#ended = true; + return super.end(cb); + } + get ended() { + return this.#ended; + } + // overridden in the gzip classes to do portable writes + [_superWrite](data) { + return super.write(data); + } + write(chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + (cb = encoding), (encoding = 'utf8'); + if (typeof chunk === 'string') + chunk = buffer_1.Buffer.from(chunk, encoding); + if (this.#sawError) + return; + (0, assert_1.default)(this.#handle, 'zlib binding closed'); + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + // diving into the node:zlib internals a bit here + const nativeHandle = this.#handle + ._handle; + const originalNativeClose = nativeHandle.close; + nativeHandle.close = () => { }; + const originalClose = this.#handle.close; + this.#handle.close = () => { }; + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + buffer_1.Buffer.concat = args => args; + let result = undefined; + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] + : this.#flushFlag; + result = this.#handle._processChunk(chunk, flushFlag); + // if we don't throw, reset it back how it was + buffer_1.Buffer.concat = OriginalBufferConcat; + } + catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + buffer_1.Buffer.concat = OriginalBufferConcat; + this.#onError(new ZlibError(err)); + } + finally { + if (this.#handle) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + ; + this.#handle._handle = + nativeHandle; + nativeHandle.close = originalNativeClose; + this.#handle.close = originalClose; + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. 
+ this.#handle.removeAllListeners('error'); + // make sure OUR error listener is still attached tho + } + } + if (this.#handle) + this.#handle.on('error', er => this.#onError(new ZlibError(er))); + let writeReturn; + if (result) { + if (Array.isArray(result) && result.length > 0) { + const r = result[0]; + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](buffer_1.Buffer.from(r)); + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]); + } + } + else { + // either a single Buffer or an empty array + writeReturn = this[_superWrite](buffer_1.Buffer.from(result)); + } + } + if (cb) + cb(); + return writeReturn; + } +} +class Zlib extends ZlibBase { + #level; + #strategy; + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH; + opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH; + opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH; + super(opts, mode); + this.#level = opts.level; + this.#strategy = opts.strategy; + } + params(level, strategy) { + if (this.sawError) + return; + if (!this.handle) + throw new Error('cannot switch params when binding is closed'); + // no way to test this without also not supporting params at all + /* c8 ignore start */ + if (!this.handle.params) + throw new Error('not supported in this implementation'); + /* c8 ignore stop */ + if (this.#level !== level || this.#strategy !== strategy) { + this.flush(constants_js_1.constants.Z_SYNC_FLUSH); + (0, assert_1.default)(this.handle, 'zlib binding closed'); + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. + const origFlush = this.handle.flush; + this.handle.flush = (flushFlag, cb) => { + /* c8 ignore start */ + if (typeof flushFlag === 'function') { + cb = flushFlag; + flushFlag = this.flushFlag; + } + /* c8 ignore stop */ + this.flush(flushFlag); + cb?.(); + }; + try { + ; + this.handle.params(level, strategy); + } + finally { + this.handle.flush = origFlush; + } + /* c8 ignore start */ + if (this.handle) { + this.#level = level; + this.#strategy = strategy; + } + /* c8 ignore stop */ + } + } +} +exports.Zlib = Zlib; +// minimal 2-byte header +class Deflate extends Zlib { + constructor(opts) { + super(opts, 'Deflate'); + } +} +exports.Deflate = Deflate; +class Inflate extends Zlib { + constructor(opts) { + super(opts, 'Inflate'); + } +} +exports.Inflate = Inflate; +class Gzip extends Zlib { + #portable; + constructor(opts) { + super(opts, 'Gzip'); + this.#portable = opts && !!opts.portable; + } + [_superWrite](data) { + if (!this.#portable) + return super[_superWrite](data); + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this.#portable = false; + data[9] = 255; + return super[_superWrite](data); + } +} +exports.Gzip = Gzip; +class Gunzip extends Zlib { + constructor(opts) { + super(opts, 'Gunzip'); + } +} +exports.Gunzip = Gunzip; +// raw - no header +class DeflateRaw extends Zlib { + constructor(opts) { + super(opts, 'DeflateRaw'); + } +} +exports.DeflateRaw = DeflateRaw; +class InflateRaw extends Zlib { + constructor(opts) { + super(opts, 'InflateRaw'); + } +} +exports.InflateRaw = InflateRaw; +// auto-detect header. 
+class Unzip extends Zlib { + constructor(opts) { + super(opts, 'Unzip'); + } +} +exports.Unzip = Unzip; +class Brotli extends ZlibBase { + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS; + opts.finishFlush = + opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH; + opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH; + super(opts, mode); + } +} +exports.Brotli = Brotli; +class BrotliCompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliCompress'); + } +} +exports.BrotliCompress = BrotliCompress; +class BrotliDecompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliDecompress'); + } +} +exports.BrotliDecompress = BrotliDecompress; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/package.json b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/constants.js b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/constants.js new file mode 100644 index 00000000000000..7faf40be5068d0 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/constants.js @@ -0,0 +1,117 @@ +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. 
+import realZlib from 'zlib'; +/* c8 ignore start */ +const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 }; +/* c8 ignore stop */ +export const constants = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)); +//# sourceMappingURL=constants.js.map \ No 
newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/index.js b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/index.js new file mode 100644 index 00000000000000..a6269b505f47cc --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/index.js @@ -0,0 +1,333 @@ +import assert from 'assert'; +import { Buffer } from 'buffer'; +import { Minipass } from 'minipass'; +import realZlib from 'zlib'; +import { constants } from './constants.js'; +export { constants } from './constants.js'; +const OriginalBufferConcat = Buffer.concat; +const _superWrite = Symbol('_superWrite'); +export class ZlibError extends Error { + code; + errno; + constructor(err) { + super('zlib: ' + err.message); + this.code = err.code; + this.errno = err.errno; + /* c8 ignore next */ + if (!this.code) + this.code = 'ZLIB_ERROR'; + this.message = 'zlib: ' + err.message; + Error.captureStackTrace(this, this.constructor); + } + get name() { + return 'ZlibError'; + } +} +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. +const _flushFlag = Symbol('flushFlag'); +class ZlibBase extends Minipass { + #sawError = false; + #ended = false; + #flushFlag; + #finishFlushFlag; + #fullFlushFlag; + #handle; + #onError; + get sawError() { + return this.#sawError; + } + get handle() { + return this.#handle; + } + /* c8 ignore start */ + get flushFlag() { + return this.#flushFlag; + } + /* c8 ignore stop */ + constructor(opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor'); + //@ts-ignore + super(opts); + /* c8 ignore start */ + this.#flushFlag = opts.flush ?? 0; + this.#finishFlushFlag = opts.finishFlush ?? 0; + this.#fullFlushFlag = opts.fullFlushFlag ?? 0; + /* c8 ignore stop */ + // this will throw if any options are invalid for the class selected + try { + // @types/node doesn't know that it exports the classes, but they're there + //@ts-ignore + this.#handle = new realZlib[mode](opts); + } + catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er); + } + this.#onError = err => { + // no sense raising multiple errors, since we abort on the first one. + if (this.#sawError) + return; + this.#sawError = true; + // there is no way to cleanly recover. + // continuing only obscures problems. 
+ this.close(); + this.emit('error', err); + }; + this.#handle?.on('error', er => this.#onError(new ZlibError(er))); + this.once('end', () => this.close); + } + close() { + if (this.#handle) { + this.#handle.close(); + this.#handle = undefined; + this.emit('close'); + } + } + reset() { + if (!this.#sawError) { + assert(this.#handle, 'zlib binding closed'); + //@ts-ignore + return this.#handle.reset?.(); + } + } + flush(flushFlag) { + if (this.ended) + return; + if (typeof flushFlag !== 'number') + flushFlag = this.#fullFlushFlag; + this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag })); + } + end(chunk, encoding, cb) { + /* c8 ignore start */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (chunk) { + if (encoding) + this.write(chunk, encoding); + else + this.write(chunk); + } + this.flush(this.#finishFlushFlag); + this.#ended = true; + return super.end(cb); + } + get ended() { + return this.#ended; + } + // overridden in the gzip classes to do portable writes + [_superWrite](data) { + return super.write(data); + } + write(chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + (cb = encoding), (encoding = 'utf8'); + if (typeof chunk === 'string') + chunk = Buffer.from(chunk, encoding); + if (this.#sawError) + return; + assert(this.#handle, 'zlib binding closed'); + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + // diving into the node:zlib internals a bit here + const nativeHandle = this.#handle + ._handle; + const originalNativeClose = nativeHandle.close; + nativeHandle.close = () => { }; + const originalClose = this.#handle.close; + this.#handle.close = () => { }; + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + Buffer.concat = args => args; + let result = undefined; + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] + : this.#flushFlag; + result = this.#handle._processChunk(chunk, flushFlag); + // if we don't throw, reset it back how it was + Buffer.concat = OriginalBufferConcat; + } + catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + Buffer.concat = OriginalBufferConcat; + this.#onError(new ZlibError(err)); + } + finally { + if (this.#handle) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + ; + this.#handle._handle = + nativeHandle; + nativeHandle.close = originalNativeClose; + this.#handle.close = originalClose; + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. 
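+ // (editor's note, not upstream code) removeAllListeners drops our own
+ // handler as well; it is re-attached unconditionally just below, outside
+ // the finally block.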
+ this.#handle.removeAllListeners('error'); + // make sure OUR error listener is still attached tho + } + } + if (this.#handle) + this.#handle.on('error', er => this.#onError(new ZlibError(er))); + let writeReturn; + if (result) { + if (Array.isArray(result) && result.length > 0) { + const r = result[0]; + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](Buffer.from(r)); + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]); + } + } + else { + // either a single Buffer or an empty array + writeReturn = this[_superWrite](Buffer.from(result)); + } + } + if (cb) + cb(); + return writeReturn; + } +} +export class Zlib extends ZlibBase { + #level; + #strategy; + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants.Z_NO_FLUSH; + opts.finishFlush = opts.finishFlush || constants.Z_FINISH; + opts.fullFlushFlag = constants.Z_FULL_FLUSH; + super(opts, mode); + this.#level = opts.level; + this.#strategy = opts.strategy; + } + params(level, strategy) { + if (this.sawError) + return; + if (!this.handle) + throw new Error('cannot switch params when binding is closed'); + // no way to test this without also not supporting params at all + /* c8 ignore start */ + if (!this.handle.params) + throw new Error('not supported in this implementation'); + /* c8 ignore stop */ + if (this.#level !== level || this.#strategy !== strategy) { + this.flush(constants.Z_SYNC_FLUSH); + assert(this.handle, 'zlib binding closed'); + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. + const origFlush = this.handle.flush; + this.handle.flush = (flushFlag, cb) => { + /* c8 ignore start */ + if (typeof flushFlag === 'function') { + cb = flushFlag; + flushFlag = this.flushFlag; + } + /* c8 ignore stop */ + this.flush(flushFlag); + cb?.(); + }; + try { + ; + this.handle.params(level, strategy); + } + finally { + this.handle.flush = origFlush; + } + /* c8 ignore start */ + if (this.handle) { + this.#level = level; + this.#strategy = strategy; + } + /* c8 ignore stop */ + } + } +} +// minimal 2-byte header +export class Deflate extends Zlib { + constructor(opts) { + super(opts, 'Deflate'); + } +} +export class Inflate extends Zlib { + constructor(opts) { + super(opts, 'Inflate'); + } +} +export class Gzip extends Zlib { + #portable; + constructor(opts) { + super(opts, 'Gzip'); + this.#portable = opts && !!opts.portable; + } + [_superWrite](data) { + if (!this.#portable) + return super[_superWrite](data); + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this.#portable = false; + data[9] = 255; + return super[_superWrite](data); + } +} +export class Gunzip extends Zlib { + constructor(opts) { + super(opts, 'Gunzip'); + } +} +// raw - no header +export class DeflateRaw extends Zlib { + constructor(opts) { + super(opts, 'DeflateRaw'); + } +} +export class InflateRaw extends Zlib { + constructor(opts) { + super(opts, 'InflateRaw'); + } +} +// auto-detect header. 
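+// (editor's note, not upstream code) zlib's Unzip mode sniffs the stream's
+// leading bytes and inflates either gzip or zlib-wrapped data, so callers
+// need not know which format they were handed. A minimal usage sketch,
+// assuming a minipass-compatible `source` and a stand-in `consume` helper:
+//
+//   source.pipe(new Unzip()).on('data', chunk => consume(chunk))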
+export class Unzip extends Zlib { + constructor(opts) { + super(opts, 'Unzip'); + } +} +export class Brotli extends ZlibBase { + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS; + opts.finishFlush = + opts.finishFlush || constants.BROTLI_OPERATION_FINISH; + opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH; + super(opts, mode); + } +} +export class BrotliCompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliCompress'); + } +} +export class BrotliDecompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliDecompress'); + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/package.json b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/package.json b/deps/npm/node_modules/cacache/node_modules/minizlib/package.json new file mode 100644 index 00000000000000..e94623ff43d353 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/package.json @@ -0,0 +1,81 @@ +{ + "name": "minizlib", + "version": "3.0.1", + "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.", + "main": "./dist/commonjs/index.js", + "dependencies": { + "minipass": "^7.0.4", + "rimraf": "^5.0.5" + }, + "scripts": { + "prepare": "tshy", + "pretest": "npm run prepare", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minizlib.git" + }, + "keywords": [ + "zlib", + "gzip", + "gunzip", + "deflate", + "inflate", + "compression", + "zip", + "unzip" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "MIT", + "devDependencies": { + "@types/node": "^20.11.29", + "mkdirp": "^3.0.1", + "tap": "^18.7.1", + "tshy": "^1.12.0", + "typedoc": "^0.25.12" + }, + "files": [ + "dist" + ], + "engines": { + "node": ">= 18" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "types": "./dist/commonjs/index.d.ts", + "type": "module", + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/LICENSE b/deps/npm/node_modules/cacache/node_modules/mkdirp/LICENSE new file mode 100644 index 00000000000000..0a034db7a73b5d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) 2011-2023 James Halliday (mail@substack.net) and Isaac Z. 
Schlueter (i@izs.me) + +This project is free software released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/package.json b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/package.json new file mode 100644 index 00000000000000..9d04a66e16cd93 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/package.json @@ -0,0 +1,91 @@ +{ + "name": "mkdirp", + "description": "Recursively mkdir, like `mkdir -p`", + "version": "3.0.1", + "keywords": [ + "mkdir", + "directory", + "make dir", + "make", + "dir", + "recursive", + "native" + ], + "bin": "./dist/cjs/src/bin.js", + "main": "./dist/cjs/src/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/mjs/index.d.ts", + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/src/index.d.ts", + "default": "./dist/cjs/src/index.js" + } + } + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", + "postprepare": "bash fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . 
--loglevel warn", + "benchmark": "node benchmark/index.js", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/brace-expansion": "^1.1.0", + "@types/node": "^18.11.9", + "@types/tap": "^15.0.7", + "c8": "^7.12.0", + "eslint-config-prettier": "^8.6.0", + "prettier": "^2.8.2", + "tap": "^16.3.3", + "ts-node": "^10.9.1", + "typedoc": "^0.23.21", + "typescript": "^4.9.3" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "repository": { + "type": "git", + "url": "https://github.com/isaacs/node-mkdirp.git" + }, + "license": "MIT", + "engines": { + "node": ">=10" + } +} diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.d.ts new file mode 100644 index 00000000000000..34e005228653c8 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.d.ts @@ -0,0 +1,3 @@ +#!/usr/bin/env node +export {}; +//# sourceMappingURL=bin.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.d.ts.map new file mode 100644 index 00000000000000..c10c656ec75109 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"bin.d.ts","sourceRoot":"","sources":["../../../src/bin.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js new file mode 100755 index 00000000000000..757aae1fd96cb2 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js @@ -0,0 +1,80 @@ +#!/usr/bin/env node +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const package_json_1 = require("../package.json"); +const usage = () => ` +usage: mkdirp [DIR1,DIR2..] {OPTIONS} + + Create each supplied directory including any necessary parent directories + that don't yet exist. + + If the directory already exists, do nothing. + +OPTIONS are: + + -m If a directory needs to be created, set the mode as an octal + --mode= permission string. 
+ + -v --version Print the mkdirp version number + + -h --help Print this helpful banner + + -p --print Print the first directories created for each path provided + + --manual Use manual implementation, even if native is available +`; +const dirs = []; +const opts = {}; +let doPrint = false; +let dashdash = false; +let manual = false; +for (const arg of process.argv.slice(2)) { + if (dashdash) + dirs.push(arg); + else if (arg === '--') + dashdash = true; + else if (arg === '--manual') + manual = true; + else if (/^-h/.test(arg) || /^--help/.test(arg)) { + console.log(usage()); + process.exit(0); + } + else if (arg === '-v' || arg === '--version') { + console.log(package_json_1.version); + process.exit(0); + } + else if (arg === '-p' || arg === '--print') { + doPrint = true; + } + else if (/^-m/.test(arg) || /^--mode=/.test(arg)) { + // these don't get covered in CI, but work locally + // weird because the tests below show as passing in the output. + /* c8 ignore start */ + const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8); + if (isNaN(mode)) { + console.error(`invalid mode argument: ${arg}\nMust be an octal number.`); + process.exit(1); + } + /* c8 ignore stop */ + opts.mode = mode; + } + else + dirs.push(arg); +} +const index_js_1 = require("./index.js"); +const impl = manual ? index_js_1.mkdirp.manual : index_js_1.mkdirp; +if (dirs.length === 0) { + console.error(usage()); +} +// these don't get covered in CI, but work locally +/* c8 ignore start */ +Promise.all(dirs.map(dir => impl(dir, opts))) + .then(made => (doPrint ? made.forEach(m => m && console.log(m)) : null)) + .catch(er => { + console.error(er.message); + if (er.code) + console.error(' code: ' + er.code); + process.exit(1); +}); +/* c8 ignore stop */ +//# sourceMappingURL=bin.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js.map new file mode 100644 index 00000000000000..d99295301b5fa7 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"bin.js","sourceRoot":"","sources":["../../../src/bin.ts"],"names":[],"mappings":";;;AAEA,kDAAyC;AAGzC,MAAM,KAAK,GAAG,GAAG,EAAE,CAAC;;;;;;;;;;;;;;;;;;;;CAoBnB,CAAA;AAED,MAAM,IAAI,GAAa,EAAE,CAAA;AACzB,MAAM,IAAI,GAAkB,EAAE,CAAA;AAC9B,IAAI,OAAO,GAAY,KAAK,CAAA;AAC5B,IAAI,QAAQ,GAAG,KAAK,CAAA;AACpB,IAAI,MAAM,GAAG,KAAK,CAAA;AAClB,KAAK,MAAM,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACvC,IAAI,QAAQ;QAAE,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACvB,IAAI,GAAG,KAAK,IAAI;QAAE,QAAQ,GAAG,IAAI,CAAA;SACjC,IAAI,GAAG,KAAK,UAAU;QAAE,MAAM,GAAG,IAAI,CAAA;SACrC,IAAI,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QAC/C,OAAO,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,CAAA;QACpB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;KAChB;SAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,WAAW,EAAE;QAC9C,OAAO,CAAC,GAAG,CAAC,sBAAO,CAAC,CAAA;QACpB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;KAChB;SAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,SAAS,EAAE;QAC5C,OAAO,GAAG,IAAI,CAAA;KACf;SAAM,IAAI,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QAClD,kDAAkD;QAClD,+DAA+D;QAC/D,qBAAqB;QACrB,MAAM,IAAI,GAAG,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,eAAe,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;QAC1D,IAAI,KAAK,CAAC,IAAI,CAAC,EAAE;YACf,OAAO,CAAC,KAAK,CAAC,0BAA0B,GAAG,4BAA4B,CAAC,CAAA;YACxE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;SAChB;QACD,oBAAoB;QACpB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;KACjB;;QAAM,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;CACtB;AAED,yCAAmC;AACnC,MAAM,IAAI,GAAG,MAAM,CAAC,CAAC,CAAC,iBAAM,CAAC,MAAM,CAAC,CAAC,CAAC,iBAAM,CAAA;AAC5C,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;IACrB,OAAO,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAA;CACvB;AAED,kDAAkD;AAClD,qBAAqB;AACrB,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC;KAC1C,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;KACvE,KAAK,CAAC,EAAE,CAAC,EAAE;IACV,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,CAAA;IACzB,IAAI,EAAE,CAAC,IAAI;QAAE,OAAO,CAAC,KAAK,CAAC,UAAU,GAAG,EAAE,CAAC,IAAI,CAAC,CAAA;IAChD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;AACjB,CAAC,CAAC,CAAA;AACJ,oBAAoB","sourcesContent":["#!/usr/bin/env node\n\nimport { version } from '../package.json'\nimport { MkdirpOptions } from './opts-arg.js'\n\nconst usage = () => `\nusage: mkdirp [DIR1,DIR2..] 
{OPTIONS}\n\n Create each supplied directory including any necessary parent directories\n that don't yet exist.\n\n If the directory already exists, do nothing.\n\nOPTIONS are:\n\n -m If a directory needs to be created, set the mode as an octal\n --mode= permission string.\n\n -v --version Print the mkdirp version number\n\n -h --help Print this helpful banner\n\n -p --print Print the first directories created for each path provided\n\n --manual Use manual implementation, even if native is available\n`\n\nconst dirs: string[] = []\nconst opts: MkdirpOptions = {}\nlet doPrint: boolean = false\nlet dashdash = false\nlet manual = false\nfor (const arg of process.argv.slice(2)) {\n if (dashdash) dirs.push(arg)\n else if (arg === '--') dashdash = true\n else if (arg === '--manual') manual = true\n else if (/^-h/.test(arg) || /^--help/.test(arg)) {\n console.log(usage())\n process.exit(0)\n } else if (arg === '-v' || arg === '--version') {\n console.log(version)\n process.exit(0)\n } else if (arg === '-p' || arg === '--print') {\n doPrint = true\n } else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {\n // these don't get covered in CI, but work locally\n // weird because the tests below show as passing in the output.\n /* c8 ignore start */\n const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8)\n if (isNaN(mode)) {\n console.error(`invalid mode argument: ${arg}\\nMust be an octal number.`)\n process.exit(1)\n }\n /* c8 ignore stop */\n opts.mode = mode\n } else dirs.push(arg)\n}\n\nimport { mkdirp } from './index.js'\nconst impl = manual ? mkdirp.manual : mkdirp\nif (dirs.length === 0) {\n console.error(usage())\n}\n\n// these don't get covered in CI, but work locally\n/* c8 ignore start */\nPromise.all(dirs.map(dir => impl(dir, opts)))\n .then(made => (doPrint ? 
made.forEach(m => m && console.log(m)) : null))\n .catch(er => {\n console.error(er.message)\n if (er.code) console.error(' code: ' + er.code)\n process.exit(1)\n })\n/* c8 ignore stop */\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.d.ts new file mode 100644 index 00000000000000..e47794b3bb72a3 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.d.ts @@ -0,0 +1,4 @@ +import { MkdirpOptionsResolved } from './opts-arg.js'; +export declare const findMade: (opts: MkdirpOptionsResolved, parent: string, path?: string) => Promise; +export declare const findMadeSync: (opts: MkdirpOptionsResolved, parent: string, path?: string) => undefined | string; +//# sourceMappingURL=find-made.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.d.ts.map new file mode 100644 index 00000000000000..00d5d1a4dbefdf --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"find-made.d.ts","sourceRoot":"","sources":["../../../src/find-made.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,qBAAqB,EAAE,MAAM,eAAe,CAAA;AAErD,eAAO,MAAM,QAAQ,SACb,qBAAqB,UACnB,MAAM,SACP,MAAM,KACZ,QAAQ,SAAS,GAAG,MAAM,CAe5B,CAAA;AAED,eAAO,MAAM,YAAY,SACjB,qBAAqB,UACnB,MAAM,SACP,MAAM,KACZ,SAAS,GAAG,MAad,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js new file mode 100644 index 00000000000000..e831ef27cadc1d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.findMadeSync = exports.findMade = void 0; +const path_1 = require("path"); +const findMade = async (opts, parent, path) => { + // we never want the 'made' return value to be a root directory + if (path === parent) { + return; + } + return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later + // will fail later + er => { + const fer = er; + return fer && fer.code === 'ENOENT' + ? (0, exports.findMade)(opts, (0, path_1.dirname)(parent), parent) + : undefined; + }); +}; +exports.findMade = findMade; +const findMadeSync = (opts, parent, path) => { + if (path === parent) { + return undefined; + } + try { + return opts.statSync(parent).isDirectory() ? path : undefined; + } + catch (er) { + const fer = er; + return fer && fer.code === 'ENOENT' + ? 
(0, exports.findMadeSync)(opts, (0, path_1.dirname)(parent), parent) + : undefined; + } +}; +exports.findMadeSync = findMadeSync; +//# sourceMappingURL=find-made.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js.map new file mode 100644 index 00000000000000..30a0d66398878d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js.map @@ -0,0 +1 @@ +{"version":3,"file":"find-made.js","sourceRoot":"","sources":["../../../src/find-made.ts"],"names":[],"mappings":";;;AAAA,+BAA8B;AAGvB,MAAM,QAAQ,GAAG,KAAK,EAC3B,IAA2B,EAC3B,MAAc,EACd,IAAa,EACgB,EAAE;IAC/B,+DAA+D;IAC/D,IAAI,IAAI,KAAK,MAAM,EAAE;QACnB,OAAM;KACP;IAED,OAAO,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAChC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,kBAAkB;IAC/D,AAD6C,kBAAkB;IAC/D,EAAE,CAAC,EAAE;QACH,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,OAAO,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ;YACjC,CAAC,CAAC,IAAA,gBAAQ,EAAC,IAAI,EAAE,IAAA,cAAO,EAAC,MAAM,CAAC,EAAE,MAAM,CAAC;YACzC,CAAC,CAAC,SAAS,CAAA;IACf,CAAC,CACF,CAAA;AACH,CAAC,CAAA;AAnBY,QAAA,QAAQ,YAmBpB;AAEM,MAAM,YAAY,GAAG,CAC1B,IAA2B,EAC3B,MAAc,EACd,IAAa,EACO,EAAE;IACtB,IAAI,IAAI,KAAK,MAAM,EAAE;QACnB,OAAO,SAAS,CAAA;KACjB;IAED,IAAI;QACF,OAAO,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAA;KAC9D;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,OAAO,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ;YACjC,CAAC,CAAC,IAAA,oBAAY,EAAC,IAAI,EAAE,IAAA,cAAO,EAAC,MAAM,CAAC,EAAE,MAAM,CAAC;YAC7C,CAAC,CAAC,SAAS,CAAA;KACd;AACH,CAAC,CAAA;AAjBY,QAAA,YAAY,gBAiBxB","sourcesContent":["import { dirname } from 'path'\nimport { MkdirpOptionsResolved } from './opts-arg.js'\n\nexport const findMade = async (\n opts: MkdirpOptionsResolved,\n parent: string,\n path?: string\n): Promise => {\n // we never want the 'made' return value to be a root directory\n if (path === parent) {\n return\n }\n\n return opts.statAsync(parent).then(\n st => (st.isDirectory() ? path : undefined), // will fail later\n er => {\n const fer = er as NodeJS.ErrnoException\n return fer && fer.code === 'ENOENT'\n ? findMade(opts, dirname(parent), parent)\n : undefined\n }\n )\n}\n\nexport const findMadeSync = (\n opts: MkdirpOptionsResolved,\n parent: string,\n path?: string\n): undefined | string => {\n if (path === parent) {\n return undefined\n }\n\n try {\n return opts.statSync(parent).isDirectory() ? path : undefined\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n return fer && fer.code === 'ENOENT'\n ? 
findMadeSync(opts, dirname(parent), parent)\n : undefined\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.d.ts new file mode 100644 index 00000000000000..fc9e43b3a45de1 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.d.ts @@ -0,0 +1,39 @@ +import { MkdirpOptions } from './opts-arg.js'; +export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'; +export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'; +export { useNative, useNativeSync } from './use-native.js'; +export declare const mkdirpSync: (path: string, opts?: MkdirpOptions) => string | void; +export declare const sync: (path: string, opts?: MkdirpOptions) => string | void; +export declare const manual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise) & { + sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; +}; +export declare const manualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; +export declare const native: ((path: string, options?: MkdirpOptions | undefined) => Promise) & { + sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; +}; +export declare const nativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; +export declare const mkdirp: ((path: string, opts?: MkdirpOptions) => Promise) & { + mkdirpSync: (path: string, opts?: MkdirpOptions) => string | void; + mkdirpNative: ((path: string, options?: MkdirpOptions | undefined) => Promise) & { + sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + }; + mkdirpNativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + mkdirpManual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise) & { + sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + }; + mkdirpManualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + sync: (path: string, opts?: MkdirpOptions) => string | void; + native: ((path: string, options?: MkdirpOptions | undefined) => Promise) & { + sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + }; + nativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + manual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise) & { + sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + }; + manualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + useNative: ((opts?: MkdirpOptions | undefined) => boolean) & { + sync: (opts?: MkdirpOptions | undefined) => boolean; + }; + useNativeSync: (opts?: MkdirpOptions | undefined) => boolean; +}; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.d.ts.map new file mode 100644 index 
00000000000000..0e915bbc9a0c7a --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAItD,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAG1D,eAAO,MAAM,UAAU,SAAU,MAAM,SAAS,aAAa,kBAM5D,CAAA;AAED,eAAO,MAAM,IAAI,SARgB,MAAM,SAAS,aAAa,kBAQ/B,CAAA;AAC9B,eAAO,MAAM,MAAM;;CAAe,CAAA;AAClC,eAAO,MAAM,UAAU,oHAAmB,CAAA;AAC1C,eAAO,MAAM,MAAM;;CAAe,CAAA;AAClC,eAAO,MAAM,UAAU,kFAAmB,CAAA;AAC1C,eAAO,MAAM,MAAM,UACJ,MAAM,SAAS,aAAa;uBAdV,MAAM,SAAS,aAAa;;;;;;;;;iBAA5B,MAAM,SAAS,aAAa;;;;;;;;;;;;;CAoC5D,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js new file mode 100644 index 00000000000000..ab9dc62cddda36 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js @@ -0,0 +1,53 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.mkdirp = exports.nativeSync = exports.native = exports.manualSync = exports.manual = exports.sync = exports.mkdirpSync = exports.useNativeSync = exports.useNative = exports.mkdirpNativeSync = exports.mkdirpNative = exports.mkdirpManualSync = exports.mkdirpManual = void 0; +const mkdirp_manual_js_1 = require("./mkdirp-manual.js"); +const mkdirp_native_js_1 = require("./mkdirp-native.js"); +const opts_arg_js_1 = require("./opts-arg.js"); +const path_arg_js_1 = require("./path-arg.js"); +const use_native_js_1 = require("./use-native.js"); +/* c8 ignore start */ +var mkdirp_manual_js_2 = require("./mkdirp-manual.js"); +Object.defineProperty(exports, "mkdirpManual", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManual; } }); +Object.defineProperty(exports, "mkdirpManualSync", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManualSync; } }); +var mkdirp_native_js_2 = require("./mkdirp-native.js"); +Object.defineProperty(exports, "mkdirpNative", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNative; } }); +Object.defineProperty(exports, "mkdirpNativeSync", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNativeSync; } }); +var use_native_js_2 = require("./use-native.js"); +Object.defineProperty(exports, "useNative", { enumerable: true, get: function () { return use_native_js_2.useNative; } }); +Object.defineProperty(exports, "useNativeSync", { enumerable: true, get: function () { return use_native_js_2.useNativeSync; } }); +/* c8 ignore stop */ +const mkdirpSync = (path, opts) => { + path = (0, path_arg_js_1.pathArg)(path); + const resolved = (0, opts_arg_js_1.optsArg)(opts); + return (0, use_native_js_1.useNativeSync)(resolved) + ? 
(0, mkdirp_native_js_1.mkdirpNativeSync)(path, resolved) + : (0, mkdirp_manual_js_1.mkdirpManualSync)(path, resolved); +}; +exports.mkdirpSync = mkdirpSync; +exports.sync = exports.mkdirpSync; +exports.manual = mkdirp_manual_js_1.mkdirpManual; +exports.manualSync = mkdirp_manual_js_1.mkdirpManualSync; +exports.native = mkdirp_native_js_1.mkdirpNative; +exports.nativeSync = mkdirp_native_js_1.mkdirpNativeSync; +exports.mkdirp = Object.assign(async (path, opts) => { + path = (0, path_arg_js_1.pathArg)(path); + const resolved = (0, opts_arg_js_1.optsArg)(opts); + return (0, use_native_js_1.useNative)(resolved) + ? (0, mkdirp_native_js_1.mkdirpNative)(path, resolved) + : (0, mkdirp_manual_js_1.mkdirpManual)(path, resolved); +}, { + mkdirpSync: exports.mkdirpSync, + mkdirpNative: mkdirp_native_js_1.mkdirpNative, + mkdirpNativeSync: mkdirp_native_js_1.mkdirpNativeSync, + mkdirpManual: mkdirp_manual_js_1.mkdirpManual, + mkdirpManualSync: mkdirp_manual_js_1.mkdirpManualSync, + sync: exports.mkdirpSync, + native: mkdirp_native_js_1.mkdirpNative, + nativeSync: mkdirp_native_js_1.mkdirpNativeSync, + manual: mkdirp_manual_js_1.mkdirpManual, + manualSync: mkdirp_manual_js_1.mkdirpManualSync, + useNative: use_native_js_1.useNative, + useNativeSync: use_native_js_1.useNativeSync, +}); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js.map new file mode 100644 index 00000000000000..fdb572677a98ef --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":";;;AAAA,yDAAmE;AACnE,yDAAmE;AACnE,+CAAsD;AACtD,+CAAuC;AACvC,mDAA0D;AAC1D,qBAAqB;AACrB,uDAAmE;AAA1D,gHAAA,YAAY,OAAA;AAAE,oHAAA,gBAAgB,OAAA;AACvC,uDAAmE;AAA1D,gHAAA,YAAY,OAAA;AAAE,oHAAA,gBAAgB,OAAA;AACvC,iDAA0D;AAAjD,0GAAA,SAAS,OAAA;AAAE,8GAAA,aAAa,OAAA;AACjC,oBAAoB;AAEb,MAAM,UAAU,GAAG,CAAC,IAAY,EAAE,IAAoB,EAAE,EAAE;IAC/D,IAAI,GAAG,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAA;IACpB,MAAM,QAAQ,GAAG,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAA;IAC9B,OAAO,IAAA,6BAAa,EAAC,QAAQ,CAAC;QAC5B,CAAC,CAAC,IAAA,mCAAgB,EAAC,IAAI,EAAE,QAAQ,CAAC;QAClC,CAAC,CAAC,IAAA,mCAAgB,EAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;AACtC,CAAC,CAAA;AANY,QAAA,UAAU,cAMtB;AAEY,QAAA,IAAI,GAAG,kBAAU,CAAA;AACjB,QAAA,MAAM,GAAG,+BAAY,CAAA;AACrB,QAAA,UAAU,GAAG,mCAAgB,CAAA;AAC7B,QAAA,MAAM,GAAG,+BAAY,CAAA;AACrB,QAAA,UAAU,GAAG,mCAAgB,CAAA;AAC7B,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CACjC,KAAK,EAAE,IAAY,EAAE,IAAoB,EAAE,EAAE;IAC3C,IAAI,GAAG,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAA;IACpB,MAAM,QAAQ,GAAG,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAA;IAC9B,OAAO,IAAA,yBAAS,EAAC,QAAQ,CAAC;QACxB,CAAC,CAAC,IAAA,+BAAY,EAAC,IAAI,EAAE,QAAQ,CAAC;QAC9B,CAAC,CAAC,IAAA,+BAAY,EAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;AAClC,CAAC,EACD;IACE,UAAU,EAAV,kBAAU;IACV,YAAY,EAAZ,+BAAY;IACZ,gBAAgB,EAAhB,mCAAgB;IAChB,YAAY,EAAZ,+BAAY;IACZ,gBAAgB,EAAhB,mCAAgB;IAEhB,IAAI,EAAE,kBAAU;IAChB,MAAM,EAAE,+BAAY;IACpB,UAAU,EAAE,mCAAgB;IAC5B,MAAM,EAAE,+BAAY;IACpB,UAAU,EAAE,mCAAgB;IAC5B,SAAS,EAAT,yBAAS;IACT,aAAa,EAAb,6BAAa;CACd,CACF,CAAA","sourcesContent":["import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nimport { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\nimport { pathArg } from './path-arg.js'\nimport { useNative, useNativeSync } from './use-native.js'\n/* c8 ignore start */\nexport { mkdirpManual, 
mkdirpManualSync } from './mkdirp-manual.js'\nexport { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'\nexport { useNative, useNativeSync } from './use-native.js'\n/* c8 ignore stop */\n\nexport const mkdirpSync = (path: string, opts?: MkdirpOptions) => {\n path = pathArg(path)\n const resolved = optsArg(opts)\n return useNativeSync(resolved)\n ? mkdirpNativeSync(path, resolved)\n : mkdirpManualSync(path, resolved)\n}\n\nexport const sync = mkdirpSync\nexport const manual = mkdirpManual\nexport const manualSync = mkdirpManualSync\nexport const native = mkdirpNative\nexport const nativeSync = mkdirpNativeSync\nexport const mkdirp = Object.assign(\n async (path: string, opts?: MkdirpOptions) => {\n path = pathArg(path)\n const resolved = optsArg(opts)\n return useNative(resolved)\n ? mkdirpNative(path, resolved)\n : mkdirpManual(path, resolved)\n },\n {\n mkdirpSync,\n mkdirpNative,\n mkdirpNativeSync,\n mkdirpManual,\n mkdirpManualSync,\n\n sync: mkdirpSync,\n native: mkdirpNative,\n nativeSync: mkdirpNativeSync,\n manual: mkdirpManual,\n manualSync: mkdirpManualSync,\n useNative,\n useNativeSync,\n }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts new file mode 100644 index 00000000000000..e49cdf9f1bd122 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts @@ -0,0 +1,6 @@ +import { MkdirpOptions } from './opts-arg.js'; +export declare const mkdirpManualSync: (path: string, options?: MkdirpOptions, made?: string | undefined | void) => string | undefined | void; +export declare const mkdirpManual: ((path: string, options?: MkdirpOptions, made?: string | undefined | void) => Promise) & { + sync: (path: string, options?: MkdirpOptions, made?: string | undefined | void) => string | undefined | void; +}; +//# sourceMappingURL=mkdirp-manual.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts.map new file mode 100644 index 00000000000000..9301bab1ffb35b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"mkdirp-manual.d.ts","sourceRoot":"","sources":["../../../src/mkdirp-manual.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAEtD,eAAO,MAAM,gBAAgB,SACrB,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,MAAM,GAAG,SAAS,GAAG,IAmCvB,CAAA;AAED,eAAO,MAAM,YAAY,UAEf,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,QAAQ,MAAM,GAAG,SAAS,GAAG,IAAI,CAAC;iBA7C/B,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,MAAM,GAAG,SAAS,GAAG,IAAI;CAqF3B,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js new file mode 100644 index 00000000000000..d9bd1d8bb5a49b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js @@ -0,0 +1,79 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.mkdirpManual = exports.mkdirpManualSync = void 0; +const path_1 = require("path"); +const opts_arg_js_1 = require("./opts-arg.js"); +const mkdirpManualSync = (path, options, made) => { + const parent = (0, 
path_1.dirname)(path); + const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: false }; + if (parent === path) { + try { + return opts.mkdirSync(path, opts); + } + catch (er) { + // swallowed by recursive implementation on posix systems + // any other error is a failure + const fer = er; + if (fer && fer.code !== 'EISDIR') { + throw er; + } + return; + } + } + try { + opts.mkdirSync(path, opts); + return made || path; + } + catch (er) { + const fer = er; + if (fer && fer.code === 'ENOENT') { + return (0, exports.mkdirpManualSync)(path, opts, (0, exports.mkdirpManualSync)(parent, opts, made)); + } + if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') { + throw er; + } + try { + if (!opts.statSync(path).isDirectory()) + throw er; + } + catch (_) { + throw er; + } + } +}; +exports.mkdirpManualSync = mkdirpManualSync; +exports.mkdirpManual = Object.assign(async (path, options, made) => { + const opts = (0, opts_arg_js_1.optsArg)(options); + opts.recursive = false; + const parent = (0, path_1.dirname)(path); + if (parent === path) { + return opts.mkdirAsync(path, opts).catch(er => { + // swallowed by recursive implementation on posix systems + // any other error is a failure + const fer = er; + if (fer && fer.code !== 'EISDIR') { + throw er; + } + }); + } + return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => { + const fer = er; + if (fer && fer.code === 'ENOENT') { + return (0, exports.mkdirpManual)(parent, opts).then((made) => (0, exports.mkdirpManual)(path, opts, made)); + } + if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') { + throw er; + } + return opts.statAsync(path).then(st => { + if (st.isDirectory()) { + return made; + } + else { + throw er; + } + }, () => { + throw er; + }); + }); +}, { sync: exports.mkdirpManualSync }); +//# sourceMappingURL=mkdirp-manual.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js.map new file mode 100644 index 00000000000000..ff7ba24dca32ad --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"mkdirp-manual.js","sourceRoot":"","sources":["../../../src/mkdirp-manual.ts"],"names":[],"mappings":";;;AAAA,+BAA8B;AAC9B,+CAAsD;AAE/C,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,OAAuB,EACvB,IAAgC,EACL,EAAE;IAC7B,MAAM,MAAM,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,IAAI,GAAG,EAAE,GAAG,IAAA,qBAAO,EAAC,OAAO,CAAC,EAAE,SAAS,EAAE,KAAK,EAAE,CAAA;IAEtD,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,IAAI;YACF,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SAClC;QAAC,OAAO,EAAE,EAAE;YACX,yDAAyD;YACzD,+BAA+B;YAC/B,MAAM,GAAG,GAAG,EAA2B,CAAA;YACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBAChC,MAAM,EAAE,CAAA;aACT;YACD,OAAM;SACP;KACF;IAED,IAAI;QACF,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QAC1B,OAAO,IAAI,IAAI,IAAI,CAAA;KACpB;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,IAAA,wBAAgB,EAAC,IAAI,EAAE,IAAI,EAAE,IAAA,wBAAgB,EAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;SAC1E;QACD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,OAAO,EAAE;YAC/D,MAAM,EAAE,CAAA;SACT;QACD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE;gBAAE,MAAM,EAAE,CAAA;SACjD;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,EAAE,CAAA;SACT;KACF;AACH,CAAC,CAAA;AAvCY,QAAA,gBAAgB,oBAuC5B;AAEY,QAAA,YAAY,GAAG,MAAM,CAAC,MAAM,CACvC,KAAK,EACH,IAAY,EACZ,OAAuB,EACvB,IAAgC,EACI,EAAE;IACtC,MAAM,IAAI,GAAG,IAAA,qBAAO,EAAC,OAAO,CAAC,CAAA;IAC7B,IAAI,CAAC,SAAS,GAAG,KAAK,CAAA;IACtB,MAAM,MAAM,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;YAC5C,yDAAyD;YACzD,+BAA+B;YAC/B,MAAM,GAAG,GAAG,EAA2B,CAAA;YACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBAChC,MAAM,EAAE,CAAA;aACT;QACH,CAAC,CAAC,CAAA;KACH;IAED,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,IAAI,CACrC,GAAG,EAAE,CAAC,IAAI,IAAI,IAAI,EAClB,KAAK,EAAC,EAAE,EAAC,EAAE;QACT,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,IAAA,oBAAY,EAAC,MAAM,EAAE,IAAI,CAAC,CAAC,IAAI,CACpC,CAAC,IAAgC,EAAE,EAAE,CAAC,IAAA,oBAAY,EAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CACrE,CAAA;SACF;QACD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,IAAI,GAAG,CAAC,IAAI,KAAK,OAAO,EAAE;YACxD,MAAM,EAAE,CAAA;SACT;QACD,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,IAAI,CAC9B,EAAE,CAAC,EAAE;YACH,IAAI,EAAE,CAAC,WAAW,EAAE,EAAE;gBACpB,OAAO,IAAI,CAAA;aACZ;iBAAM;gBACL,MAAM,EAAE,CAAA;aACT;QACH,CAAC,EACD,GAAG,EAAE;YACH,MAAM,EAAE,CAAA;QACV,CAAC,CACF,CAAA;IACH,CAAC,CACF,CAAA;AACH,CAAC,EACD,EAAE,IAAI,EAAE,wBAAgB,EAAE,CAC3B,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nexport const mkdirpManualSync = (\n path: string,\n options?: MkdirpOptions,\n made?: string | undefined | void\n): string | undefined | void => {\n const parent = dirname(path)\n const opts = { ...optsArg(options), recursive: false }\n\n if (parent === path) {\n try {\n return opts.mkdirSync(path, opts)\n } catch (er) {\n // swallowed by recursive implementation on posix systems\n // any other error is a failure\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code !== 'EISDIR') {\n throw er\n }\n return\n }\n }\n\n try {\n opts.mkdirSync(path, opts)\n return made || path\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made))\n }\n if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {\n throw er\n }\n try {\n if (!opts.statSync(path).isDirectory()) throw er\n } catch (_) 
{\n throw er\n }\n }\n}\n\nexport const mkdirpManual = Object.assign(\n async (\n path: string,\n options?: MkdirpOptions,\n made?: string | undefined | void\n ): Promise => {\n const opts = optsArg(options)\n opts.recursive = false\n const parent = dirname(path)\n if (parent === path) {\n return opts.mkdirAsync(path, opts).catch(er => {\n // swallowed by recursive implementation on posix systems\n // any other error is a failure\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code !== 'EISDIR') {\n throw er\n }\n })\n }\n\n return opts.mkdirAsync(path, opts).then(\n () => made || path,\n async er => {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManual(parent, opts).then(\n (made?: string | undefined | void) => mkdirpManual(path, opts, made)\n )\n }\n if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {\n throw er\n }\n return opts.statAsync(path).then(\n st => {\n if (st.isDirectory()) {\n return made\n } else {\n throw er\n }\n },\n () => {\n throw er\n }\n )\n }\n )\n },\n { sync: mkdirpManualSync }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts new file mode 100644 index 00000000000000..28b64814b2545a --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts @@ -0,0 +1,6 @@ +import { MkdirpOptions } from './opts-arg.js'; +export declare const mkdirpNativeSync: (path: string, options?: MkdirpOptions) => string | void | undefined; +export declare const mkdirpNative: ((path: string, options?: MkdirpOptions) => Promise) & { + sync: (path: string, options?: MkdirpOptions) => string | void | undefined; +}; +//# sourceMappingURL=mkdirp-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts.map new file mode 100644 index 00000000000000..379c0f6591c686 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"mkdirp-native.d.ts","sourceRoot":"","sources":["../../../src/mkdirp-native.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAEtD,eAAO,MAAM,gBAAgB,SACrB,MAAM,YACF,aAAa,KACtB,MAAM,GAAG,IAAI,GAAG,SAoBlB,CAAA;AAED,eAAO,MAAM,YAAY,UAEf,MAAM,YACF,aAAa,KACtB,QAAQ,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;iBA5B/B,MAAM,YACF,aAAa,KACtB,MAAM,GAAG,IAAI,GAAG,SAAS;CAgD3B,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js new file mode 100644 index 00000000000000..9f00567d7cc200 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js @@ -0,0 +1,50 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.mkdirpNative = exports.mkdirpNativeSync = void 0; +const path_1 = require("path"); +const find_made_js_1 = require("./find-made.js"); +const mkdirp_manual_js_1 = require("./mkdirp-manual.js"); +const opts_arg_js_1 = require("./opts-arg.js"); +const mkdirpNativeSync = (path, options) => { + const opts = (0, opts_arg_js_1.optsArg)(options); + opts.recursive = true; + const parent = (0, path_1.dirname)(path); + if (parent === path) { + return 
opts.mkdirSync(path, opts); + } + const made = (0, find_made_js_1.findMadeSync)(opts, path); + try { + opts.mkdirSync(path, opts); + return made; + } + catch (er) { + const fer = er; + if (fer && fer.code === 'ENOENT') { + return (0, mkdirp_manual_js_1.mkdirpManualSync)(path, opts); + } + else { + throw er; + } + } +}; +exports.mkdirpNativeSync = mkdirpNativeSync; +exports.mkdirpNative = Object.assign(async (path, options) => { + const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: true }; + const parent = (0, path_1.dirname)(path); + if (parent === path) { + return await opts.mkdirAsync(path, opts); + } + return (0, find_made_js_1.findMade)(opts, path).then((made) => opts + .mkdirAsync(path, opts) + .then(m => made || m) + .catch(er => { + const fer = er; + if (fer && fer.code === 'ENOENT') { + return (0, mkdirp_manual_js_1.mkdirpManual)(path, opts); + } + else { + throw er; + } + })); +}, { sync: exports.mkdirpNativeSync }); +//# sourceMappingURL=mkdirp-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js.map new file mode 100644 index 00000000000000..1f889ee98876cc --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"mkdirp-native.js","sourceRoot":"","sources":["../../../src/mkdirp-native.ts"],"names":[],"mappings":";;;AAAA,+BAA8B;AAC9B,iDAAuD;AACvD,yDAAmE;AACnE,+CAAsD;AAE/C,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,OAAuB,EACI,EAAE;IAC7B,MAAM,IAAI,GAAG,IAAA,qBAAO,EAAC,OAAO,CAAC,CAAA;IAC7B,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;IACrB,MAAM,MAAM,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;KAClC;IAED,MAAM,IAAI,GAAG,IAAA,2BAAY,EAAC,IAAI,EAAE,IAAI,CAAC,CAAA;IACrC,IAAI;QACF,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QAC1B,OAAO,IAAI,CAAA;KACZ;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,IAAA,mCAAgB,EAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SACpC;aAAM;YACL,MAAM,EAAE,CAAA;SACT;KACF;AACH,CAAC,CAAA;AAvBY,QAAA,gBAAgB,oBAuB5B;AAEY,QAAA,YAAY,GAAG,MAAM,CAAC,MAAM,CACvC,KAAK,EACH,IAAY,EACZ,OAAuB,EACa,EAAE;IACtC,MAAM,IAAI,GAAG,EAAE,GAAG,IAAA,qBAAO,EAAC,OAAO,CAAC,EAAE,SAAS,EAAE,IAAI,EAAE,CAAA;IACrD,MAAM,MAAM,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;KACzC;IAED,OAAO,IAAA,uBAAQ,EAAC,IAAI,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,IAAyB,EAAE,EAAE,CAC7D,IAAI;SACD,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC;SACtB,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,IAAI,CAAC,CAAC;SACpB,KAAK,CAAC,EAAE,CAAC,EAAE;QACV,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,IAAA,+BAAY,EAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SAChC;aAAM;YACL,MAAM,EAAE,CAAA;SACT;IACH,CAAC,CAAC,CACL,CAAA;AACH,CAAC,EACD,EAAE,IAAI,EAAE,wBAAgB,EAAE,CAC3B,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { findMade, findMadeSync } from './find-made.js'\nimport { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nexport const mkdirpNativeSync = (\n path: string,\n options?: MkdirpOptions\n): string | void | undefined => {\n const opts = optsArg(options)\n opts.recursive = true\n const parent = dirname(path)\n if (parent === path) {\n return opts.mkdirSync(path, opts)\n }\n\n const made = findMadeSync(opts, path)\n 
try {\n opts.mkdirSync(path, opts)\n return made\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManualSync(path, opts)\n } else {\n throw er\n }\n }\n}\n\nexport const mkdirpNative = Object.assign(\n async (\n path: string,\n options?: MkdirpOptions\n ): Promise => {\n const opts = { ...optsArg(options), recursive: true }\n const parent = dirname(path)\n if (parent === path) {\n return await opts.mkdirAsync(path, opts)\n }\n\n return findMade(opts, path).then((made?: string | undefined) =>\n opts\n .mkdirAsync(path, opts)\n .then(m => made || m)\n .catch(er => {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManual(path, opts)\n } else {\n throw er\n }\n })\n )\n },\n { sync: mkdirpNativeSync }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts new file mode 100644 index 00000000000000..73d076b3b6923c --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts @@ -0,0 +1,42 @@ +/// +/// +import { MakeDirectoryOptions, Stats } from 'fs'; +export interface FsProvider { + stat?: (path: string, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any) => any; + mkdir?: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }, callback: (err: NodeJS.ErrnoException | null, made?: string) => any) => any; + statSync?: (path: string) => Stats; + mkdirSync?: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }) => string | undefined; +} +interface Options extends FsProvider { + mode?: number | string; + fs?: FsProvider; + mkdirAsync?: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }) => Promise; + statAsync?: (path: string) => Promise; +} +export type MkdirpOptions = Options | number | string; +export interface MkdirpOptionsResolved { + mode: number; + fs: FsProvider; + mkdirAsync: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }) => Promise; + statAsync: (path: string) => Promise; + stat: (path: string, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any) => any; + mkdir: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }, callback: (err: NodeJS.ErrnoException | null, made?: string) => any) => any; + statSync: (path: string) => Stats; + mkdirSync: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }) => string | undefined; + recursive?: boolean; +} +export declare const optsArg: (opts?: MkdirpOptions) => MkdirpOptionsResolved; +export {}; +//# sourceMappingURL=opts-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts.map new file mode 100644 index 00000000000000..e575161714f651 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"opts-arg.d.ts","sourceRoot":"","sources":["../../../src/opts-arg.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,oBAAoB,EAIpB,KAAK,EAEN,MAAM,IAAI,CAAA;AAEX,MAAM,WAAW,UAAU;IACzB,IAAI,CAAC,EAAE,CACL,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,KAAK,EAAE,KAAK,KAAK,GAAG,KAC/D,GAAG,CAAA;IACR,KAAK,CAAC,EAAE,CACN,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,EACpD,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,GAAG,KAChE,GAAG,CAAA;IACR,QAAQ,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,KAAK,CAAA;IAClC,SAAS,CAAC,EAAE,CACV,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,MAAM,GAAG,SAAS,CAAA;CACxB;AAED,UAAU,OAAQ,SAAQ,UAAU;IAClC,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;IACtB,EAAE,CAAC,EAAE,UAAU,CAAA;IACf,UAAU,CAAC,EAAE,CACX,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAAA;IAChC,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,KAAK,CAAC,CAAA;CAC7C;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,GAAG,MAAM,CAAA;AAErD,MAAM,WAAW,qBAAqB;IACpC,IAAI,EAAE,MAAM,CAAA;IACZ,EAAE,EAAE,UAAU,CAAA;IACd,UAAU,EAAE,CACV,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAAA;IAChC,SAAS,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,KAAK,CAAC,CAAA;IAC3C,IAAI,EAAE,CACJ,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,KAAK,EAAE,KAAK,KAAK,GAAG,KAC/D,GAAG,CAAA;IACR,KAAK,EAAE,CACL,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,EACpD,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,GAAG,KAChE,GAAG,CAAA;IACR,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,KAAK,CAAA;IACjC,SAAS,EAAE,CACT,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,MAAM,GAAG,SAAS,CAAA;IACvB,SAAS,CAAC,EAAE,OAAO,CAAA;CACpB;AAED,eAAO,MAAM,OAAO,UAAW,aAAa,KAAG,qBA2C9C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js new file mode 100644 index 00000000000000..e8f486c0905957 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.optsArg = void 0; +const fs_1 = require("fs"); +const optsArg = (opts) => { + if (!opts) { + opts = { mode: 0o777 }; + } + else if (typeof opts === 'object') { + opts = { mode: 0o777, ...opts }; + } + else if (typeof opts === 'number') { + opts = { mode: opts }; + } + else if (typeof opts === 'string') { + opts = { mode: parseInt(opts, 8) }; + } + else { + throw new TypeError('invalid options argument'); + } + const resolved = opts; + const optsFs = opts.fs || {}; + opts.mkdir = opts.mkdir || optsFs.mkdir || fs_1.mkdir; + opts.mkdirAsync = opts.mkdirAsync + ? opts.mkdirAsync + : async (path, options) => { + return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made))); + }; + opts.stat = opts.stat || optsFs.stat || fs_1.stat; + opts.statAsync = opts.statAsync + ? opts.statAsync + : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? 
rej(err) : res(stats)))); + opts.statSync = opts.statSync || optsFs.statSync || fs_1.statSync; + opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || fs_1.mkdirSync; + return resolved; +}; +exports.optsArg = optsArg; +//# sourceMappingURL=opts-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js.map new file mode 100644 index 00000000000000..fd5590f40f54cd --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"opts-arg.js","sourceRoot":"","sources":["../../../src/opts-arg.ts"],"names":[],"mappings":";;;AAAA,2BAOW;AAwDJ,MAAM,OAAO,GAAG,CAAC,IAAoB,EAAyB,EAAE;IACrE,IAAI,CAAC,IAAI,EAAE;QACT,IAAI,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,CAAA;KACvB;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,IAAI,EAAE,CAAA;KAChC;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,CAAA;KACtB;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,CAAA;KACnC;SAAM;QACL,MAAM,IAAI,SAAS,CAAC,0BAA0B,CAAC,CAAA;KAChD;IAED,MAAM,QAAQ,GAAG,IAA6B,CAAA;IAC9C,MAAM,MAAM,GAAG,IAAI,CAAC,EAAE,IAAI,EAAE,CAAA;IAE5B,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,IAAI,UAAK,CAAA;IAEhD,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU;QAC/B,CAAC,CAAC,IAAI,CAAC,UAAU;QACjB,CAAC,CAAC,KAAK,EACH,IAAY,EACZ,OAAuD,EAC1B,EAAE;YAC/B,OAAO,IAAI,OAAO,CAAqB,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAClD,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CACzC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CACzB,CACF,CAAA;QACH,CAAC,CAAA;IAEL,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,IAAI,SAAI,CAAA;IAC5C,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS;QAC7B,CAAC,CAAC,IAAI,CAAC,SAAS;QAChB,CAAC,CAAC,KAAK,EAAE,IAAY,EAAE,EAAE,CACrB,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,CAAA;IAEP,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,MAAM,CAAC,QAAQ,IAAI,aAAQ,CAAA;IAC5D,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,IAAI,MAAM,CAAC,SAAS,IAAI,cAAS,CAAA;IAEhE,OAAO,QAAQ,CAAA;AACjB,CAAC,CAAA;AA3CY,QAAA,OAAO,WA2CnB","sourcesContent":["import {\n MakeDirectoryOptions,\n mkdir,\n mkdirSync,\n stat,\n Stats,\n statSync,\n} from 'fs'\n\nexport interface FsProvider {\n stat?: (\n path: string,\n callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any\n ) => any\n mkdir?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean },\n callback: (err: NodeJS.ErrnoException | null, made?: string) => any\n ) => any\n statSync?: (path: string) => Stats\n mkdirSync?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => string | undefined\n}\n\ninterface Options extends FsProvider {\n mode?: number | string\n fs?: FsProvider\n mkdirAsync?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => Promise\n statAsync?: (path: string) => Promise\n}\n\nexport type MkdirpOptions = Options | number | string\n\nexport interface MkdirpOptionsResolved {\n mode: number\n fs: FsProvider\n mkdirAsync: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => Promise\n statAsync: (path: string) => Promise\n stat: (\n path: string,\n callback: (err: NodeJS.ErrnoException | null, stats: Stats) => 
any\n ) => any\n mkdir: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean },\n callback: (err: NodeJS.ErrnoException | null, made?: string) => any\n ) => any\n statSync: (path: string) => Stats\n mkdirSync: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => string | undefined\n recursive?: boolean\n}\n\nexport const optsArg = (opts?: MkdirpOptions): MkdirpOptionsResolved => {\n if (!opts) {\n opts = { mode: 0o777 }\n } else if (typeof opts === 'object') {\n opts = { mode: 0o777, ...opts }\n } else if (typeof opts === 'number') {\n opts = { mode: opts }\n } else if (typeof opts === 'string') {\n opts = { mode: parseInt(opts, 8) }\n } else {\n throw new TypeError('invalid options argument')\n }\n\n const resolved = opts as MkdirpOptionsResolved\n const optsFs = opts.fs || {}\n\n opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir\n\n opts.mkdirAsync = opts.mkdirAsync\n ? opts.mkdirAsync\n : async (\n path: string,\n options: MakeDirectoryOptions & { recursive?: boolean }\n ): Promise => {\n return new Promise((res, rej) =>\n resolved.mkdir(path, options, (er, made) =>\n er ? rej(er) : res(made)\n )\n )\n }\n\n opts.stat = opts.stat || optsFs.stat || stat\n opts.statAsync = opts.statAsync\n ? opts.statAsync\n : async (path: string) =>\n new Promise((res, rej) =>\n resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats)))\n )\n\n opts.statSync = opts.statSync || optsFs.statSync || statSync\n opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync\n\n return resolved\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts new file mode 100644 index 00000000000000..ad0ccfc482a485 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts @@ -0,0 +1,2 @@ +export declare const pathArg: (path: string) => string; +//# sourceMappingURL=path-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts.map new file mode 100644 index 00000000000000..3b52b077c6c05c --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.d.ts","sourceRoot":"","sources":["../../../src/path-arg.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,OAAO,SAAU,MAAM,WAyBnC,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js new file mode 100644 index 00000000000000..a6b457f6e23d58 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.pathArg = void 0; +const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform; +const path_1 = require("path"); +const pathArg = (path) => { + if (/\0/.test(path)) { + // simulate same failure that node raises + throw Object.assign(new TypeError('path must be a string without null bytes'), { + path, + code: 'ERR_INVALID_ARG_VALUE', + }); + } + path = (0, path_1.resolve)(path); + if (platform === 'win32') { + const badWinChars = /[*|"<>?:]/; + const { root } = (0, path_1.parse)(path); + if 
(badWinChars.test(path.substring(root.length))) { + throw Object.assign(new Error('Illegal characters in path.'), { + path, + code: 'EINVAL', + }); + } + } + return path; +}; +exports.pathArg = pathArg; +//# sourceMappingURL=path-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js.map new file mode 100644 index 00000000000000..ad3b5d38cad3cd --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.js","sourceRoot":"","sources":["../../../src/path-arg.ts"],"names":[],"mappings":";;;AAAA,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,2BAA2B,IAAI,OAAO,CAAC,QAAQ,CAAA;AAC5E,+BAAqC;AAC9B,MAAM,OAAO,GAAG,CAAC,IAAY,EAAE,EAAE;IACtC,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;QACnB,yCAAyC;QACzC,MAAM,MAAM,CAAC,MAAM,CACjB,IAAI,SAAS,CAAC,0CAA0C,CAAC,EACzD;YACE,IAAI;YACJ,IAAI,EAAE,uBAAuB;SAC9B,CACF,CAAA;KACF;IAED,IAAI,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IACpB,IAAI,QAAQ,KAAK,OAAO,EAAE;QACxB,MAAM,WAAW,GAAG,WAAW,CAAA;QAC/B,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE;YACjD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,IAAI;gBACJ,IAAI,EAAE,QAAQ;aACf,CAAC,CAAA;SACH;KACF;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAzBY,QAAA,OAAO,WAyBnB","sourcesContent":["const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform\nimport { parse, resolve } from 'path'\nexport const pathArg = (path: string) => {\n if (/\\0/.test(path)) {\n // simulate same failure that node raises\n throw Object.assign(\n new TypeError('path must be a string without null bytes'),\n {\n path,\n code: 'ERR_INVALID_ARG_VALUE',\n }\n )\n }\n\n path = resolve(path)\n if (platform === 'win32') {\n const badWinChars = /[*|\"<>?:]/\n const { root } = parse(path)\n if (badWinChars.test(path.substring(root.length))) {\n throw Object.assign(new Error('Illegal characters in path.'), {\n path,\n code: 'EINVAL',\n })\n }\n }\n\n return path\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.d.ts new file mode 100644 index 00000000000000..1c6cb619e30405 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.d.ts @@ -0,0 +1,6 @@ +import { MkdirpOptions } from './opts-arg.js'; +export declare const useNativeSync: (opts?: MkdirpOptions) => boolean; +export declare const useNative: ((opts?: MkdirpOptions) => boolean) & { + sync: (opts?: MkdirpOptions) => boolean; +}; +//# sourceMappingURL=use-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.d.ts.map new file mode 100644 index 00000000000000..7dc275e322ea3b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.d.ts","sourceRoot":"","sources":["../../../src/use-native.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAMtD,eAAO,MAAM,aAAa,UAEd,aAAa,YAA0C,CAAA;AAEnE,eAAO,MAAM,SAAS,WAGR,aAAa;kBALf,aAAa;CASxB,CAAA"} \ No newline at end of file diff --git 
a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js new file mode 100644 index 00000000000000..550b3452688ee5 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.useNative = exports.useNativeSync = void 0; +const fs_1 = require("fs"); +const opts_arg_js_1 = require("./opts-arg.js"); +const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version; +const versArr = version.replace(/^v/, '').split('.'); +const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12); +exports.useNativeSync = !hasNative + ? () => false + : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdirSync === fs_1.mkdirSync; +exports.useNative = Object.assign(!hasNative + ? () => false + : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdir === fs_1.mkdir, { + sync: exports.useNativeSync, +}); +//# sourceMappingURL=use-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js.map new file mode 100644 index 00000000000000..9a15efebb9ec28 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.js","sourceRoot":"","sources":["../../../src/use-native.ts"],"names":[],"mappings":";;;AAAA,2BAAqC;AACrC,+CAAsD;AAEtD,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,OAAO,CAAC,OAAO,CAAA;AAC9E,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;AACpD,MAAM,SAAS,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;AAElE,QAAA,aAAa,GAAG,CAAC,SAAS;IACrC,CAAC,CAAC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,CAAC,IAAoB,EAAE,EAAE,CAAC,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAC,SAAS,KAAK,cAAS,CAAA;AAEtD,QAAA,SAAS,GAAG,MAAM,CAAC,MAAM,CACpC,CAAC,SAAS;IACR,CAAC,CAAC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,CAAC,IAAoB,EAAE,EAAE,CAAC,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAC,KAAK,KAAK,UAAK,EAC3D;IACE,IAAI,EAAE,qBAAa;CACpB,CACF,CAAA","sourcesContent":["import { mkdir, mkdirSync } from 'fs'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nconst version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version\nconst versArr = version.replace(/^v/, '').split('.')\nconst hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12)\n\nexport const useNativeSync = !hasNative\n ? () => false\n : (opts?: MkdirpOptions) => optsArg(opts).mkdirSync === mkdirSync\n\nexport const useNative = Object.assign(\n !hasNative\n ? 
() => false\n    : (opts?: MkdirpOptions) => optsArg(opts).mkdir === mkdir,\n  {\n    sync: useNativeSync,\n  }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.d.ts new file mode 100644 index 00000000000000..e47794b3bb72a3 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.d.ts @@ -0,0 +1,4 @@ +import { MkdirpOptionsResolved } from './opts-arg.js'; +export declare const findMade: (opts: MkdirpOptionsResolved, parent: string, path?: string) => Promise<undefined | string>; +export declare const findMadeSync: (opts: MkdirpOptionsResolved, parent: string, path?: string) => undefined | string; +//# sourceMappingURL=find-made.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.d.ts.map new file mode 100644 index 00000000000000..411aad1410eb7a --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"find-made.d.ts","sourceRoot":"","sources":["../../src/find-made.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,qBAAqB,EAAE,MAAM,eAAe,CAAA;AAErD,eAAO,MAAM,QAAQ,SACb,qBAAqB,UACnB,MAAM,SACP,MAAM,KACZ,QAAQ,SAAS,GAAG,MAAM,CAe5B,CAAA;AAED,eAAO,MAAM,YAAY,SACjB,qBAAqB,UACnB,MAAM,SACP,MAAM,KACZ,SAAS,GAAG,MAad,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js new file mode 100644 index 00000000000000..3e72fd59a2c1fb --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js @@ -0,0 +1,30 @@ +import { dirname } from 'path'; +export const findMade = async (opts, parent, path) => { + // we never want the 'made' return value to be a root directory + if (path === parent) { + return; + } + return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later + // will fail later + er => { + const fer = er; + return fer && fer.code === 'ENOENT' + ? findMade(opts, dirname(parent), parent) + : undefined; + }); +}; +export const findMadeSync = (opts, parent, path) => { + if (path === parent) { + return undefined; + } + try { + return opts.statSync(parent).isDirectory() ? path : undefined; + } + catch (er) { + const fer = er; + return fer && fer.code === 'ENOENT' + ?
findMadeSync(opts, dirname(parent), parent) + : undefined; + } +}; +//# sourceMappingURL=find-made.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js.map new file mode 100644 index 00000000000000..7b58089c6266c1 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js.map @@ -0,0 +1 @@ +{"version":3,"file":"find-made.js","sourceRoot":"","sources":["../../src/find-made.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAG9B,MAAM,CAAC,MAAM,QAAQ,GAAG,KAAK,EAC3B,IAA2B,EAC3B,MAAc,EACd,IAAa,EACgB,EAAE;IAC/B,+DAA+D;IAC/D,IAAI,IAAI,KAAK,MAAM,EAAE;QACnB,OAAM;KACP;IAED,OAAO,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAChC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,kBAAkB;IAC/D,AAD6C,kBAAkB;IAC/D,EAAE,CAAC,EAAE;QACH,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,OAAO,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ;YACjC,CAAC,CAAC,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC;YACzC,CAAC,CAAC,SAAS,CAAA;IACf,CAAC,CACF,CAAA;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,YAAY,GAAG,CAC1B,IAA2B,EAC3B,MAAc,EACd,IAAa,EACO,EAAE;IACtB,IAAI,IAAI,KAAK,MAAM,EAAE;QACnB,OAAO,SAAS,CAAA;KACjB;IAED,IAAI;QACF,OAAO,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAA;KAC9D;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,OAAO,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ;YACjC,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,OAAO,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC;YAC7C,CAAC,CAAC,SAAS,CAAA;KACd;AACH,CAAC,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { MkdirpOptionsResolved } from './opts-arg.js'\n\nexport const findMade = async (\n opts: MkdirpOptionsResolved,\n parent: string,\n path?: string\n): Promise => {\n // we never want the 'made' return value to be a root directory\n if (path === parent) {\n return\n }\n\n return opts.statAsync(parent).then(\n st => (st.isDirectory() ? path : undefined), // will fail later\n er => {\n const fer = er as NodeJS.ErrnoException\n return fer && fer.code === 'ENOENT'\n ? findMade(opts, dirname(parent), parent)\n : undefined\n }\n )\n}\n\nexport const findMadeSync = (\n opts: MkdirpOptionsResolved,\n parent: string,\n path?: string\n): undefined | string => {\n if (path === parent) {\n return undefined\n }\n\n try {\n return opts.statSync(parent).isDirectory() ? path : undefined\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n return fer && fer.code === 'ENOENT'\n ? 
findMadeSync(opts, dirname(parent), parent)\n      : undefined\n  }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.d.ts new file mode 100644 index 00000000000000..fc9e43b3a45de1 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.d.ts @@ -0,0 +1,39 @@ +import { MkdirpOptions } from './opts-arg.js'; +export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'; +export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'; +export { useNative, useNativeSync } from './use-native.js'; +export declare const mkdirpSync: (path: string, opts?: MkdirpOptions) => string | void; +export declare const sync: (path: string, opts?: MkdirpOptions) => string | void; +export declare const manual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise<string | void | undefined>) & { + sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; +}; +export declare const manualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; +export declare const native: ((path: string, options?: MkdirpOptions | undefined) => Promise<string | void | undefined>) & { + sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; +}; +export declare const nativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; +export declare const mkdirp: ((path: string, opts?: MkdirpOptions) => Promise<string | void | undefined>) & { + mkdirpSync: (path: string, opts?: MkdirpOptions) => string | void; + mkdirpNative: ((path: string, options?: MkdirpOptions | undefined) => Promise<string | void | undefined>) & { + sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + }; + mkdirpNativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + mkdirpManual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise<string | void | undefined>) & { + sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + }; + mkdirpManualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + sync: (path: string, opts?: MkdirpOptions) => string | void; + native: ((path: string, options?: MkdirpOptions | undefined) => Promise<string | void | undefined>) & { + sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + }; + nativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + manual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise<string | void | undefined>) & { + sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + }; + manualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + useNative: ((opts?: MkdirpOptions | undefined) => boolean) & { + sync: (opts?: MkdirpOptions | undefined) => boolean; + }; + useNativeSync: (opts?: MkdirpOptions | undefined) => boolean; +}; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.d.ts.map new file mode 100644 index
00000000000000..cfcc78083857b1 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAItD,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAG1D,eAAO,MAAM,UAAU,SAAU,MAAM,SAAS,aAAa,kBAM5D,CAAA;AAED,eAAO,MAAM,IAAI,SARgB,MAAM,SAAS,aAAa,kBAQ/B,CAAA;AAC9B,eAAO,MAAM,MAAM;;CAAe,CAAA;AAClC,eAAO,MAAM,UAAU,oHAAmB,CAAA;AAC1C,eAAO,MAAM,MAAM;;CAAe,CAAA;AAClC,eAAO,MAAM,UAAU,kFAAmB,CAAA;AAC1C,eAAO,MAAM,MAAM,UACJ,MAAM,SAAS,aAAa;uBAdV,MAAM,SAAS,aAAa;;;;;;;;;iBAA5B,MAAM,SAAS,aAAa;;;;;;;;;;;;;CAoC5D,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js new file mode 100644 index 00000000000000..0217ecc8cdd83d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js @@ -0,0 +1,43 @@ +import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'; +import { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'; +import { optsArg } from './opts-arg.js'; +import { pathArg } from './path-arg.js'; +import { useNative, useNativeSync } from './use-native.js'; +/* c8 ignore start */ +export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'; +export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'; +export { useNative, useNativeSync } from './use-native.js'; +/* c8 ignore stop */ +export const mkdirpSync = (path, opts) => { + path = pathArg(path); + const resolved = optsArg(opts); + return useNativeSync(resolved) + ? mkdirpNativeSync(path, resolved) + : mkdirpManualSync(path, resolved); +}; +export const sync = mkdirpSync; +export const manual = mkdirpManual; +export const manualSync = mkdirpManualSync; +export const native = mkdirpNative; +export const nativeSync = mkdirpNativeSync; +export const mkdirp = Object.assign(async (path, opts) => { + path = pathArg(path); + const resolved = optsArg(opts); + return useNative(resolved) + ? 
mkdirpNative(path, resolved) + : mkdirpManual(path, resolved); +}, { + mkdirpSync, + mkdirpNative, + mkdirpNativeSync, + mkdirpManual, + mkdirpManualSync, + sync: mkdirpSync, + native: mkdirpNative, + nativeSync: mkdirpNativeSync, + manual: mkdirpManual, + manualSync: mkdirpManualSync, + useNative, + useNativeSync, +}); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js.map new file mode 100644 index 00000000000000..47a8133a070c8f --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAiB,OAAO,EAAE,MAAM,eAAe,CAAA;AACtD,OAAO,EAAE,OAAO,EAAE,MAAM,eAAe,CAAA;AACvC,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC1D,qBAAqB;AACrB,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC1D,oBAAoB;AAEpB,MAAM,CAAC,MAAM,UAAU,GAAG,CAAC,IAAY,EAAE,IAAoB,EAAE,EAAE;IAC/D,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACpB,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC9B,OAAO,aAAa,CAAC,QAAQ,CAAC;QAC5B,CAAC,CAAC,gBAAgB,CAAC,IAAI,EAAE,QAAQ,CAAC;QAClC,CAAC,CAAC,gBAAgB,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;AACtC,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,IAAI,GAAG,UAAU,CAAA;AAC9B,MAAM,CAAC,MAAM,MAAM,GAAG,YAAY,CAAA;AAClC,MAAM,CAAC,MAAM,UAAU,GAAG,gBAAgB,CAAA;AAC1C,MAAM,CAAC,MAAM,MAAM,GAAG,YAAY,CAAA;AAClC,MAAM,CAAC,MAAM,UAAU,GAAG,gBAAgB,CAAA;AAC1C,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CACjC,KAAK,EAAE,IAAY,EAAE,IAAoB,EAAE,EAAE;IAC3C,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACpB,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC9B,OAAO,SAAS,CAAC,QAAQ,CAAC;QACxB,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,QAAQ,CAAC;QAC9B,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;AAClC,CAAC,EACD;IACE,UAAU;IACV,YAAY;IACZ,gBAAgB;IAChB,YAAY;IACZ,gBAAgB;IAEhB,IAAI,EAAE,UAAU;IAChB,MAAM,EAAE,YAAY;IACpB,UAAU,EAAE,gBAAgB;IAC5B,MAAM,EAAE,YAAY;IACpB,UAAU,EAAE,gBAAgB;IAC5B,SAAS;IACT,aAAa;CACd,CACF,CAAA","sourcesContent":["import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nimport { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\nimport { pathArg } from './path-arg.js'\nimport { useNative, useNativeSync } from './use-native.js'\n/* c8 ignore start */\nexport { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nexport { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'\nexport { useNative, useNativeSync } from './use-native.js'\n/* c8 ignore stop */\n\nexport const mkdirpSync = (path: string, opts?: MkdirpOptions) => {\n path = pathArg(path)\n const resolved = optsArg(opts)\n return useNativeSync(resolved)\n ? mkdirpNativeSync(path, resolved)\n : mkdirpManualSync(path, resolved)\n}\n\nexport const sync = mkdirpSync\nexport const manual = mkdirpManual\nexport const manualSync = mkdirpManualSync\nexport const native = mkdirpNative\nexport const nativeSync = mkdirpNativeSync\nexport const mkdirp = Object.assign(\n async (path: string, opts?: MkdirpOptions) => {\n path = pathArg(path)\n const resolved = optsArg(opts)\n return useNative(resolved)\n ? 
mkdirpNative(path, resolved)\n      : mkdirpManual(path, resolved)\n  },\n  {\n    mkdirpSync,\n    mkdirpNative,\n    mkdirpNativeSync,\n    mkdirpManual,\n    mkdirpManualSync,\n\n    sync: mkdirpSync,\n    native: mkdirpNative,\n    nativeSync: mkdirpNativeSync,\n    manual: mkdirpManual,\n    manualSync: mkdirpManualSync,\n    useNative,\n    useNativeSync,\n  }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts new file mode 100644 index 00000000000000..e49cdf9f1bd122 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts @@ -0,0 +1,6 @@ +import { MkdirpOptions } from './opts-arg.js'; +export declare const mkdirpManualSync: (path: string, options?: MkdirpOptions, made?: string | undefined | void) => string | undefined | void; +export declare const mkdirpManual: ((path: string, options?: MkdirpOptions, made?: string | undefined | void) => Promise<string | undefined | void>) & { + sync: (path: string, options?: MkdirpOptions, made?: string | undefined | void) => string | undefined | void; +}; +//# sourceMappingURL=mkdirp-manual.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts.map new file mode 100644 index 00000000000000..ae7f243d3ca78b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"mkdirp-manual.d.ts","sourceRoot":"","sources":["../../src/mkdirp-manual.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAEtD,eAAO,MAAM,gBAAgB,SACrB,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,MAAM,GAAG,SAAS,GAAG,IAmCvB,CAAA;AAED,eAAO,MAAM,YAAY,UAEf,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,QAAQ,MAAM,GAAG,SAAS,GAAG,IAAI,CAAC;iBA7C/B,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,MAAM,GAAG,SAAS,GAAG,IAAI;CAqF3B,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js new file mode 100644 index 00000000000000..a4d044e02d3bfc --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js @@ -0,0 +1,75 @@ +import { dirname } from 'path'; +import { optsArg } from './opts-arg.js'; +export const mkdirpManualSync = (path, options, made) => { + const parent = dirname(path); + const opts = { ...optsArg(options), recursive: false }; + if (parent === path) { + try { + return opts.mkdirSync(path, opts); + } + catch (er) { + // swallowed by recursive implementation on posix systems + // any other error is a failure + const fer = er; + if (fer && fer.code !== 'EISDIR') { + throw er; + } + return; + } + } + try { + opts.mkdirSync(path, opts); + return made || path; + } + catch (er) { + const fer = er; + if (fer && fer.code === 'ENOENT') { + return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made)); + } + if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') { + throw er; + } + try { + if (!opts.statSync(path).isDirectory()) + throw er; + } + catch (_) { + throw er; + } + } +}; +export const mkdirpManual = Object.assign(async (path, options, made) => { + const opts = optsArg(options); + opts.recursive = false; + const parent = dirname(path); + if (parent === path) { + return opts.mkdirAsync(path,
opts).catch(er => { + // swallowed by recursive implementation on posix systems + // any other error is a failure + const fer = er; + if (fer && fer.code !== 'EISDIR') { + throw er; + } + }); + } + return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => { + const fer = er; + if (fer && fer.code === 'ENOENT') { + return mkdirpManual(parent, opts).then((made) => mkdirpManual(path, opts, made)); + } + if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') { + throw er; + } + return opts.statAsync(path).then(st => { + if (st.isDirectory()) { + return made; + } + else { + throw er; + } + }, () => { + throw er; + }); + }); +}, { sync: mkdirpManualSync }); +//# sourceMappingURL=mkdirp-manual.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js.map new file mode 100644 index 00000000000000..29eab250e126c8 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js.map @@ -0,0 +1 @@ +{"version":3,"file":"mkdirp-manual.js","sourceRoot":"","sources":["../../src/mkdirp-manual.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAC9B,OAAO,EAAiB,OAAO,EAAE,MAAM,eAAe,CAAA;AAEtD,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,OAAuB,EACvB,IAAgC,EACL,EAAE;IAC7B,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,IAAI,GAAG,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC,EAAE,SAAS,EAAE,KAAK,EAAE,CAAA;IAEtD,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,IAAI;YACF,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SAClC;QAAC,OAAO,EAAE,EAAE;YACX,yDAAyD;YACzD,+BAA+B;YAC/B,MAAM,GAAG,GAAG,EAA2B,CAAA;YACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBAChC,MAAM,EAAE,CAAA;aACT;YACD,OAAM;SACP;KACF;IAED,IAAI;QACF,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QAC1B,OAAO,IAAI,IAAI,IAAI,CAAA;KACpB;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,gBAAgB,CAAC,IAAI,EAAE,IAAI,EAAE,gBAAgB,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;SAC1E;QACD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,OAAO,EAAE;YAC/D,MAAM,EAAE,CAAA;SACT;QACD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE;gBAAE,MAAM,EAAE,CAAA;SACjD;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,EAAE,CAAA;SACT;KACF;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,YAAY,GAAG,MAAM,CAAC,MAAM,CACvC,KAAK,EACH,IAAY,EACZ,OAAuB,EACvB,IAAgC,EACI,EAAE;IACtC,MAAM,IAAI,GAAG,OAAO,CAAC,OAAO,CAAC,CAAA;IAC7B,IAAI,CAAC,SAAS,GAAG,KAAK,CAAA;IACtB,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;YAC5C,yDAAyD;YACzD,+BAA+B;YAC/B,MAAM,GAAG,GAAG,EAA2B,CAAA;YACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBAChC,MAAM,EAAE,CAAA;aACT;QACH,CAAC,CAAC,CAAA;KACH;IAED,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,IAAI,CACrC,GAAG,EAAE,CAAC,IAAI,IAAI,IAAI,EAClB,KAAK,EAAC,EAAE,EAAC,EAAE;QACT,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,YAAY,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,IAAI,CACpC,CAAC,IAAgC,EAAE,EAAE,CAAC,YAAY,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CACrE,CAAA;SACF;QACD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,IAAI,GAAG,CAAC,IAAI,KAAK,OAAO,EAAE;YACxD,MAAM,EAAE,CAAA;SACT;QACD,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,IAAI,CAC9B,EAAE,CAAC,EAAE;YACH,IAAI,EAAE,CAAC,WAAW,EAAE,EAAE;gBACpB,OAAO,IAAI,CAAA;aACZ;iBAAM;gBACL,MAAM,EAAE,CAAA;aACT;QACH,CAAC,EACD,GAAG,EAAE;YACH,MAAM,EAAE,CAAA;QACV,CAAC,CACF,CAAA;IACH,CAAC,CA
CF,CAAA;AACH,CAAC,EACD,EAAE,IAAI,EAAE,gBAAgB,EAAE,CAC3B,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nexport const mkdirpManualSync = (\n  path: string,\n  options?: MkdirpOptions,\n  made?: string | undefined | void\n): string | undefined | void => {\n  const parent = dirname(path)\n  const opts = { ...optsArg(options), recursive: false }\n\n  if (parent === path) {\n    try {\n      return opts.mkdirSync(path, opts)\n    } catch (er) {\n      // swallowed by recursive implementation on posix systems\n      // any other error is a failure\n      const fer = er as NodeJS.ErrnoException\n      if (fer && fer.code !== 'EISDIR') {\n        throw er\n      }\n      return\n    }\n  }\n\n  try {\n    opts.mkdirSync(path, opts)\n    return made || path\n  } catch (er) {\n    const fer = er as NodeJS.ErrnoException\n    if (fer && fer.code === 'ENOENT') {\n      return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made))\n    }\n    if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {\n      throw er\n    }\n    try {\n      if (!opts.statSync(path).isDirectory()) throw er\n    } catch (_) {\n      throw er\n    }\n  }\n}\n\nexport const mkdirpManual = Object.assign(\n  async (\n    path: string,\n    options?: MkdirpOptions,\n    made?: string | undefined | void\n  ): Promise<string | undefined | void> => {\n    const opts = optsArg(options)\n    opts.recursive = false\n    const parent = dirname(path)\n    if (parent === path) {\n      return opts.mkdirAsync(path, opts).catch(er => {\n        // swallowed by recursive implementation on posix systems\n        // any other error is a failure\n        const fer = er as NodeJS.ErrnoException\n        if (fer && fer.code !== 'EISDIR') {\n          throw er\n        }\n      })\n    }\n\n    return opts.mkdirAsync(path, opts).then(\n      () => made || path,\n      async er => {\n        const fer = er as NodeJS.ErrnoException\n        if (fer && fer.code === 'ENOENT') {\n          return mkdirpManual(parent, opts).then(\n            (made?: string | undefined | void) => mkdirpManual(path, opts, made)\n          )\n        }\n        if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {\n          throw er\n        }\n        return opts.statAsync(path).then(\n          st => {\n            if (st.isDirectory()) {\n              return made\n            } else {\n              throw er\n            }\n          },\n          () => {\n            throw er\n          }\n        )\n      }\n    )\n  },\n  { sync: mkdirpManualSync }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts new file mode 100644 index 00000000000000..28b64814b2545a --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts @@ -0,0 +1,6 @@ +import { MkdirpOptions } from './opts-arg.js'; +export declare const mkdirpNativeSync: (path: string, options?: MkdirpOptions) => string | void | undefined; +export declare const mkdirpNative: ((path: string, options?: MkdirpOptions) => Promise<string | void | undefined>) & { + sync: (path: string, options?: MkdirpOptions) => string | void | undefined; +}; +//# sourceMappingURL=mkdirp-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts.map new file mode 100644 index 00000000000000..517dfabe7d1213 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts.map @@ -0,0 +1 @@
+{"version":3,"file":"mkdirp-native.d.ts","sourceRoot":"","sources":["../../src/mkdirp-native.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAEtD,eAAO,MAAM,gBAAgB,SACrB,MAAM,YACF,aAAa,KACtB,MAAM,GAAG,IAAI,GAAG,SAoBlB,CAAA;AAED,eAAO,MAAM,YAAY,UAEf,MAAM,YACF,aAAa,KACtB,QAAQ,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;iBA5B/B,MAAM,YACF,aAAa,KACtB,MAAM,GAAG,IAAI,GAAG,SAAS;CAgD3B,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js new file mode 100644 index 00000000000000..99d10a5425dade --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js @@ -0,0 +1,46 @@ +import { dirname } from 'path'; +import { findMade, findMadeSync } from './find-made.js'; +import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'; +import { optsArg } from './opts-arg.js'; +export const mkdirpNativeSync = (path, options) => { + const opts = optsArg(options); + opts.recursive = true; + const parent = dirname(path); + if (parent === path) { + return opts.mkdirSync(path, opts); + } + const made = findMadeSync(opts, path); + try { + opts.mkdirSync(path, opts); + return made; + } + catch (er) { + const fer = er; + if (fer && fer.code === 'ENOENT') { + return mkdirpManualSync(path, opts); + } + else { + throw er; + } + } +}; +export const mkdirpNative = Object.assign(async (path, options) => { + const opts = { ...optsArg(options), recursive: true }; + const parent = dirname(path); + if (parent === path) { + return await opts.mkdirAsync(path, opts); + } + return findMade(opts, path).then((made) => opts + .mkdirAsync(path, opts) + .then(m => made || m) + .catch(er => { + const fer = er; + if (fer && fer.code === 'ENOENT') { + return mkdirpManual(path, opts); + } + else { + throw er; + } + })); +}, { sync: mkdirpNativeSync }); +//# sourceMappingURL=mkdirp-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js.map new file mode 100644 index 00000000000000..27de32d9436d67 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"mkdirp-native.js","sourceRoot":"","sources":["../../src/mkdirp-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAC9B,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAA;AACvD,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAiB,OAAO,EAAE,MAAM,eAAe,CAAA;AAEtD,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,OAAuB,EACI,EAAE;IAC7B,MAAM,IAAI,GAAG,OAAO,CAAC,OAAO,CAAC,CAAA;IAC7B,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;IACrB,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;KAClC;IAED,MAAM,IAAI,GAAG,YAAY,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;IACrC,IAAI;QACF,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QAC1B,OAAO,IAAI,CAAA;KACZ;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,gBAAgB,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SACpC;aAAM;YACL,MAAM,EAAE,CAAA;SACT;KACF;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,YAAY,GAAG,MAAM,CAAC,MAAM,CACvC,KAAK,EACH,IAAY,EACZ,OAAuB,EACa,EAAE;IACtC,MAAM,IAAI,GAAG,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC,EAAE,SAAS,EAAE,IAAI,EAAE,CAAA;IACrD,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;KACzC;IAED,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,IAAyB,EAAE,EAAE,CAC7D,IAAI;SACD,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC;SACtB,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,IAAI,CAAC,CAAC;SACpB,KAAK,CAAC,EAAE,CAAC,EAAE;QACV,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,YAAY,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SAChC;aAAM;YACL,MAAM,EAAE,CAAA;SACT;IACH,CAAC,CAAC,CACL,CAAA;AACH,CAAC,EACD,EAAE,IAAI,EAAE,gBAAgB,EAAE,CAC3B,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { findMade, findMadeSync } from './find-made.js'\nimport { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nexport const mkdirpNativeSync = (\n path: string,\n options?: MkdirpOptions\n): string | void | undefined => {\n const opts = optsArg(options)\n opts.recursive = true\n const parent = dirname(path)\n if (parent === path) {\n return opts.mkdirSync(path, opts)\n }\n\n const made = findMadeSync(opts, path)\n try {\n opts.mkdirSync(path, opts)\n return made\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManualSync(path, opts)\n } else {\n throw er\n }\n }\n}\n\nexport const mkdirpNative = Object.assign(\n async (\n path: string,\n options?: MkdirpOptions\n ): Promise => {\n const opts = { ...optsArg(options), recursive: true }\n const parent = dirname(path)\n if (parent === path) {\n return await opts.mkdirAsync(path, opts)\n }\n\n return findMade(opts, path).then((made?: string | undefined) =>\n opts\n .mkdirAsync(path, opts)\n .then(m => made || m)\n .catch(er => {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManual(path, opts)\n } else {\n throw er\n }\n })\n )\n },\n { sync: mkdirpNativeSync }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.d.ts new file mode 100644 index 00000000000000..73d076b3b6923c --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.d.ts @@ -0,0 +1,42 @@ +/// +/// +import { MakeDirectoryOptions, Stats } from 'fs'; +export interface FsProvider { + stat?: (path: string, 
callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any) => any; + mkdir?: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }, callback: (err: NodeJS.ErrnoException | null, made?: string) => any) => any; + statSync?: (path: string) => Stats; + mkdirSync?: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }) => string | undefined; +} +interface Options extends FsProvider { + mode?: number | string; + fs?: FsProvider; + mkdirAsync?: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }) => Promise<string | undefined>; + statAsync?: (path: string) => Promise<Stats>; +} +export type MkdirpOptions = Options | number | string; +export interface MkdirpOptionsResolved { + mode: number; + fs: FsProvider; + mkdirAsync: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }) => Promise<string | undefined>; + statAsync: (path: string) => Promise<Stats>; + stat: (path: string, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any) => any; + mkdir: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }, callback: (err: NodeJS.ErrnoException | null, made?: string) => any) => any; + statSync: (path: string) => Stats; + mkdirSync: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }) => string | undefined; + recursive?: boolean; +} +export declare const optsArg: (opts?: MkdirpOptions) => MkdirpOptionsResolved; +export {}; +//# sourceMappingURL=opts-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.d.ts.map new file mode 100644 index 00000000000000..717deb5f9cb0c6 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.d.ts.map @@ -0,0 +1 @@
+{"version":3,"file":"opts-arg.d.ts","sourceRoot":"","sources":["../../src/opts-arg.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,oBAAoB,EAIpB,KAAK,EAEN,MAAM,IAAI,CAAA;AAEX,MAAM,WAAW,UAAU;IACzB,IAAI,CAAC,EAAE,CACL,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,KAAK,EAAE,KAAK,KAAK,GAAG,KAC/D,GAAG,CAAA;IACR,KAAK,CAAC,EAAE,CACN,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,EACpD,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,GAAG,KAChE,GAAG,CAAA;IACR,QAAQ,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,KAAK,CAAA;IAClC,SAAS,CAAC,EAAE,CACV,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,MAAM,GAAG,SAAS,CAAA;CACxB;AAED,UAAU,OAAQ,SAAQ,UAAU;IAClC,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;IACtB,EAAE,CAAC,EAAE,UAAU,CAAA;IACf,UAAU,CAAC,EAAE,CACX,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAAA;IAChC,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,KAAK,CAAC,CAAA;CAC7C;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,GAAG,MAAM,CAAA;AAErD,MAAM,WAAW,qBAAqB;IACpC,IAAI,EAAE,MAAM,CAAA;IACZ,EAAE,EAAE,UAAU,CAAA;IACd,UAAU,EAAE,CACV,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAAA;IAChC,SAAS,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,KAAK,CAAC,CAAA;IAC3C,IAAI,EAAE,CACJ,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,KAAK,EAAE,KAAK,KAAK,GAAG,KAC/D,GAAG,CAAA;IACR,KAAK,EAAE,CACL,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,EACpD,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,GAAG,KAChE,GAAG,CAAA;IACR,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,KAAK,CAAA;IACjC,SAAS,EAAE,CACT,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,MAAM,GAAG,SAAS,CAAA;IACvB,SAAS,CAAC,EAAE,OAAO,CAAA;CACpB;AAED,eAAO,MAAM,OAAO,UAAW,aAAa,KAAG,qBA2C9C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js new file mode 100644 index 00000000000000..d47e2927fee4c0 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js @@ -0,0 +1,34 @@ +import { mkdir, mkdirSync, stat, statSync, } from 'fs'; +export const optsArg = (opts) => { + if (!opts) { + opts = { mode: 0o777 }; + } + else if (typeof opts === 'object') { + opts = { mode: 0o777, ...opts }; + } + else if (typeof opts === 'number') { + opts = { mode: opts }; + } + else if (typeof opts === 'string') { + opts = { mode: parseInt(opts, 8) }; + } + else { + throw new TypeError('invalid options argument'); + } + const resolved = opts; + const optsFs = opts.fs || {}; + opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir; + opts.mkdirAsync = opts.mkdirAsync + ? opts.mkdirAsync + : async (path, options) => { + return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made))); + }; + opts.stat = opts.stat || optsFs.stat || stat; + opts.statAsync = opts.statAsync + ? opts.statAsync + : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? 
rej(err) : res(stats)))); + opts.statSync = opts.statSync || optsFs.statSync || statSync; + opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync; + return resolved; +}; +//# sourceMappingURL=opts-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js.map new file mode 100644 index 00000000000000..663286dc7212ed --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"opts-arg.js","sourceRoot":"","sources":["../../src/opts-arg.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,KAAK,EACL,SAAS,EACT,IAAI,EAEJ,QAAQ,GACT,MAAM,IAAI,CAAA;AAwDX,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,IAAoB,EAAyB,EAAE;IACrE,IAAI,CAAC,IAAI,EAAE;QACT,IAAI,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,CAAA;KACvB;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,IAAI,EAAE,CAAA;KAChC;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,CAAA;KACtB;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,CAAA;KACnC;SAAM;QACL,MAAM,IAAI,SAAS,CAAC,0BAA0B,CAAC,CAAA;KAChD;IAED,MAAM,QAAQ,GAAG,IAA6B,CAAA;IAC9C,MAAM,MAAM,GAAG,IAAI,CAAC,EAAE,IAAI,EAAE,CAAA;IAE5B,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,IAAI,KAAK,CAAA;IAEhD,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU;QAC/B,CAAC,CAAC,IAAI,CAAC,UAAU;QACjB,CAAC,CAAC,KAAK,EACH,IAAY,EACZ,OAAuD,EAC1B,EAAE;YAC/B,OAAO,IAAI,OAAO,CAAqB,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAClD,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CACzC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CACzB,CACF,CAAA;QACH,CAAC,CAAA;IAEL,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,CAAA;IAC5C,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS;QAC7B,CAAC,CAAC,IAAI,CAAC,SAAS;QAChB,CAAC,CAAC,KAAK,EAAE,IAAY,EAAE,EAAE,CACrB,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,CAAA;IAEP,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,MAAM,CAAC,QAAQ,IAAI,QAAQ,CAAA;IAC5D,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,IAAI,MAAM,CAAC,SAAS,IAAI,SAAS,CAAA;IAEhE,OAAO,QAAQ,CAAA;AACjB,CAAC,CAAA","sourcesContent":["import {\n MakeDirectoryOptions,\n mkdir,\n mkdirSync,\n stat,\n Stats,\n statSync,\n} from 'fs'\n\nexport interface FsProvider {\n stat?: (\n path: string,\n callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any\n ) => any\n mkdir?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean },\n callback: (err: NodeJS.ErrnoException | null, made?: string) => any\n ) => any\n statSync?: (path: string) => Stats\n mkdirSync?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => string | undefined\n}\n\ninterface Options extends FsProvider {\n mode?: number | string\n fs?: FsProvider\n mkdirAsync?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => Promise\n statAsync?: (path: string) => Promise\n}\n\nexport type MkdirpOptions = Options | number | string\n\nexport interface MkdirpOptionsResolved {\n mode: number\n fs: FsProvider\n mkdirAsync: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => Promise\n statAsync: (path: string) => Promise\n stat: (\n path: string,\n callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any\n ) => 
any\n mkdir: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean },\n callback: (err: NodeJS.ErrnoException | null, made?: string) => any\n ) => any\n statSync: (path: string) => Stats\n mkdirSync: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => string | undefined\n recursive?: boolean\n}\n\nexport const optsArg = (opts?: MkdirpOptions): MkdirpOptionsResolved => {\n if (!opts) {\n opts = { mode: 0o777 }\n } else if (typeof opts === 'object') {\n opts = { mode: 0o777, ...opts }\n } else if (typeof opts === 'number') {\n opts = { mode: opts }\n } else if (typeof opts === 'string') {\n opts = { mode: parseInt(opts, 8) }\n } else {\n throw new TypeError('invalid options argument')\n }\n\n const resolved = opts as MkdirpOptionsResolved\n const optsFs = opts.fs || {}\n\n opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir\n\n opts.mkdirAsync = opts.mkdirAsync\n ? opts.mkdirAsync\n : async (\n path: string,\n options: MakeDirectoryOptions & { recursive?: boolean }\n ): Promise => {\n return new Promise((res, rej) =>\n resolved.mkdir(path, options, (er, made) =>\n er ? rej(er) : res(made)\n )\n )\n }\n\n opts.stat = opts.stat || optsFs.stat || stat\n opts.statAsync = opts.statAsync\n ? opts.statAsync\n : async (path: string) =>\n new Promise((res, rej) =>\n resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats)))\n )\n\n opts.statSync = opts.statSync || optsFs.statSync || statSync\n opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync\n\n return resolved\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/package.json b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.d.ts new file mode 100644 index 00000000000000..ad0ccfc482a485 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.d.ts @@ -0,0 +1,2 @@ +export declare const pathArg: (path: string) => string; +//# sourceMappingURL=path-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.d.ts.map new file mode 100644 index 00000000000000..801799e766fabc --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.d.ts","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,OAAO,SAAU,MAAM,WAyBnC,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js new file mode 100644 index 00000000000000..03539cc5a94f98 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js @@ -0,0 +1,24 @@ +const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform; +import { parse, resolve } from 'path'; +export const pathArg = (path) => { + if (/\0/.test(path)) { + // simulate same failure that node raises + throw Object.assign(new TypeError('path must be a string without null bytes'), { + path, + code: 
'ERR_INVALID_ARG_VALUE', + }); + } + path = resolve(path); + if (platform === 'win32') { + const badWinChars = /[*|"<>?:]/; + const { root } = parse(path); + if (badWinChars.test(path.substring(root.length))) { + throw Object.assign(new Error('Illegal characters in path.'), { + path, + code: 'EINVAL', + }); + } + } + return path; +}; +//# sourceMappingURL=path-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js.map new file mode 100644 index 00000000000000..43efe1e3a9976f --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.js","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAAA,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,2BAA2B,IAAI,OAAO,CAAC,QAAQ,CAAA;AAC5E,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AACrC,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,IAAY,EAAE,EAAE;IACtC,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;QACnB,yCAAyC;QACzC,MAAM,MAAM,CAAC,MAAM,CACjB,IAAI,SAAS,CAAC,0CAA0C,CAAC,EACzD;YACE,IAAI;YACJ,IAAI,EAAE,uBAAuB;SAC9B,CACF,CAAA;KACF;IAED,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACpB,IAAI,QAAQ,KAAK,OAAO,EAAE;QACxB,MAAM,WAAW,GAAG,WAAW,CAAA;QAC/B,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE;YACjD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,IAAI;gBACJ,IAAI,EAAE,QAAQ;aACf,CAAC,CAAA;SACH;KACF;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform\nimport { parse, resolve } from 'path'\nexport const pathArg = (path: string) => {\n if (/\\0/.test(path)) {\n // simulate same failure that node raises\n throw Object.assign(\n new TypeError('path must be a string without null bytes'),\n {\n path,\n code: 'ERR_INVALID_ARG_VALUE',\n }\n )\n }\n\n path = resolve(path)\n if (platform === 'win32') {\n const badWinChars = /[*|\"<>?:]/\n const { root } = parse(path)\n if (badWinChars.test(path.substring(root.length))) {\n throw Object.assign(new Error('Illegal characters in path.'), {\n path,\n code: 'EINVAL',\n })\n }\n }\n\n return path\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.d.ts new file mode 100644 index 00000000000000..1c6cb619e30405 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.d.ts @@ -0,0 +1,6 @@ +import { MkdirpOptions } from './opts-arg.js'; +export declare const useNativeSync: (opts?: MkdirpOptions) => boolean; +export declare const useNative: ((opts?: MkdirpOptions) => boolean) & { + sync: (opts?: MkdirpOptions) => boolean; +}; +//# sourceMappingURL=use-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.d.ts.map new file mode 100644 index 00000000000000..e2484228a04472 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.d.ts","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAMtD,eAAO,MAAM,aAAa,UAEd,aAAa,YAA0C,CAAA;AAEnE,eAAO,MAAM,SAAS,WAGR,aAAa;kBALf,aAAa;CASxB,CAAA"} 
\ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js new file mode 100644 index 00000000000000..ad2093867eb74e --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js @@ -0,0 +1,14 @@ +import { mkdir, mkdirSync } from 'fs'; +import { optsArg } from './opts-arg.js'; +const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version; +const versArr = version.replace(/^v/, '').split('.'); +const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12); +export const useNativeSync = !hasNative + ? () => false + : (opts) => optsArg(opts).mkdirSync === mkdirSync; +export const useNative = Object.assign(!hasNative + ? () => false + : (opts) => optsArg(opts).mkdir === mkdir, { + sync: useNativeSync, +}); +//# sourceMappingURL=use-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js.map new file mode 100644 index 00000000000000..08c616d365510f --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.js","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,IAAI,CAAA;AACrC,OAAO,EAAiB,OAAO,EAAE,MAAM,eAAe,CAAA;AAEtD,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,OAAO,CAAC,OAAO,CAAA;AAC9E,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;AACpD,MAAM,SAAS,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;AAE/E,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,SAAS;IACrC,CAAC,CAAC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,CAAC,IAAoB,EAAE,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,SAAS,KAAK,SAAS,CAAA;AAEnE,MAAM,CAAC,MAAM,SAAS,GAAG,MAAM,CAAC,MAAM,CACpC,CAAC,SAAS;IACR,CAAC,CAAC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,CAAC,IAAoB,EAAE,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,EAC3D;IACE,IAAI,EAAE,aAAa;CACpB,CACF,CAAA","sourcesContent":["import { mkdir, mkdirSync } from 'fs'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nconst version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version\nconst versArr = version.replace(/^v/, '').split('.')\nconst hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12)\n\nexport const useNativeSync = !hasNative\n ? () => false\n : (opts?: MkdirpOptions) => optsArg(opts).mkdirSync === mkdirSync\n\nexport const useNative = Object.assign(\n !hasNative\n ? 
() => false\n : (opts?: MkdirpOptions) => optsArg(opts).mkdir === mkdir,\n {\n sync: useNativeSync,\n }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/package.json b/deps/npm/node_modules/cacache/node_modules/mkdirp/package.json new file mode 100644 index 00000000000000..f31ac3314d6f6a --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/package.json @@ -0,0 +1,91 @@ +{ + "name": "mkdirp", + "description": "Recursively mkdir, like `mkdir -p`", + "version": "3.0.1", + "keywords": [ + "mkdir", + "directory", + "make dir", + "make", + "dir", + "recursive", + "native" + ], + "bin": "./dist/cjs/src/bin.js", + "main": "./dist/cjs/src/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/mjs/index.d.ts", + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/src/index.d.ts", + "default": "./dist/cjs/src/index.js" + } + } + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", + "postprepare": "bash fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . --loglevel warn", + "benchmark": "node benchmark/index.js", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/brace-expansion": "^1.1.0", + "@types/node": "^18.11.9", + "@types/tap": "^15.0.7", + "c8": "^7.12.0", + "eslint-config-prettier": "^8.6.0", + "prettier": "^2.8.2", + "tap": "^16.3.3", + "ts-node": "^10.9.1", + "typedoc": "^0.23.21", + "typescript": "^4.9.3" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "repository": { + "type": "git", + "url": "https://github.com/isaacs/node-mkdirp.git" + }, + "license": "MIT", + "engines": { + "node": ">=10" + } +} diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/readme.markdown b/deps/npm/node_modules/cacache/node_modules/mkdirp/readme.markdown new file mode 100644 index 00000000000000..df654b808755f5 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/readme.markdown @@ -0,0 +1,281 @@ +# mkdirp + +Like `mkdir -p`, but in Node.js! + +Now with a modern API and no\* bugs! 
+ +\* may contain some bugs + +# example + +## pow.js + +```js +// hybrid module, import or require() both work +import { mkdirp } from 'mkdirp' +// or: +const { mkdirp } = require('mkdirp') + +// return value is a Promise resolving to the first directory created +mkdirp('/tmp/foo/bar/baz').then(made => + console.log(`made directories, starting with ${made}`) +) +``` + +Output (where `/tmp/foo` already exists) + +``` +made directories, starting with /tmp/foo/bar +``` + +Or, if you don't have time to wait around for promises: + +```js +import { mkdirp } from 'mkdirp' + +// return value is the first directory created +const made = mkdirp.sync('/tmp/foo/bar/baz') +console.log(`made directories, starting with ${made}`) +``` + +And now /tmp/foo/bar/baz exists, huzzah! + +# methods + +```js +import { mkdirp } from 'mkdirp' +``` + +## `mkdirp(dir: string, opts?: MkdirpOptions) => Promise` + +Create a new directory and any necessary subdirectories at `dir` +with octal permission string `opts.mode`. If `opts` is a string +or number, it will be treated as the `opts.mode`. + +If `opts.mode` isn't specified, it defaults to `0o777`. + +Promise resolves to first directory `made` that had to be +created, or `undefined` if everything already exists. Promise +rejects if any errors are encountered. Note that, in the case of +promise rejection, some directories _may_ have been created, as +recursive directory creation is not an atomic operation. + +You can optionally pass in an alternate `fs` implementation by +passing in `opts.fs`. Your implementation should have +`opts.fs.mkdir(path, opts, cb)` and `opts.fs.stat(path, cb)`. + +You can also override just one or the other of `mkdir` and `stat` +by passing in `opts.stat` or `opts.mkdir`, or providing an `fs` +option that only overrides one of these. + +## `mkdirp.sync(dir: string, opts: MkdirpOptions) => string|undefined` + +Synchronously create a new directory and any necessary +subdirectories at `dir` with octal permission string `opts.mode`. +If `opts` is a string or number, it will be treated as the +`opts.mode`. + +If `opts.mode` isn't specified, it defaults to `0o777`. + +Returns the first directory that had to be created, or undefined +if everything already exists. + +You can optionally pass in an alternate `fs` implementation by +passing in `opts.fs`. Your implementation should have +`opts.fs.mkdirSync(path, mode)` and `opts.fs.statSync(path)`. + +You can also override just one or the other of `mkdirSync` and +`statSync` by passing in `opts.statSync` or `opts.mkdirSync`, or +providing an `fs` option that only overrides one of these. + +## `mkdirp.manual`, `mkdirp.manualSync` + +Use the manual implementation (not the native one). This is the +default when the native implementation is not available or the +stat/mkdir implementation is overridden. + +## `mkdirp.native`, `mkdirp.nativeSync` + +Use the native implementation (not the manual one). This is the +default when the native implementation is available and +stat/mkdir are not overridden. + +# implementation + +On Node.js v10.12.0 and above, use the native `fs.mkdir(p, +{recursive:true})` option, unless `fs.mkdir`/`fs.mkdirSync` has +been overridden by an option. + +## native implementation + +- If the path is a root directory, then pass it to the underlying + implementation and return the result/error. (In this case, + it'll either succeed or fail, but we aren't actually creating + any dirs.) +- Walk up the path statting each directory, to find the first + path that will be created, `made`. 
+- Call `fs.mkdir(path, { recursive: true })` (or `fs.mkdirSync`) +- If error, raise it to the caller. +- Return `made`. + +## manual implementation + +- Call underlying `fs.mkdir` implementation, with `recursive: +false` +- If error: + - If path is a root directory, raise to the caller and do not + handle it + - If ENOENT, mkdirp parent dir, store result as `made` + - stat(path) + - If error, raise original `mkdir` error + - If directory, return `made` + - Else, raise original `mkdir` error +- else + - return `undefined` if a root dir, or `made` if set, or `path` + +## windows vs unix caveat + +On Windows file systems, attempts to create a root directory (ie, +a drive letter or root UNC path) will fail. If the root +directory exists, then it will fail with `EPERM`. If the root +directory does not exist, then it will fail with `ENOENT`. + +On posix file systems, attempts to create a root directory (in +recursive mode) will succeed silently, as it is treated like just +another directory that already exists. (In non-recursive mode, +of course, it fails with `EEXIST`.) + +In order to preserve this system-specific behavior (and because +it's not as if we can create the parent of a root directory +anyway), attempts to create a root directory are passed directly +to the `fs` implementation, and any errors encountered are not +handled. + +## native error caveat + +The native implementation (as of at least Node.js v13.4.0) does +not provide appropriate errors in some cases (see +[nodejs/node#31481](https://github.com/nodejs/node/issues/31481) +and +[nodejs/node#28015](https://github.com/nodejs/node/issues/28015)). + +In order to work around this issue, the native implementation +will fall back to the manual implementation if an `ENOENT` error +is encountered. + +# choosing a recursive mkdir implementation + +There are a few to choose from! Use the one that suits your +needs best :D + +## use `fs.mkdir(path, {recursive: true}, cb)` if: + +- You wish to optimize performance even at the expense of other + factors. +- You don't need to know the first dir created. +- You are ok with getting `ENOENT` as the error when some other + problem is the actual cause. +- You can limit your platforms to Node.js v10.12 and above. +- You're ok with using callbacks instead of promises. +- You don't need/want a CLI. +- You don't need to override the `fs` methods in use. + +## use this module (mkdirp 1.x or 2.x) if: + +- You need to know the first directory that was created. +- You wish to use the native implementation if available, but + fall back when it's not. +- You prefer promise-returning APIs to callback-taking APIs. +- You want more useful error messages than the native recursive + mkdir provides (at least as of Node.js v13.4), and are ok with + re-trying on `ENOENT` to achieve this. +- You need (or at least, are ok with) a CLI. +- You need to override the `fs` methods in use. + +## use [`make-dir`](http://npm.im/make-dir) if: + +- You do not need to know the first dir created (and wish to save + a few `stat` calls when using the native implementation for + this reason). +- You wish to use the native implementation if available, but + fall back when it's not. +- You prefer promise-returning APIs to callback-taking APIs. +- You are ok with occasionally getting `ENOENT` errors for + failures that are actually related to something other than a + missing file system entry. +- You don't need/want a CLI. +- You need to override the `fs` methods in use. 
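+
+For a concrete feel for the "first dir created" point that several of
+the bullets above turn on, here is a minimal sketch (an annotation for
+illustration, not upstream readme text; it assumes `/tmp/a` does not
+exist yet and `/tmp` is writable):
+
+```js
+import { mkdirp } from 'mkdirp'
+
+const first = await mkdirp('/tmp/a/b/c') // '/tmp/a', the first dir created
+const again = await mkdirp('/tmp/a/b/c') // undefined: everything exists
+```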
+ +## use mkdirp 0.x if: + +- You need to know the first directory that was created. +- You need (or at least, are ok with) a CLI. +- You need to override the `fs` methods in use. +- You're ok with using callbacks instead of promises. +- You are not running on Windows, where the root-level ENOENT + errors can lead to infinite regress. +- You think vinyl just sounds warmer and richer for some weird + reason. +- You are supporting truly ancient Node.js versions, before even + the advent of a `Promise` language primitive. (Please don't. + You deserve better.) + +# cli + +This package also ships with a `mkdirp` command. + +``` +$ mkdirp -h + +usage: mkdirp [DIR1,DIR2..] {OPTIONS} + + Create each supplied directory including any necessary parent directories + that don't yet exist. + + If the directory already exists, do nothing. + +OPTIONS are: + + -m If a directory needs to be created, set the mode as an octal + --mode= permission string. + + -v --version Print the mkdirp version number + + -h --help Print this helpful banner + + -p --print Print the first directories created for each path provided + + --manual Use manual implementation, even if native is available +``` + +# install + +With [npm](http://npmjs.org) do: + +``` +npm install mkdirp +``` + +to get the library locally, or + +``` +npm install -g mkdirp +``` + +to get the command everywhere, or + +``` +npx mkdirp ... +``` + +to run the command without installing it globally. + +# platform support + +This module works on node v8, but only v10 and above are officially +supported, as Node v8 reached its LTS end of life 2020-01-01, which is in +the past, as of this writing. + +# license + +MIT diff --git a/deps/npm/node_modules/cacache/node_modules/p-map/index.js b/deps/npm/node_modules/cacache/node_modules/p-map/index.js new file mode 100644 index 00000000000000..2f7d91ccca4eda --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/p-map/index.js @@ -0,0 +1,269 @@ +export default async function pMap( + iterable, + mapper, + { + concurrency = Number.POSITIVE_INFINITY, + stopOnError = true, + signal, + } = {}, +) { + return new Promise((resolve, reject_) => { + if (iterable[Symbol.iterator] === undefined && iterable[Symbol.asyncIterator] === undefined) { + throw new TypeError(`Expected \`input\` to be either an \`Iterable\` or \`AsyncIterable\`, got (${typeof iterable})`); + } + + if (typeof mapper !== 'function') { + throw new TypeError('Mapper function is required'); + } + + if (!((Number.isSafeInteger(concurrency) && concurrency >= 1) || concurrency === Number.POSITIVE_INFINITY)) { + throw new TypeError(`Expected \`concurrency\` to be an integer from 1 and up or \`Infinity\`, got \`${concurrency}\` (${typeof concurrency})`); + } + + const result = []; + const errors = []; + const skippedIndexesMap = new Map(); + let isRejected = false; + let isResolved = false; + let isIterableDone = false; + let resolvingCount = 0; + let currentIndex = 0; + const iterator = iterable[Symbol.iterator] === undefined ? iterable[Symbol.asyncIterator]() : iterable[Symbol.iterator](); + + const reject = reason => { + isRejected = true; + isResolved = true; + reject_(reason); + }; + + if (signal) { + if (signal.aborted) { + reject(signal.reason); + } + + signal.addEventListener('abort', () => { + reject(signal.reason); + }); + } + + const next = async () => { + if (isResolved) { + return; + } + + const nextItem = await iterator.next(); + + const index = currentIndex; + currentIndex++; + + // Note: `iterator.next()` can be called many times in parallel. 
+ // This can cause multiple calls to this `next()` function to + // receive a `nextItem` with `done === true`. + // The shutdown logic that rejects/resolves must be protected + // so it runs only one time as the `skippedIndex` logic is + // non-idempotent. + if (nextItem.done) { + isIterableDone = true; + + if (resolvingCount === 0 && !isResolved) { + if (!stopOnError && errors.length > 0) { + reject(new AggregateError(errors)); // eslint-disable-line unicorn/error-message + return; + } + + isResolved = true; + + if (skippedIndexesMap.size === 0) { + resolve(result); + return; + } + + const pureResult = []; + + // Support multiple `pMapSkip`'s. + for (const [index, value] of result.entries()) { + if (skippedIndexesMap.get(index) === pMapSkip) { + continue; + } + + pureResult.push(value); + } + + resolve(pureResult); + } + + return; + } + + resolvingCount++; + + // Intentionally detached + (async () => { + try { + const element = await nextItem.value; + + if (isResolved) { + return; + } + + const value = await mapper(element, index); + + // Use Map to stage the index of the element. + if (value === pMapSkip) { + skippedIndexesMap.set(index, value); + } + + result[index] = value; + + resolvingCount--; + await next(); + } catch (error) { + if (stopOnError) { + reject(error); + } else { + errors.push(error); + resolvingCount--; + + // In that case we can't really continue regardless of `stopOnError` state + // since an iterable is likely to continue throwing after it throws once. + // If we continue calling `next()` indefinitely we will likely end up + // in an infinite loop of failed iteration. + try { + await next(); + } catch (error) { + reject(error); + } + } + } + })(); + }; + + // Create the concurrent runners in a detached (non-awaited) + // promise. We need this so we can await the `next()` calls + // to stop creating runners before hitting the concurrency limit + // if the iterable has already been marked as done. + // NOTE: We *must* do this for async iterators otherwise we'll spin up + // infinite `next()` calls by default and never start the event loop. + (async () => { + for (let index = 0; index < concurrency; index++) { + try { + // eslint-disable-next-line no-await-in-loop + await next(); + } catch (error) { + reject(error); + break; + } + + if (isIterableDone || isRejected) { + break; + } + } + })(); + }); +} + +export function pMapIterable( + iterable, + mapper, + { + concurrency = Number.POSITIVE_INFINITY, + backpressure = concurrency, + } = {}, +) { + if (iterable[Symbol.iterator] === undefined && iterable[Symbol.asyncIterator] === undefined) { + throw new TypeError(`Expected \`input\` to be either an \`Iterable\` or \`AsyncIterable\`, got (${typeof iterable})`); + } + + if (typeof mapper !== 'function') { + throw new TypeError('Mapper function is required'); + } + + if (!((Number.isSafeInteger(concurrency) && concurrency >= 1) || concurrency === Number.POSITIVE_INFINITY)) { + throw new TypeError(`Expected \`concurrency\` to be an integer from 1 and up or \`Infinity\`, got \`${concurrency}\` (${typeof concurrency})`); + } + + if (!((Number.isSafeInteger(backpressure) && backpressure >= concurrency) || backpressure === Number.POSITIVE_INFINITY)) { + throw new TypeError(`Expected \`backpressure\` to be an integer from \`concurrency\` (${concurrency}) and up or \`Infinity\`, got \`${backpressure}\` (${typeof backpressure})`); + } + + return { + async * [Symbol.asyncIterator]() { + const iterator = iterable[Symbol.asyncIterator] === undefined ? 
iterable[Symbol.iterator]() : iterable[Symbol.asyncIterator](); + + const promises = []; + let runningMappersCount = 0; + let isDone = false; + let index = 0; + + function trySpawn() { + if (isDone || !(runningMappersCount < concurrency && promises.length < backpressure)) { + return; + } + + const promise = (async () => { + const {done, value} = await iterator.next(); + + if (done) { + return {done: true}; + } + + runningMappersCount++; + + // Spawn if still below concurrency and backpressure limit + trySpawn(); + + try { + const returnValue = await mapper(await value, index++); + + runningMappersCount--; + + if (returnValue === pMapSkip) { + const index = promises.indexOf(promise); + + if (index > 0) { + promises.splice(index, 1); + } + } + + // Spawn if still below backpressure limit and just dropped below concurrency limit + trySpawn(); + + return {done: false, value: returnValue}; + } catch (error) { + isDone = true; + return {error}; + } + })(); + + promises.push(promise); + } + + trySpawn(); + + while (promises.length > 0) { + const {error, done, value} = await promises[0]; // eslint-disable-line no-await-in-loop + + promises.shift(); + + if (error) { + throw error; + } + + if (done) { + return; + } + + // Spawn if just dropped below backpressure limit and below the concurrency limit + trySpawn(); + + if (value === pMapSkip) { + continue; + } + + yield value; + } + }, + }; +} + +export const pMapSkip = Symbol('skip'); diff --git a/deps/npm/node_modules/cacache/node_modules/p-map/license b/deps/npm/node_modules/cacache/node_modules/p-map/license new file mode 100644 index 00000000000000..fa7ceba3eb4a96 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/p-map/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
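The vendored `p-map` above exposes a default `pMap` export plus the
`pMapSkip` sentinel; a minimal usage sketch of that API (illustrative
annotation only, not part of the patch), based on the signatures shown
in `index.js`:

```js
import pMap, { pMapSkip } from 'p-map'

const inputs = ['a', 'b', 'c']
const results = await pMap(
  inputs,
  // returning pMapSkip drops this element from the result array
  async (item, index) => (index === 1 ? pMapSkip : `mapped:${item}`),
  { concurrency: 2 }, // at most two mappers in flight at once
)
// results: ['mapped:a', 'mapped:c']
```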
diff --git a/deps/npm/node_modules/cacache/node_modules/p-map/package.json b/deps/npm/node_modules/cacache/node_modules/p-map/package.json new file mode 100644 index 00000000000000..ea58f599f3a035 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/p-map/package.json @@ -0,0 +1,57 @@ +{ + "name": "p-map", + "version": "7.0.2", + "description": "Map over promises concurrently", + "license": "MIT", + "repository": "sindresorhus/p-map", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": { + "types": "./index.d.ts", + "default": "./index.js" + }, + "sideEffects": false, + "engines": { + "node": ">=18" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "promise", + "map", + "resolved", + "wait", + "collection", + "iterable", + "iterator", + "race", + "fulfilled", + "async", + "await", + "promises", + "concurrently", + "concurrency", + "parallel", + "bluebird" + ], + "devDependencies": { + "ava": "^5.2.0", + "chalk": "^5.3.0", + "delay": "^6.0.0", + "in-range": "^3.0.0", + "random-int": "^3.0.0", + "time-span": "^5.1.0", + "tsd": "^0.29.0", + "xo": "^0.56.0" + } +} diff --git a/deps/npm/node_modules/cacache/node_modules/tar/LICENSE b/deps/npm/node_modules/cacache/node_modules/tar/LICENSE new file mode 100644 index 00000000000000..19129e315fe593 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/create.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/create.js new file mode 100644 index 00000000000000..3190afc48318f9 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/create.js @@ -0,0 +1,83 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.create = void 0; +const fs_minipass_1 = require("@isaacs/fs-minipass"); +const node_path_1 = __importDefault(require("node:path")); +const list_js_1 = require("./list.js"); +const make_command_js_1 = require("./make-command.js"); +const pack_js_1 = require("./pack.js"); +const createFileSync = (opt, files) => { + const p = new pack_js_1.PackSync(opt); + const stream = new fs_minipass_1.WriteStreamSync(opt.file, { + mode: opt.mode || 0o666, + }); + p.pipe(stream); + addFilesSync(p, files); +}; +const createFile = (opt, files) => { + const p = new pack_js_1.Pack(opt); + const stream = new fs_minipass_1.WriteStream(opt.file, { + mode: opt.mode || 0o666, + }); + p.pipe(stream); + const promise = new Promise((res, rej) => { + stream.on('error', rej); + stream.on('close', res); + p.on('error', rej); + }); + addFilesAsync(p, files); + return promise; +}; +const addFilesSync = (p, files) => { + files.forEach(file => { + if (file.charAt(0) === '@') { + (0, list_js_1.list)({ + file: node_path_1.default.resolve(p.cwd, file.slice(1)), + sync: true, + noResume: true, + onReadEntry: entry => p.add(entry), + }); + } + else { + p.add(file); + } + }); + p.end(); +}; +const addFilesAsync = async (p, files) => { + for (let i = 0; i < files.length; i++) { + const file = String(files[i]); + if (file.charAt(0) === '@') { + await (0, list_js_1.list)({ + file: node_path_1.default.resolve(String(p.cwd), file.slice(1)), + noResume: true, + onReadEntry: entry => { + p.add(entry); + }, + }); + } + else { + p.add(file); + } + } + p.end(); +}; +const createSync = (opt, files) => { + const p = new pack_js_1.PackSync(opt); + addFilesSync(p, files); + return p; +}; +const createAsync = (opt, files) => { + const p = new pack_js_1.Pack(opt); + addFilesAsync(p, files); + return p; +}; +exports.create = (0, make_command_js_1.makeCommand)(createFileSync, createFile, createSync, createAsync, (_opt, files) => { + if (!files?.length) { + throw new TypeError('no paths specified to add to archive'); + } +}); +//# sourceMappingURL=create.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/cwd-error.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/cwd-error.js new file mode 100644 index 00000000000000..d703a7772be3a5 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/cwd-error.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CwdError = void 0; +class CwdError extends Error { + path; + code; + syscall = 'chdir'; + constructor(path, code) { + super(`${code}: Cannot cd into '${path}'`); + this.path = path; + this.code = code; + } + get name() { + return 'CwdError'; + } +} +exports.CwdError = CwdError; +//# sourceMappingURL=cwd-error.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/extract.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/extract.js new file mode 100644 index 00000000000000..f848cbcbf779e8 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/extract.js @@ -0,0 +1,78 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.extract = void 0; +// tar -x +const fsm = __importStar(require("@isaacs/fs-minipass")); +const node_fs_1 = __importDefault(require("node:fs")); +const list_js_1 = require("./list.js"); +const make_command_js_1 = require("./make-command.js"); +const unpack_js_1 = require("./unpack.js"); +const extractFileSync = (opt) => { + const u = new unpack_js_1.UnpackSync(opt); + const file = opt.file; + const stat = node_fs_1.default.statSync(file); + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + const stream = new fsm.ReadStreamSync(file, { + readSize: readSize, + size: stat.size, + }); + stream.pipe(u); +}; +const extractFile = (opt, _) => { + const u = new unpack_js_1.Unpack(opt); + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + const file = opt.file; + const p = new Promise((resolve, reject) => { + u.on('error', reject); + u.on('close', resolve); + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + node_fs_1.default.stat(file, (er, stat) => { + if (er) { + reject(er); + } + else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size, + }); + stream.on('error', reject); + stream.pipe(u); + } + }); + }); + return p; +}; +exports.extract = (0, make_command_js_1.makeCommand)(extractFileSync, extractFile, opt => new unpack_js_1.UnpackSync(opt), opt => new unpack_js_1.Unpack(opt), (opt, files) => { + if (files?.length) + (0, list_js_1.filesFilter)(opt, files); +}); +//# sourceMappingURL=extract.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/get-write-flag.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/get-write-flag.js new file mode 100644 index 00000000000000..94add8f6b2231c --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/get-write-flag.js @@ -0,0 +1,29 @@ +"use strict"; +// Get the appropriate flag to use for creating files +// We use fmap on Windows platforms for files less than +// 512kb. This is a fairly low limit, but avoids making +// things slower in some cases. Since most of what this +// library is used for is extracting tarballs of many +// relatively small files in npm packages and the like, +// it can be a big boost on Windows platforms. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getWriteFlag = void 0; +const fs_1 = __importDefault(require("fs")); +const platform = process.env.__FAKE_PLATFORM__ || process.platform; +const isWindows = platform === 'win32'; +/* c8 ignore start */ +const { O_CREAT, O_TRUNC, O_WRONLY } = fs_1.default.constants; +const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) || + fs_1.default.constants.UV_FS_O_FILEMAP || + 0; +/* c8 ignore stop */ +const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP; +const fMapLimit = 512 * 1024; +const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY; +exports.getWriteFlag = !fMapEnabled ? + () => 'w' + : (size) => (size < fMapLimit ? fMapFlag : 'w'); +//# sourceMappingURL=get-write-flag.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/header.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/header.js new file mode 100644 index 00000000000000..b3a48037b849ab --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/header.js @@ -0,0 +1,306 @@ +"use strict"; +// parse a 512-byte header block to a data object, or vice-versa +// encode returns `true` if a pax extended header is needed, because +// the data could not be faithfully encoded in a simple header. +// (Also, check header.needPax to see if it needs a pax header.) +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Header = void 0; +const node_path_1 = require("node:path"); +const large = __importStar(require("./large-numbers.js")); +const types = __importStar(require("./types.js")); +class Header { + cksumValid = false; + needPax = false; + nullBlock = false; + block; + path; + mode; + uid; + gid; + size; + cksum; + #type = 'Unsupported'; + linkpath; + uname; + gname; + devmaj = 0; + devmin = 0; + atime; + ctime; + mtime; + charset; + comment; + constructor(data, off = 0, ex, gex) { + if (Buffer.isBuffer(data)) { + this.decode(data, off || 0, ex, gex); + } + else if (data) { + this.#slurp(data); + } + } + decode(buf, off, ex, gex) { + if (!off) { + off = 0; + } + if (!buf || !(buf.length >= off + 512)) { + throw new Error('need 512 bytes for header'); + } + this.path = decString(buf, off, 100); + this.mode = decNumber(buf, off + 100, 8); + this.uid = decNumber(buf, off + 108, 8); + this.gid = decNumber(buf, off + 116, 8); + this.size = decNumber(buf, off + 124, 12); + this.mtime = decDate(buf, off + 136, 12); + this.cksum = decNumber(buf, off + 148, 12); + // if we have extended or global extended headers, apply them now + // See https://github.com/npm/node-tar/pull/187 + // Apply global before local, so it overrides + if (gex) + this.#slurp(gex, true); + if (ex) + this.#slurp(ex); + // old tar versions marked dirs as a file with a trailing / + const t = decString(buf, off + 156, 1); + if (types.isCode(t)) { + this.#type = t || '0'; + } + if (this.#type === '0' && this.path.slice(-1) === '/') { + this.#type = '5'; + } + // tar implementations sometimes incorrectly put the stat(dir).size + // as the size in the tarball, even though Directory entries are + // not able to have any body at all. In the very rare chance that + // it actually DOES have a body, we weren't going to do anything with + // it anyway, and it'll just be a warning about an invalid header. + if (this.#type === '5') { + this.size = 0; + } + this.linkpath = decString(buf, off + 157, 100); + if (buf.subarray(off + 257, off + 265).toString() === + 'ustar\u000000') { + this.uname = decString(buf, off + 265, 32); + this.gname = decString(buf, off + 297, 32); + /* c8 ignore start */ + this.devmaj = decNumber(buf, off + 329, 8) ?? 0; + this.devmin = decNumber(buf, off + 337, 8) ?? 0; + /* c8 ignore stop */ + if (buf[off + 475] !== 0) { + // definitely a prefix, definitely >130 chars. 
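+            // (Offset 475 is the 131st byte of the 155-byte prefix field
+            // that starts at offset 345: when it is non-zero, the prefix
+            // runs past 130 characters, so the atime/ctime fields read in
+            // the else branch below cannot be present.)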
+ const prefix = decString(buf, off + 345, 155); + this.path = prefix + '/' + this.path; + } + else { + const prefix = decString(buf, off + 345, 130); + if (prefix) { + this.path = prefix + '/' + this.path; + } + this.atime = decDate(buf, off + 476, 12); + this.ctime = decDate(buf, off + 488, 12); + } + } + let sum = 8 * 0x20; + for (let i = off; i < off + 148; i++) { + sum += buf[i]; + } + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i]; + } + this.cksumValid = sum === this.cksum; + if (this.cksum === undefined && sum === 8 * 0x20) { + this.nullBlock = true; + } + } + #slurp(ex, gex = false) { + Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. Also, any + // null/undefined values are ignored. + return !(v === null || + v === undefined || + (k === 'path' && gex) || + (k === 'linkpath' && gex) || + k === 'global'); + }))); + } + encode(buf, off = 0) { + if (!buf) { + buf = this.block = Buffer.alloc(512); + } + if (this.#type === 'Unsupported') { + this.#type = '0'; + } + if (!(buf.length >= off + 512)) { + throw new Error('need 512 bytes for header'); + } + const prefixSize = this.ctime || this.atime ? 130 : 155; + const split = splitPrefix(this.path || '', prefixSize); + const path = split[0]; + const prefix = split[1]; + this.needPax = !!split[2]; + this.needPax = encString(buf, off, 100, path) || this.needPax; + this.needPax = + encNumber(buf, off + 100, 8, this.mode) || this.needPax; + this.needPax = + encNumber(buf, off + 108, 8, this.uid) || this.needPax; + this.needPax = + encNumber(buf, off + 116, 8, this.gid) || this.needPax; + this.needPax = + encNumber(buf, off + 124, 12, this.size) || this.needPax; + this.needPax = + encDate(buf, off + 136, 12, this.mtime) || this.needPax; + buf[off + 156] = this.#type.charCodeAt(0); + this.needPax = + encString(buf, off + 157, 100, this.linkpath) || this.needPax; + buf.write('ustar\u000000', off + 257, 8); + this.needPax = + encString(buf, off + 265, 32, this.uname) || this.needPax; + this.needPax = + encString(buf, off + 297, 32, this.gname) || this.needPax; + this.needPax = + encNumber(buf, off + 329, 8, this.devmaj) || this.needPax; + this.needPax = + encNumber(buf, off + 337, 8, this.devmin) || this.needPax; + this.needPax = + encString(buf, off + 345, prefixSize, prefix) || this.needPax; + if (buf[off + 475] !== 0) { + this.needPax = + encString(buf, off + 345, 155, prefix) || this.needPax; + } + else { + this.needPax = + encString(buf, off + 345, 130, prefix) || this.needPax; + this.needPax = + encDate(buf, off + 476, 12, this.atime) || this.needPax; + this.needPax = + encDate(buf, off + 488, 12, this.ctime) || this.needPax; + } + let sum = 8 * 0x20; + for (let i = off; i < off + 148; i++) { + sum += buf[i]; + } + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i]; + } + this.cksum = sum; + encNumber(buf, off + 148, 8, this.cksum); + this.cksumValid = true; + return this.needPax; + } + get type() { + return (this.#type === 'Unsupported' ? 
+ this.#type + : types.name.get(this.#type)); + } + get typeKey() { + return this.#type; + } + set type(type) { + const c = String(types.code.get(type)); + if (types.isCode(c) || c === 'Unsupported') { + this.#type = c; + } + else if (types.isCode(type)) { + this.#type = type; + } + else { + throw new TypeError('invalid entry type: ' + type); + } + } +} +exports.Header = Header; +const splitPrefix = (p, prefixSize) => { + const pathSize = 100; + let pp = p; + let prefix = ''; + let ret = undefined; + const root = node_path_1.posix.parse(p).root || '.'; + if (Buffer.byteLength(pp) < pathSize) { + ret = [pp, prefix, false]; + } + else { + // first set prefix to the dir, and path to the base + prefix = node_path_1.posix.dirname(pp); + pp = node_path_1.posix.basename(pp); + do { + if (Buffer.byteLength(pp) <= pathSize && + Buffer.byteLength(prefix) <= prefixSize) { + // both fit! + ret = [pp, prefix, false]; + } + else if (Buffer.byteLength(pp) > pathSize && + Buffer.byteLength(prefix) <= prefixSize) { + // prefix fits in prefix, but path doesn't fit in path + ret = [pp.slice(0, pathSize - 1), prefix, true]; + } + else { + // make path take a bit from prefix + pp = node_path_1.posix.join(node_path_1.posix.basename(prefix), pp); + prefix = node_path_1.posix.dirname(prefix); + } + } while (prefix !== root && ret === undefined); + // at this point, found no resolution, just truncate + if (!ret) { + ret = [p.slice(0, pathSize - 1), '', true]; + } + } + return ret; +}; +const decString = (buf, off, size) => buf + .subarray(off, off + size) + .toString('utf8') + .replace(/\0.*/, ''); +const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size)); +const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000); +const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ? + large.parse(buf.subarray(off, off + size)) + : decSmallNumber(buf, off, size); +const nanUndef = (value) => (isNaN(value) ? undefined : value); +const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf + .subarray(off, off + size) + .toString('utf8') + .replace(/\0.*$/, '') + .trim(), 8)); +// the maximum encodable as a null-terminated octal, by field size +const MAXNUM = { + 12: 0o77777777777, + 8: 0o7777777, +}; +const encNumber = (buf, off, size, num) => num === undefined ? false + : num > MAXNUM[size] || num < 0 ? + (large.encode(num, buf.subarray(off, off + size)), true) + : (encSmallNumber(buf, off, size, num), false); +const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii'); +const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size); +const padOctal = (str, size) => (str.length === size - 1 ? + str + : new Array(size - str.length - 1).join('0') + str + ' ') + '\0'; +const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000)); +// enough to fill the longest string we've got +const NULLS = new Array(156).join('\0'); +// pad with nulls, return true if it's longer or non-ascii +const encString = (buf, off, size, str) => str === undefined ? 
false : ((buf.write(str + NULLS, off, size, 'utf8'), + str.length !== Buffer.byteLength(str) || str.length > size)); +//# sourceMappingURL=header.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/index.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/index.js new file mode 100644 index 00000000000000..e93ed5ad54aa6e --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/index.js @@ -0,0 +1,54 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.u = exports.types = exports.r = exports.t = exports.x = exports.c = void 0; +__exportStar(require("./create.js"), exports); +var create_js_1 = require("./create.js"); +Object.defineProperty(exports, "c", { enumerable: true, get: function () { return create_js_1.create; } }); +__exportStar(require("./extract.js"), exports); +var extract_js_1 = require("./extract.js"); +Object.defineProperty(exports, "x", { enumerable: true, get: function () { return extract_js_1.extract; } }); +__exportStar(require("./header.js"), exports); +__exportStar(require("./list.js"), exports); +var list_js_1 = require("./list.js"); +Object.defineProperty(exports, "t", { enumerable: true, get: function () { return list_js_1.list; } }); +// classes +__exportStar(require("./pack.js"), exports); +__exportStar(require("./parse.js"), exports); +__exportStar(require("./pax.js"), exports); +__exportStar(require("./read-entry.js"), exports); +__exportStar(require("./replace.js"), exports); +var replace_js_1 = require("./replace.js"); +Object.defineProperty(exports, "r", { enumerable: true, get: function () { return replace_js_1.replace; } }); +exports.types = __importStar(require("./types.js")); +__exportStar(require("./unpack.js"), exports); +__exportStar(require("./update.js"), exports); +var update_js_1 = require("./update.js"); +Object.defineProperty(exports, "u", { enumerable: true, get: function () { return update_js_1.update; } }); +__exportStar(require("./write-entry.js"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/large-numbers.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/large-numbers.js new file mode 100644 index 
00000000000000..5b07aa7f71b48d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/large-numbers.js @@ -0,0 +1,99 @@ +"use strict"; +// Tar can encode large and negative numbers using a leading byte of +// 0xff for negative, and 0x80 for positive. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parse = exports.encode = void 0; +const encode = (num, buf) => { + if (!Number.isSafeInteger(num)) { + // The number is so large that javascript cannot represent it with integer + // precision. + throw Error('cannot encode number outside of javascript safe integer range'); + } + else if (num < 0) { + encodeNegative(num, buf); + } + else { + encodePositive(num, buf); + } + return buf; +}; +exports.encode = encode; +const encodePositive = (num, buf) => { + buf[0] = 0x80; + for (var i = buf.length; i > 1; i--) { + buf[i - 1] = num & 0xff; + num = Math.floor(num / 0x100); + } +}; +const encodeNegative = (num, buf) => { + buf[0] = 0xff; + var flipped = false; + num = num * -1; + for (var i = buf.length; i > 1; i--) { + var byte = num & 0xff; + num = Math.floor(num / 0x100); + if (flipped) { + buf[i - 1] = onesComp(byte); + } + else if (byte === 0) { + buf[i - 1] = 0; + } + else { + flipped = true; + buf[i - 1] = twosComp(byte); + } + } +}; +const parse = (buf) => { + const pre = buf[0]; + const value = pre === 0x80 ? pos(buf.subarray(1, buf.length)) + : pre === 0xff ? twos(buf) + : null; + if (value === null) { + throw Error('invalid base256 encoding'); + } + if (!Number.isSafeInteger(value)) { + // The number is so large that javascript cannot represent it with integer + // precision. + throw Error('parsed number outside of javascript safe integer range'); + } + return value; +}; +exports.parse = parse; +const twos = (buf) => { + var len = buf.length; + var sum = 0; + var flipped = false; + for (var i = len - 1; i > -1; i--) { + var byte = Number(buf[i]); + var f; + if (flipped) { + f = onesComp(byte); + } + else if (byte === 0) { + f = byte; + } + else { + flipped = true; + f = twosComp(byte); + } + if (f !== 0) { + sum -= f * Math.pow(256, len - i - 1); + } + } + return sum; +}; +const pos = (buf) => { + var len = buf.length; + var sum = 0; + for (var i = len - 1; i > -1; i--) { + var byte = Number(buf[i]); + if (byte !== 0) { + sum += byte * Math.pow(256, len - i - 1); + } + } + return sum; +}; +const onesComp = (byte) => (0xff ^ byte) & 0xff; +const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff; +//# sourceMappingURL=large-numbers.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/list.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/list.js new file mode 100644 index 00000000000000..3cd34bb4bad481 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/list.js @@ -0,0 +1,136 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.list = exports.filesFilter = void 0; +// tar -t +const fsm = __importStar(require("@isaacs/fs-minipass")); +const node_fs_1 = __importDefault(require("node:fs")); +const path_1 = require("path"); +const make_command_js_1 = require("./make-command.js"); +const parse_js_1 = require("./parse.js"); +const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js"); +const onReadEntryFunction = (opt) => { + const onReadEntry = opt.onReadEntry; + opt.onReadEntry = + onReadEntry ? + e => { + onReadEntry(e); + e.resume(); + } + : e => e.resume(); +}; +// construct a filter that limits the file entries listed +// include child entries if a dir is included +const filesFilter = (opt, files) => { + const map = new Map(files.map(f => [(0, strip_trailing_slashes_js_1.stripTrailingSlashes)(f), true])); + const filter = opt.filter; + const mapHas = (file, r = '') => { + const root = r || (0, path_1.parse)(file).root || '.'; + let ret; + if (file === root) + ret = false; + else { + const m = map.get(file); + if (m !== undefined) { + ret = m; + } + else { + ret = mapHas((0, path_1.dirname)(file), root); + } + } + map.set(file, ret); + return ret; + }; + opt.filter = + filter ? 
+ (file, entry) => filter(file, entry) && mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file)) + : file => mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file)); +}; +exports.filesFilter = filesFilter; +const listFileSync = (opt) => { + const p = new parse_js_1.Parser(opt); + const file = opt.file; + let fd; + try { + const stat = node_fs_1.default.statSync(file); + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + if (stat.size < readSize) { + p.end(node_fs_1.default.readFileSync(file)); + } + else { + let pos = 0; + const buf = Buffer.allocUnsafe(readSize); + fd = node_fs_1.default.openSync(file, 'r'); + while (pos < stat.size) { + const bytesRead = node_fs_1.default.readSync(fd, buf, 0, readSize, pos); + pos += bytesRead; + p.write(buf.subarray(0, bytesRead)); + } + p.end(); + } + } + finally { + if (typeof fd === 'number') { + try { + node_fs_1.default.closeSync(fd); + /* c8 ignore next */ + } + catch (er) { } + } + } +}; +const listFile = (opt, _files) => { + const parse = new parse_js_1.Parser(opt); + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + const file = opt.file; + const p = new Promise((resolve, reject) => { + parse.on('error', reject); + parse.on('end', resolve); + node_fs_1.default.stat(file, (er, stat) => { + if (er) { + reject(er); + } + else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size, + }); + stream.on('error', reject); + stream.pipe(parse); + } + }); + }); + return p; +}; +exports.list = (0, make_command_js_1.makeCommand)(listFileSync, listFile, opt => new parse_js_1.Parser(opt), opt => new parse_js_1.Parser(opt), (opt, files) => { + if (files?.length) + (0, exports.filesFilter)(opt, files); + if (!opt.noResume) + onReadEntryFunction(opt); +}); +//# sourceMappingURL=list.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/make-command.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/make-command.js new file mode 100644 index 00000000000000..1814319e78bc62 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/make-command.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.makeCommand = void 0; +const options_js_1 = require("./options.js"); +const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => { + return Object.assign((opt_ = [], entries, cb) => { + if (Array.isArray(opt_)) { + entries = opt_; + opt_ = {}; + } + if (typeof entries === 'function') { + cb = entries; + entries = undefined; + } + if (!entries) { + entries = []; + } + else { + entries = Array.from(entries); + } + const opt = (0, options_js_1.dealias)(opt_); + validate?.(opt, entries); + if ((0, options_js_1.isSyncFile)(opt)) { + if (typeof cb === 'function') { + throw new TypeError('callback not supported for sync tar functions'); + } + return syncFile(opt, entries); + } + else if ((0, options_js_1.isAsyncFile)(opt)) { + const p = asyncFile(opt, entries); + // weirdness to make TS happy + const c = cb ? cb : undefined; + return c ? 
p.then(() => c(), c) : p; + } + else if ((0, options_js_1.isSyncNoFile)(opt)) { + if (typeof cb === 'function') { + throw new TypeError('callback not supported for sync tar functions'); + } + return syncNoFile(opt, entries); + } + else if ((0, options_js_1.isAsyncNoFile)(opt)) { + if (typeof cb === 'function') { + throw new TypeError('callback only supported with file option'); + } + return asyncNoFile(opt, entries); + /* c8 ignore start */ + } + else { + throw new Error('impossible options??'); + } + /* c8 ignore stop */ + }, { + syncFile, + asyncFile, + syncNoFile, + asyncNoFile, + validate, + }); +}; +exports.makeCommand = makeCommand; +//# sourceMappingURL=make-command.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/mkdir.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/mkdir.js new file mode 100644 index 00000000000000..2b13ecbab6723e --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/mkdir.js @@ -0,0 +1,209 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.mkdirSync = exports.mkdir = void 0; +const chownr_1 = require("chownr"); +const fs_1 = __importDefault(require("fs")); +const mkdirp_1 = require("mkdirp"); +const node_path_1 = __importDefault(require("node:path")); +const cwd_error_js_1 = require("./cwd-error.js"); +const normalize_windows_path_js_1 = require("./normalize-windows-path.js"); +const symlink_error_js_1 = require("./symlink-error.js"); +const cGet = (cache, key) => cache.get((0, normalize_windows_path_js_1.normalizeWindowsPath)(key)); +const cSet = (cache, key, val) => cache.set((0, normalize_windows_path_js_1.normalizeWindowsPath)(key), val); +const checkCwd = (dir, cb) => { + fs_1.default.stat(dir, (er, st) => { + if (er || !st.isDirectory()) { + er = new cwd_error_js_1.CwdError(dir, er?.code || 'ENOTDIR'); + } + cb(er); + }); +}; +/** + * Wrapper around mkdirp for tar's needs. + * + * The main purpose is to avoid creating directories if we know that + * they already exist (and track which ones exist for this purpose), + * and prevent entries from being extracted into symlinked folders, + * if `preservePaths` is not set. + */ +const mkdir = (dir, opt, cb) => { + dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir); + // if there's any overlap between mask and mode, + // then we'll need an explicit chmod + /* c8 ignore next */ + const umask = opt.umask ?? 0o22; + const mode = opt.mode | 0o0700; + const needChmod = (mode & umask) !== 0; + const uid = opt.uid; + const gid = opt.gid; + const doChown = typeof uid === 'number' && + typeof gid === 'number' && + (uid !== opt.processUid || gid !== opt.processGid); + const preserve = opt.preserve; + const unlink = opt.unlink; + const cache = opt.cache; + const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd); + const done = (er, created) => { + if (er) { + cb(er); + } + else { + cSet(cache, dir, true); + if (created && doChown) { + (0, chownr_1.chownr)(created, uid, gid, er => done(er)); + } + else if (needChmod) { + fs_1.default.chmod(dir, mode, cb); + } + else { + cb(); + } + } + }; + if (cache && cGet(cache, dir) === true) { + return done(); + } + if (dir === cwd) { + return checkCwd(dir, done); + } + if (preserve) { + return (0, mkdirp_1.mkdirp)(dir, { mode }).then(made => done(null, made ?? 
undefined), // oh, ts + done); + } + const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir)); + const parts = sub.split('/'); + mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done); +}; +exports.mkdir = mkdir; +const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { + if (!parts.length) { + return cb(null, created); + } + const p = parts.shift(); + const part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(base + '/' + p)); + if (cGet(cache, part)) { + return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb); + } + fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)); +}; +const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => { + if (er) { + fs_1.default.lstat(part, (statEr, st) => { + if (statEr) { + statEr.path = + statEr.path && (0, normalize_windows_path_js_1.normalizeWindowsPath)(statEr.path); + cb(statEr); + } + else if (st.isDirectory()) { + mkdir_(part, parts, mode, cache, unlink, cwd, created, cb); + } + else if (unlink) { + fs_1.default.unlink(part, er => { + if (er) { + return cb(er); + } + fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)); + }); + } + else if (st.isSymbolicLink()) { + return cb(new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/'))); + } + else { + cb(er); + } + }); + } + else { + created = created || part; + mkdir_(part, parts, mode, cache, unlink, cwd, created, cb); + } +}; +const checkCwdSync = (dir) => { + let ok = false; + let code = undefined; + try { + ok = fs_1.default.statSync(dir).isDirectory(); + } + catch (er) { + code = er?.code; + } + finally { + if (!ok) { + throw new cwd_error_js_1.CwdError(dir, code ?? 'ENOTDIR'); + } + } +}; +const mkdirSync = (dir, opt) => { + dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir); + // if there's any overlap between mask and mode, + // then we'll need an explicit chmod + /* c8 ignore next */ + const umask = opt.umask ?? 0o22; + const mode = opt.mode | 0o700; + const needChmod = (mode & umask) !== 0; + const uid = opt.uid; + const gid = opt.gid; + const doChown = typeof uid === 'number' && + typeof gid === 'number' && + (uid !== opt.processUid || gid !== opt.processGid); + const preserve = opt.preserve; + const unlink = opt.unlink; + const cache = opt.cache; + const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd); + const done = (created) => { + cSet(cache, dir, true); + if (created && doChown) { + (0, chownr_1.chownrSync)(created, uid, gid); + } + if (needChmod) { + fs_1.default.chmodSync(dir, mode); + } + }; + if (cache && cGet(cache, dir) === true) { + return done(); + } + if (dir === cwd) { + checkCwdSync(cwd); + return done(); + } + if (preserve) { + return done((0, mkdirp_1.mkdirpSync)(dir, mode) ?? 
undefined); + } + const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir)); + const parts = sub.split('/'); + let created = undefined; + for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) { + part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(part)); + if (cGet(cache, part)) { + continue; + } + try { + fs_1.default.mkdirSync(part, mode); + created = created || part; + cSet(cache, part, true); + } + catch (er) { + const st = fs_1.default.lstatSync(part); + if (st.isDirectory()) { + cSet(cache, part, true); + continue; + } + else if (unlink) { + fs_1.default.unlinkSync(part); + fs_1.default.mkdirSync(part, mode); + created = created || part; + cSet(cache, part, true); + continue; + } + else if (st.isSymbolicLink()) { + return new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/')); + } + } + } + return done(created); +}; +exports.mkdirSync = mkdirSync; +//# sourceMappingURL=mkdir.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/mode-fix.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/mode-fix.js new file mode 100644 index 00000000000000..49dd727961d290 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/mode-fix.js @@ -0,0 +1,29 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.modeFix = void 0; +const modeFix = (mode, isDir, portable) => { + mode &= 0o7777; + // in portable mode, use the minimum reasonable umask + // if this system creates files with 0o664 by default + // (as some linux distros do), then we'll write the + // archive with 0o644 instead. Also, don't ever create + // a file that is not readable/writable by the owner. + if (portable) { + mode = (mode | 0o600) & ~0o22; + } + // if dirs are readable, then they should be listable + if (isDir) { + if (mode & 0o400) { + mode |= 0o100; + } + if (mode & 0o40) { + mode |= 0o10; + } + if (mode & 0o4) { + mode |= 0o1; + } + } + return mode; +}; +exports.modeFix = modeFix; +//# sourceMappingURL=mode-fix.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-unicode.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-unicode.js new file mode 100644 index 00000000000000..2f08ce46d98c4c --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-unicode.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.normalizeUnicode = void 0; +// warning: extremely hot code path. +// This has been meticulously optimized for use +// within npm install on large package trees. +// Do not edit without careful benchmarking. 
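+//
+// Illustrative note (ours, not upstream tar): NFD decomposes precomposed
+// characters, so composed and decomposed spellings of the same text map to
+// the same normalized value, e.g.
+//   normalizeUnicode('caf\u00e9') === normalizeUnicode('cafe\u0301')
+// and a repeated call for either spelling is a plain object-property lookup.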
+const normalizeCache = Object.create(null); +const { hasOwnProperty } = Object.prototype; +const normalizeUnicode = (s) => { + if (!hasOwnProperty.call(normalizeCache, s)) { + normalizeCache[s] = s.normalize('NFD'); + } + return normalizeCache[s]; +}; +exports.normalizeUnicode = normalizeUnicode; +//# sourceMappingURL=normalize-unicode.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-windows-path.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-windows-path.js new file mode 100644 index 00000000000000..b0c7aaa9f2d175 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-windows-path.js @@ -0,0 +1,12 @@ +"use strict"; +// on windows, either \ or / are valid directory separators. +// on unix, \ is a valid character in filenames. +// so, on windows, and only on windows, we replace all \ chars with /, +// so that we can use / as our one and only directory separator char. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.normalizeWindowsPath = void 0; +const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform; +exports.normalizeWindowsPath = platform !== 'win32' ? + (p) => p + : (p) => p && p.replace(/\\/g, '/'); +//# sourceMappingURL=normalize-windows-path.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/options.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/options.js new file mode 100644 index 00000000000000..4cd06505bc72b2 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/options.js @@ -0,0 +1,66 @@ +"use strict"; +// turn tar(1) style args like `C` into the more verbose things like `cwd` +Object.defineProperty(exports, "__esModule", { value: true }); +exports.dealias = exports.isNoFile = exports.isFile = exports.isAsync = exports.isSync = exports.isAsyncNoFile = exports.isSyncNoFile = exports.isAsyncFile = exports.isSyncFile = void 0; +const argmap = new Map([ + ['C', 'cwd'], + ['f', 'file'], + ['z', 'gzip'], + ['P', 'preservePaths'], + ['U', 'unlink'], + ['strip-components', 'strip'], + ['stripComponents', 'strip'], + ['keep-newer', 'newer'], + ['keepNewer', 'newer'], + ['keep-newer-files', 'newer'], + ['keepNewerFiles', 'newer'], + ['k', 'keep'], + ['keep-existing', 'keep'], + ['keepExisting', 'keep'], + ['m', 'noMtime'], + ['no-mtime', 'noMtime'], + ['p', 'preserveOwner'], + ['L', 'follow'], + ['h', 'follow'], + ['onentry', 'onReadEntry'], +]); +const isSyncFile = (o) => !!o.sync && !!o.file; +exports.isSyncFile = isSyncFile; +const isAsyncFile = (o) => !o.sync && !!o.file; +exports.isAsyncFile = isAsyncFile; +const isSyncNoFile = (o) => !!o.sync && !o.file; +exports.isSyncNoFile = isSyncNoFile; +const isAsyncNoFile = (o) => !o.sync && !o.file; +exports.isAsyncNoFile = isAsyncNoFile; +const isSync = (o) => !!o.sync; +exports.isSync = isSync; +const isAsync = (o) => !o.sync; +exports.isAsync = isAsync; +const isFile = (o) => !!o.file; +exports.isFile = isFile; +const isNoFile = (o) => !o.file; +exports.isNoFile = isNoFile; +const dealiasKey = (k) => { + const d = argmap.get(k); + if (d) + return d; + return k; +}; +const dealias = (opt = {}) => { + if (!opt) + return {}; + const result = {}; + for (const [key, v] of Object.entries(opt)) { + // TS doesn't know that aliases are going to always be the same type + const k = dealiasKey(key); + result[k] = v; + } + // affordance for deprecated noChmod -> chmod + 
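+    // illustrative example (ours, not upstream): short flags expand and the
+    // deprecated noChmod flips into chmod, e.g.
+    //   dealias({ C: '/tmp', f: 'x.tgz', noChmod: false })
+    //   // -> { cwd: '/tmp', file: 'x.tgz', chmod: true }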
if (result.chmod === undefined && result.noChmod === false) { + result.chmod = true; + } + delete result.noChmod; + return result; +}; +exports.dealias = dealias; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/pack.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/pack.js new file mode 100644 index 00000000000000..303e93063c2db4 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/pack.js @@ -0,0 +1,477 @@ +"use strict"; +// A readable tar stream creator +// Technically, this is a transform stream that you write paths into, +// and tar format comes out of. +// The `add()` method is like `write()` but returns `this`, +// and end() returns `this` as well, so you can +// do `new Pack(opt).add('files').add('dir').end().pipe(output)` +// You could also do something like: +// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar')) +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; + var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ?
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PackSync = exports.Pack = exports.PackJob = void 0; +const fs_1 = __importDefault(require("fs")); +const write_entry_js_1 = require("./write-entry.js"); +class PackJob { + path; + absolute; + entry; + stat; + readdir; + pending = false; + ignore = false; + piped = false; + constructor(path, absolute) { + this.path = path || './'; + this.absolute = absolute; + } +} +exports.PackJob = PackJob; +const minipass_1 = require("minipass"); +const zlib = __importStar(require("minizlib")); +const yallist_1 = require("yallist"); +const read_entry_js_1 = require("./read-entry.js"); +const warn_method_js_1 = require("./warn-method.js"); +const EOF = Buffer.alloc(1024); +const ONSTAT = Symbol('onStat'); +const ENDED = Symbol('ended'); +const QUEUE = Symbol('queue'); +const CURRENT = Symbol('current'); +const PROCESS = Symbol('process'); +const PROCESSING = Symbol('processing'); +const PROCESSJOB = Symbol('processJob'); +const JOBS = Symbol('jobs'); +const JOBDONE = Symbol('jobDone'); +const ADDFSENTRY = Symbol('addFSEntry'); +const ADDTARENTRY = Symbol('addTarEntry'); +const STAT = Symbol('stat'); +const READDIR = Symbol('readdir'); +const ONREADDIR = Symbol('onreaddir'); +const PIPE = Symbol('pipe'); +const ENTRY = Symbol('entry'); +const ENTRYOPT = Symbol('entryOpt'); +const WRITEENTRYCLASS = Symbol('writeEntryClass'); +const WRITE = Symbol('write'); +const ONDRAIN = Symbol('ondrain'); +const path_1 = __importDefault(require("path")); +const normalize_windows_path_js_1 = require("./normalize-windows-path.js"); +class Pack extends minipass_1.Minipass { + opt; + cwd; + maxReadSize; + preservePaths; + strict; + noPax; + prefix; + linkCache; + statCache; + file; + portable; + zip; + readdirCache; + noDirRecurse; + follow; + noMtime; + mtime; + filter; + jobs; + [WRITEENTRYCLASS]; + onWriteEntry; + [QUEUE]; + [JOBS] = 0; + [PROCESSING] = false; + [ENDED] = false; + constructor(opt = {}) { + //@ts-ignore + super(); + this.opt = opt; + this.file = opt.file || ''; + this.cwd = opt.cwd || process.cwd(); + this.maxReadSize = opt.maxReadSize; + this.preservePaths = !!opt.preservePaths; + this.strict = !!opt.strict; + this.noPax = !!opt.noPax; + this.prefix = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix || ''); + this.linkCache = opt.linkCache || new Map(); + this.statCache = opt.statCache || new Map(); + this.readdirCache = opt.readdirCache || new Map(); + this.onWriteEntry = opt.onWriteEntry; + this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntry; + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + this.portable = !!opt.portable; + if (opt.gzip || opt.brotli) { + if (opt.gzip && opt.brotli) { + throw new TypeError('gzip and brotli are mutually exclusive'); + } + if (opt.gzip) { + if (typeof opt.gzip !== 'object') { + opt.gzip = {}; + } + if (this.portable) { + opt.gzip.portable = true; + } + this.zip = new zlib.Gzip(opt.gzip); + } + if (opt.brotli) { + if (typeof opt.brotli !== 'object') { + opt.brotli = {}; + } + this.zip = new zlib.BrotliCompress(opt.brotli); + } + /* c8 ignore next */ + if (!this.zip) + throw new Error('impossible'); + const zip = this.zip; + zip.on('data', chunk => super.write(chunk)); + zip.on('end', () => super.end()); + zip.on('drain', () => this[ONDRAIN]()); + this.on('resume', () => zip.resume()); + } + else { + this.on('drain', this[ONDRAIN]); + } + this.noDirRecurse = !!opt.noDirRecurse; + this.follow = !!opt.follow; + this.noMtime = 
!!opt.noMtime; + if (opt.mtime) + this.mtime = opt.mtime; + this.filter = + typeof opt.filter === 'function' ? opt.filter : () => true; + this[QUEUE] = new yallist_1.Yallist(); + this[JOBS] = 0; + this.jobs = Number(opt.jobs) || 4; + this[PROCESSING] = false; + this[ENDED] = false; + } + [WRITE](chunk) { + return super.write(chunk); + } + add(path) { + this.write(path); + return this; + } + end(path, encoding, cb) { + /* c8 ignore start */ + if (typeof path === 'function') { + cb = path; + path = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (path) { + this.add(path); + } + this[ENDED] = true; + this[PROCESS](); + /* c8 ignore next */ + if (cb) + cb(); + return this; + } + write(path) { + if (this[ENDED]) { + throw new Error('write after end'); + } + if (path instanceof read_entry_js_1.ReadEntry) { + this[ADDTARENTRY](path); + } + else { + this[ADDFSENTRY](path); + } + return this.flowing; + } + [ADDTARENTRY](p) { + const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p.path)); + // in this case, we don't have to wait for the stat + if (!this.filter(p.path, p)) { + p.resume(); + } + else { + const job = new PackJob(p.path, absolute); + job.entry = new write_entry_js_1.WriteEntryTar(p, this[ENTRYOPT](job)); + job.entry.on('end', () => this[JOBDONE](job)); + this[JOBS] += 1; + this[QUEUE].push(job); + } + this[PROCESS](); + } + [ADDFSENTRY](p) { + const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p)); + this[QUEUE].push(new PackJob(p, absolute)); + this[PROCESS](); + } + [STAT](job) { + job.pending = true; + this[JOBS] += 1; + const stat = this.follow ? 'stat' : 'lstat'; + fs_1.default[stat](job.absolute, (er, stat) => { + job.pending = false; + this[JOBS] -= 1; + if (er) { + this.emit('error', er); + } + else { + this[ONSTAT](job, stat); + } + }); + } + [ONSTAT](job, stat) { + this.statCache.set(job.absolute, stat); + job.stat = stat; + // now we have the stat, we can filter it. 
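+        // (clarifying note, ours): filesystem entries reach the filter here
+        // as (path, stat), while re-packed ReadEntry objects were already
+        // filtered in [ADDTARENTRY] as (path, entry), before any stat call.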
+ if (!this.filter(job.path, stat)) { + job.ignore = true; + } + this[PROCESS](); + } + [READDIR](job) { + job.pending = true; + this[JOBS] += 1; + fs_1.default.readdir(job.absolute, (er, entries) => { + job.pending = false; + this[JOBS] -= 1; + if (er) { + return this.emit('error', er); + } + this[ONREADDIR](job, entries); + }); + } + [ONREADDIR](job, entries) { + this.readdirCache.set(job.absolute, entries); + job.readdir = entries; + this[PROCESS](); + } + [PROCESS]() { + if (this[PROCESSING]) { + return; + } + this[PROCESSING] = true; + for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) { + this[PROCESSJOB](w.value); + if (w.value.ignore) { + const p = w.next; + this[QUEUE].removeNode(w); + w.next = p; + } + } + this[PROCESSING] = false; + if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) { + if (this.zip) { + this.zip.end(EOF); + } + else { + super.write(EOF); + super.end(); + } + } + } + get [CURRENT]() { + return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value; + } + [JOBDONE](_job) { + this[QUEUE].shift(); + this[JOBS] -= 1; + this[PROCESS](); + } + [PROCESSJOB](job) { + if (job.pending) { + return; + } + if (job.entry) { + if (job === this[CURRENT] && !job.piped) { + this[PIPE](job); + } + return; + } + if (!job.stat) { + const sc = this.statCache.get(job.absolute); + if (sc) { + this[ONSTAT](job, sc); + } + else { + this[STAT](job); + } + } + if (!job.stat) { + return; + } + // filtered out! + if (job.ignore) { + return; + } + if (!this.noDirRecurse && + job.stat.isDirectory() && + !job.readdir) { + const rc = this.readdirCache.get(job.absolute); + if (rc) { + this[ONREADDIR](job, rc); + } + else { + this[READDIR](job); + } + if (!job.readdir) { + return; + } + } + // we know it doesn't have an entry, because that got checked above + job.entry = this[ENTRY](job); + if (!job.entry) { + job.ignore = true; + return; + } + if (job === this[CURRENT] && !job.piped) { + this[PIPE](job); + } + } + [ENTRYOPT](job) { + return { + onwarn: (code, msg, data) => this.warn(code, msg, data), + noPax: this.noPax, + cwd: this.cwd, + absolute: job.absolute, + preservePaths: this.preservePaths, + maxReadSize: this.maxReadSize, + strict: this.strict, + portable: this.portable, + linkCache: this.linkCache, + statCache: this.statCache, + noMtime: this.noMtime, + mtime: this.mtime, + prefix: this.prefix, + onWriteEntry: this.onWriteEntry, + }; + } + [ENTRY](job) { + this[JOBS] += 1; + try { + const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job)); + return e + .on('end', () => this[JOBDONE](job)) + .on('error', er => this.emit('error', er)); + } + catch (er) { + this.emit('error', er); + } + } + [ONDRAIN]() { + if (this[CURRENT] && this[CURRENT].entry) { + this[CURRENT].entry.resume(); + } + } + // like .pipe() but using super, because our write() is special + [PIPE](job) { + job.piped = true; + if (job.readdir) { + job.readdir.forEach(entry => { + const p = job.path; + const base = p === './' ? 
'' : p.replace(/\/*$/, '/'); + this[ADDFSENTRY](base + entry); + }); + } + const source = job.entry; + const zip = this.zip; + /* c8 ignore start */ + if (!source) + throw new Error('cannot pipe without source'); + /* c8 ignore stop */ + if (zip) { + source.on('data', chunk => { + if (!zip.write(chunk)) { + source.pause(); + } + }); + } + else { + source.on('data', chunk => { + if (!super.write(chunk)) { + source.pause(); + } + }); + } + } + pause() { + if (this.zip) { + this.zip.pause(); + } + return super.pause(); + } + warn(code, message, data = {}) { + (0, warn_method_js_1.warnMethod)(this, code, message, data); + } +} +exports.Pack = Pack; +class PackSync extends Pack { + sync = true; + constructor(opt) { + super(opt); + this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntrySync; + } + // pause/resume are no-ops in sync streams. + pause() { } + resume() { } + [STAT](job) { + const stat = this.follow ? 'statSync' : 'lstatSync'; + this[ONSTAT](job, fs_1.default[stat](job.absolute)); + } + [READDIR](job) { + this[ONREADDIR](job, fs_1.default.readdirSync(job.absolute)); + } + // gotta get it all in this tick + [PIPE](job) { + const source = job.entry; + const zip = this.zip; + if (job.readdir) { + job.readdir.forEach(entry => { + const p = job.path; + const base = p === './' ? '' : p.replace(/\/*$/, '/'); + this[ADDFSENTRY](base + entry); + }); + } + /* c8 ignore start */ + if (!source) + throw new Error('Cannot pipe without source'); + /* c8 ignore stop */ + if (zip) { + source.on('data', chunk => { + zip.write(chunk); + }); + } + else { + source.on('data', chunk => { + super[WRITE](chunk); + }); + } + } +} +exports.PackSync = PackSync; +//# sourceMappingURL=pack.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/package.json b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/parse.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/parse.js new file mode 100644 index 00000000000000..1f7e5fd65e869f --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/parse.js @@ -0,0 +1,599 @@ +"use strict"; +// this[BUFFER] is the remainder of a chunk if we're waiting for +// the full 512 bytes of a header to come in. We will Buffer.concat() +// it to the next write(), which is a mem copy, but a small one. +// +// this[QUEUE] is a Yallist of entries that haven't been emitted +// yet this can only get filled up if the user keeps write()ing after +// a write() returns false, or does a write() with more than one entry +// +// We don't buffer chunks, we always parse them and either create an +// entry, or push it into the active entry. The ReadEntry class knows +// to throw data away if .ignore=true +// +// Shift entry off the buffer when it emits 'end', and emit 'entry' for +// the next one in the list. 
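+//
+// Illustrative usage sketch (ours, not part of this file; names are
+// hypothetical):
+//
+//   const p = new Parser({ onReadEntry: entry => {
+//     console.log(entry.path)
+//     entry.resume() // entries start paused; drain any we don't consume
+//   } })
+//   fs.createReadStream('archive.tar').pipe(p)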
+// +// At any time, we're pushing body chunks into the entry at WRITEENTRY, +// and waiting for 'end' on the entry at READENTRY +// +// ignored entries get .resume() called on them straight away +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Parser = void 0; +const events_1 = require("events"); +const minizlib_1 = require("minizlib"); +const yallist_1 = require("yallist"); +const header_js_1 = require("./header.js"); +const pax_js_1 = require("./pax.js"); +const read_entry_js_1 = require("./read-entry.js"); +const warn_method_js_1 = require("./warn-method.js"); +const maxMetaEntrySize = 1024 * 1024; +const gzipHeader = Buffer.from([0x1f, 0x8b]); +const STATE = Symbol('state'); +const WRITEENTRY = Symbol('writeEntry'); +const READENTRY = Symbol('readEntry'); +const NEXTENTRY = Symbol('nextEntry'); +const PROCESSENTRY = Symbol('processEntry'); +const EX = Symbol('extendedHeader'); +const GEX = Symbol('globalExtendedHeader'); +const META = Symbol('meta'); +const EMITMETA = Symbol('emitMeta'); +const BUFFER = Symbol('buffer'); +const QUEUE = Symbol('queue'); +const ENDED = Symbol('ended'); +const EMITTEDEND = Symbol('emittedEnd'); +const EMIT = Symbol('emit'); +const UNZIP = Symbol('unzip'); +const CONSUMECHUNK = Symbol('consumeChunk'); +const CONSUMECHUNKSUB = Symbol('consumeChunkSub'); +const CONSUMEBODY = Symbol('consumeBody'); +const CONSUMEMETA = Symbol('consumeMeta'); +const CONSUMEHEADER = Symbol('consumeHeader'); +const CONSUMING = Symbol('consuming'); +const BUFFERCONCAT = Symbol('bufferConcat'); +const MAYBEEND = Symbol('maybeEnd'); +const WRITING = Symbol('writing'); +const ABORTED = Symbol('aborted'); +const DONE = Symbol('onDone'); +const SAW_VALID_ENTRY = Symbol('sawValidEntry'); +const SAW_NULL_BLOCK = Symbol('sawNullBlock'); +const SAW_EOF = Symbol('sawEOF'); +const CLOSESTREAM = Symbol('closeStream'); +const noop = () => true; +class Parser extends events_1.EventEmitter { + file; + strict; + maxMetaEntrySize; + filter; + brotli; + writable = true; + readable = false; + [QUEUE] = new yallist_1.Yallist(); + [BUFFER]; + [READENTRY]; + [WRITEENTRY]; + [STATE] = 'begin'; + [META] = ''; + [EX]; + [GEX]; + [ENDED] = false; + [UNZIP]; + [ABORTED] = false; + [SAW_VALID_ENTRY]; + [SAW_NULL_BLOCK] = false; + [SAW_EOF] = false; + [WRITING] = false; + [CONSUMING] = false; + [EMITTEDEND] = false; + constructor(opt = {}) { + super(); + this.file = opt.file || ''; + // these BADARCHIVE errors can't be detected early. listen on DONE. + this.on(DONE, () => { + if (this[STATE] === 'begin' || + this[SAW_VALID_ENTRY] === false) { + // either less than 1 block of data, or all entries were invalid. + // Either way, probably not even a tarball. + this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format'); + } + }); + if (opt.ondone) { + this.on(DONE, opt.ondone); + } + else { + this.on(DONE, () => { + this.emit('prefinish'); + this.emit('finish'); + this.emit('end'); + }); + } + this.strict = !!opt.strict; + this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize; + this.filter = typeof opt.filter === 'function' ? opt.filter : noop; + // Unlike gzip, brotli doesn't have any magic bytes to identify it + // Users need to explicitly tell us they're extracting a brotli file + // Or we infer from the file extension + const isTBR = opt.file && + (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr')); + // if it's a tbr file it MIGHT be brotli, but we don't know until + // we look at it and verify it's not a valid tar file. 
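+        // (clarifying note, ours) the tri-state below:
+        //   true / false -> decided by the caller, or ruled out by the name
+        //   undefined    -> defer; write() sniffs the first 512 bytes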
+ this.brotli = + !opt.gzip && opt.brotli !== undefined ? opt.brotli + : isTBR ? undefined + : false; + // have to set this so that streams are ok piping into it + this.on('end', () => this[CLOSESTREAM]()); + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + if (typeof opt.onReadEntry === 'function') { + this.on('entry', opt.onReadEntry); + } + } + warn(code, message, data = {}) { + (0, warn_method_js_1.warnMethod)(this, code, message, data); + } + [CONSUMEHEADER](chunk, position) { + if (this[SAW_VALID_ENTRY] === undefined) { + this[SAW_VALID_ENTRY] = false; + } + let header; + try { + header = new header_js_1.Header(chunk, position, this[EX], this[GEX]); + } + catch (er) { + return this.warn('TAR_ENTRY_INVALID', er); + } + if (header.nullBlock) { + if (this[SAW_NULL_BLOCK]) { + this[SAW_EOF] = true; + // ending an archive with no entries. pointless, but legal. + if (this[STATE] === 'begin') { + this[STATE] = 'header'; + } + this[EMIT]('eof'); + } + else { + this[SAW_NULL_BLOCK] = true; + this[EMIT]('nullBlock'); + } + } + else { + this[SAW_NULL_BLOCK] = false; + if (!header.cksumValid) { + this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header }); + } + else if (!header.path) { + this.warn('TAR_ENTRY_INVALID', 'path is required', { header }); + } + else { + const type = header.type; + if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) { + this.warn('TAR_ENTRY_INVALID', 'linkpath required', { + header, + }); + } + else if (!/^(Symbolic)?Link$/.test(type) && + !/^(Global)?ExtendedHeader$/.test(type) && + header.linkpath) { + this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', { + header, + }); + } + else { + const entry = (this[WRITEENTRY] = new read_entry_js_1.ReadEntry(header, this[EX], this[GEX])); + // we do this for meta & ignored entries as well, because they + // are still valid tar, or else we wouldn't know to ignore them + if (!this[SAW_VALID_ENTRY]) { + if (entry.remain) { + // this might be the one! + const onend = () => { + if (!entry.invalid) { + this[SAW_VALID_ENTRY] = true; + } + }; + entry.on('end', onend); + } + else { + this[SAW_VALID_ENTRY] = true; + } + } + if (entry.meta) { + if (entry.size > this.maxMetaEntrySize) { + entry.ignore = true; + this[EMIT]('ignoredEntry', entry); + this[STATE] = 'ignore'; + entry.resume(); + } + else if (entry.size > 0) { + this[META] = ''; + entry.on('data', c => (this[META] += c)); + this[STATE] = 'meta'; + } + } + else { + this[EX] = undefined; + entry.ignore = + entry.ignore || !this.filter(entry.path, entry); + if (entry.ignore) { + // probably valid, just not something we care about + this[EMIT]('ignoredEntry', entry); + this[STATE] = entry.remain ? 
'ignore' : 'header'; + entry.resume(); + } + else { + if (entry.remain) { + this[STATE] = 'body'; + } + else { + this[STATE] = 'header'; + entry.end(); + } + if (!this[READENTRY]) { + this[QUEUE].push(entry); + this[NEXTENTRY](); + } + else { + this[QUEUE].push(entry); + } + } + } + } + } + } + } + [CLOSESTREAM]() { + queueMicrotask(() => this.emit('close')); + } + [PROCESSENTRY](entry) { + let go = true; + if (!entry) { + this[READENTRY] = undefined; + go = false; + } + else if (Array.isArray(entry)) { + const [ev, ...args] = entry; + this.emit(ev, ...args); + } + else { + this[READENTRY] = entry; + this.emit('entry', entry); + if (!entry.emittedEnd) { + entry.on('end', () => this[NEXTENTRY]()); + go = false; + } + } + return go; + } + [NEXTENTRY]() { + do { } while (this[PROCESSENTRY](this[QUEUE].shift())); + if (!this[QUEUE].length) { + // At this point, there's nothing in the queue, but we may have an + // entry which is being consumed (readEntry). + // If we don't, then we definitely can handle more data. + // If we do, and either it's flowing, or it has never had any data + // written to it, then it needs more. + // The only other possibility is that it has returned false from a + // write() call, so we wait for the next drain to continue. + const re = this[READENTRY]; + const drainNow = !re || re.flowing || re.size === re.remain; + if (drainNow) { + if (!this[WRITING]) { + this.emit('drain'); + } + } + else { + re.once('drain', () => this.emit('drain')); + } + } + } + [CONSUMEBODY](chunk, position) { + // write up to but no more than writeEntry.blockRemain + const entry = this[WRITEENTRY]; + /* c8 ignore start */ + if (!entry) { + throw new Error('attempt to consume body without entry??'); + } + const br = entry.blockRemain ?? 0; + /* c8 ignore stop */ + const c = br >= chunk.length && position === 0 ? + chunk + : chunk.subarray(position, position + br); + entry.write(c); + if (!entry.blockRemain) { + this[STATE] = 'header'; + this[WRITEENTRY] = undefined; + entry.end(); + } + return c.length; + } + [CONSUMEMETA](chunk, position) { + const entry = this[WRITEENTRY]; + const ret = this[CONSUMEBODY](chunk, position); + // if we finished, then the entry is reset + if (!this[WRITEENTRY] && entry) { + this[EMITMETA](entry); + } + return ret; + } + [EMIT](ev, data, extra) { + if (!this[QUEUE].length && !this[READENTRY]) { + this.emit(ev, data, extra); + } + else { + this[QUEUE].push([ev, data, extra]); + } + } + [EMITMETA](entry) { + this[EMIT]('meta', this[META]); + switch (entry.type) { + case 'ExtendedHeader': + case 'OldExtendedHeader': + this[EX] = pax_js_1.Pax.parse(this[META], this[EX], false); + break; + case 'GlobalExtendedHeader': + this[GEX] = pax_js_1.Pax.parse(this[META], this[GEX], true); + break; + case 'NextFileHasLongPath': + case 'OldGnuLongPath': { + const ex = this[EX] ?? 
Object.create(null); + this[EX] = ex; + ex.path = this[META].replace(/\0.*/, ''); + break; + } + case 'NextFileHasLongLinkpath': { + const ex = this[EX] || Object.create(null); + this[EX] = ex; + ex.linkpath = this[META].replace(/\0.*/, ''); + break; + } + /* c8 ignore start */ + default: + throw new Error('unknown meta: ' + entry.type); + /* c8 ignore stop */ + } + } + abort(error) { + this[ABORTED] = true; + this.emit('abort', error); + // always throws, even in non-strict mode + this.warn('TAR_ABORT', error, { recoverable: false }); + } + write(chunk, encoding, cb) { + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, + /* c8 ignore next */ + typeof encoding === 'string' ? encoding : 'utf8'); + } + if (this[ABORTED]) { + /* c8 ignore next */ + cb?.(); + return false; + } + // first write, might be gzipped + const needSniff = this[UNZIP] === undefined || + (this.brotli === undefined && this[UNZIP] === false); + if (needSniff && chunk) { + if (this[BUFFER]) { + chunk = Buffer.concat([this[BUFFER], chunk]); + this[BUFFER] = undefined; + } + if (chunk.length < gzipHeader.length) { + this[BUFFER] = chunk; + /* c8 ignore next */ + cb?.(); + return true; + } + // look for gzip header + for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) { + if (chunk[i] !== gzipHeader[i]) { + this[UNZIP] = false; + } + } + const maybeBrotli = this.brotli === undefined; + if (this[UNZIP] === false && maybeBrotli) { + // read the first header to see if it's a valid tar file. If so, + // we can safely assume that it's not actually brotli, despite the + // .tbr or .tar.br file extension. + // if we ended before getting a full chunk, yes, def brotli + if (chunk.length < 512) { + if (this[ENDED]) { + this.brotli = true; + } + else { + this[BUFFER] = chunk; + /* c8 ignore next */ + cb?.(); + return true; + } + } + else { + // if it's tar, it's pretty reliably not brotli, chances of + // that happening are astronomical. + try { + new header_js_1.Header(chunk.subarray(0, 512)); + this.brotli = false; + } + catch (_) { + this.brotli = true; + } + } + } + if (this[UNZIP] === undefined || + (this[UNZIP] === false && this.brotli)) { + const ended = this[ENDED]; + this[ENDED] = false; + this[UNZIP] = + this[UNZIP] === undefined ? + new minizlib_1.Unzip({}) + : new minizlib_1.BrotliDecompress({}); + this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk)); + this[UNZIP].on('error', er => this.abort(er)); + this[UNZIP].on('end', () => { + this[ENDED] = true; + this[CONSUMECHUNK](); + }); + this[WRITING] = true; + const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk); + this[WRITING] = false; + cb?.(); + return ret; + } + } + this[WRITING] = true; + if (this[UNZIP]) { + this[UNZIP].write(chunk); + } + else { + this[CONSUMECHUNK](chunk); + } + this[WRITING] = false; + // return false if there's a queue, or if the current entry isn't flowing + const ret = this[QUEUE].length ? false + : this[READENTRY] ? this[READENTRY].flowing + : true; + // if we have no queue, then that means a clogged READENTRY + if (!ret && !this[QUEUE].length) { + this[READENTRY]?.once('drain', () => this.emit('drain')); + } + /* c8 ignore next */ + cb?.(); + return ret; + } + [BUFFERCONCAT](c) { + if (c && !this[ABORTED]) { + this[BUFFER] = + this[BUFFER] ? 
Buffer.concat([this[BUFFER], c]) : c; + } + } + [MAYBEEND]() { + if (this[ENDED] && + !this[EMITTEDEND] && + !this[ABORTED] && + !this[CONSUMING]) { + this[EMITTEDEND] = true; + const entry = this[WRITEENTRY]; + if (entry && entry.blockRemain) { + // truncated, likely a damaged file + const have = this[BUFFER] ? this[BUFFER].length : 0; + this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry }); + if (this[BUFFER]) { + entry.write(this[BUFFER]); + } + entry.end(); + } + this[EMIT](DONE); + } + } + [CONSUMECHUNK](chunk) { + if (this[CONSUMING] && chunk) { + this[BUFFERCONCAT](chunk); + } + else if (!chunk && !this[BUFFER]) { + this[MAYBEEND](); + } + else if (chunk) { + this[CONSUMING] = true; + if (this[BUFFER]) { + this[BUFFERCONCAT](chunk); + const c = this[BUFFER]; + this[BUFFER] = undefined; + this[CONSUMECHUNKSUB](c); + } + else { + this[CONSUMECHUNKSUB](chunk); + } + while (this[BUFFER] && + this[BUFFER]?.length >= 512 && + !this[ABORTED] && + !this[SAW_EOF]) { + const c = this[BUFFER]; + this[BUFFER] = undefined; + this[CONSUMECHUNKSUB](c); + } + this[CONSUMING] = false; + } + if (!this[BUFFER] || this[ENDED]) { + this[MAYBEEND](); + } + } + [CONSUMECHUNKSUB](chunk) { + // we know that we are in CONSUMING mode, so anything written goes into + // the buffer. Advance the position and put any remainder in the buffer. + let position = 0; + const length = chunk.length; + while (position + 512 <= length && + !this[ABORTED] && + !this[SAW_EOF]) { + switch (this[STATE]) { + case 'begin': + case 'header': + this[CONSUMEHEADER](chunk, position); + position += 512; + break; + case 'ignore': + case 'body': + position += this[CONSUMEBODY](chunk, position); + break; + case 'meta': + position += this[CONSUMEMETA](chunk, position); + break; + /* c8 ignore start */ + default: + throw new Error('invalid state: ' + this[STATE]); + /* c8 ignore stop */ + } + } + if (position < length) { + if (this[BUFFER]) { + this[BUFFER] = Buffer.concat([ + chunk.subarray(position), + this[BUFFER], + ]); + } + else { + this[BUFFER] = chunk.subarray(position); + } + } + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + if (cb) + this.once('finish', cb); + if (!this[ABORTED]) { + if (this[UNZIP]) { + /* c8 ignore start */ + if (chunk) + this[UNZIP].write(chunk); + /* c8 ignore stop */ + this[UNZIP].end(); + } + else { + this[ENDED] = true; + if (this.brotli === undefined) + chunk = chunk || Buffer.alloc(0); + if (chunk) + this.write(chunk); + this[MAYBEEND](); + } + } + return this; + } +} +exports.Parser = Parser; +//# sourceMappingURL=parse.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/path-reservations.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/path-reservations.js new file mode 100644 index 00000000000000..9ff391c44092c7 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/path-reservations.js @@ -0,0 +1,170 @@ +"use strict"; +// A path exclusive reservation system +// reserve([list, of, paths], fn) +// When the fn is first in line for all its paths, it +// is called with a cb that clears the reservation. 
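+//
+// Illustrative usage sketch (ours, not part of this file):
+//
+//   const r = new PathReservations()
+//   r.reserve(['a/b/c'], clear => {
+//     // runs once first in line for a/b/c and all of its parent dirs
+//     clear() // release, letting queued reservations run
+//   })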
+// +// Used by async unpack to avoid clobbering paths in use, +// while still allowing maximal safe parallelization. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PathReservations = void 0; +const node_path_1 = require("node:path"); +const normalize_unicode_js_1 = require("./normalize-unicode.js"); +const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js"); +const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform; +const isWindows = platform === 'win32'; +// return a set of parent dirs for a given path +// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d'] +const getDirs = (path) => { + const dirs = path + .split('/') + .slice(0, -1) + .reduce((set, path) => { + const s = set[set.length - 1]; + if (s !== undefined) { + path = (0, node_path_1.join)(s, path); + } + set.push(path || '/'); + return set; + }, []); + return dirs; +}; +class PathReservations { + // path => [function or Set] + // A Set object means a directory reservation + // A fn is a direct reservation on that path + #queues = new Map(); + // fn => {paths:[path,...], dirs:[path, ...]} + #reservations = new Map(); + // functions currently running + #running = new Set(); + reserve(paths, fn) { + paths = + isWindows ? + ['win32 parallelization disabled'] + : paths.map(p => { + // don't need normPath, because we skip this entirely for windows + return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, node_path_1.join)((0, normalize_unicode_js_1.normalizeUnicode)(p))).toLowerCase(); + }); + const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))); + this.#reservations.set(fn, { dirs, paths }); + for (const p of paths) { + const q = this.#queues.get(p); + if (!q) { + this.#queues.set(p, [fn]); + } + else { + q.push(fn); + } + } + for (const dir of dirs) { + const q = this.#queues.get(dir); + if (!q) { + this.#queues.set(dir, [new Set([fn])]); + } + else { + const l = q[q.length - 1]; + if (l instanceof Set) { + l.add(fn); + } + else { + q.push(new Set([fn])); + } + } + } + return this.#run(fn); + } + // return the queues for each path the function cares about + // fn => {paths, dirs} + #getQueues(fn) { + const res = this.#reservations.get(fn); + /* c8 ignore start */ + if (!res) { + throw new Error('function does not have any path reservations'); + } + /* c8 ignore stop */ + return { + paths: res.paths.map((path) => this.#queues.get(path)), + dirs: [...res.dirs].map(path => this.#queues.get(path)), + }; + } + // check if fn is first in line for all its paths, and is + // included in the first set for all its dir queues + check(fn) { + const { paths, dirs } = this.#getQueues(fn); + return (paths.every(q => q && q[0] === fn) && + dirs.every(q => q && q[0] instanceof Set && q[0].has(fn))); + } + // run the function if it's first in line and not already running + #run(fn) { + if (this.#running.has(fn) || !this.check(fn)) { + return false; + } + this.#running.add(fn); + fn(() => this.#clear(fn)); + return true; + } + #clear(fn) { + if (!this.#running.has(fn)) { + return false; + } + const res = this.#reservations.get(fn); + /* c8 ignore start */ + if (!res) { + throw new Error('invalid reservation'); + } + /* c8 ignore stop */ + const { paths, dirs } = res; + const next = new Set(); + for (const path of paths) { + const q = this.#queues.get(path); + /* c8 ignore start */ + if (!q || q?.[0] !== fn) { + continue; + } + /* c8 ignore stop */ + const q0 = q[1]; + if (!q0) { + this.#queues.delete(path); + continue; + } + q.shift(); + if 
(typeof q0 === 'function') { + next.add(q0); + } + else { + for (const f of q0) { + next.add(f); + } + } + } + for (const dir of dirs) { + const q = this.#queues.get(dir); + const q0 = q?.[0]; + /* c8 ignore next - type safety only */ + if (!q || !(q0 instanceof Set)) + continue; + if (q0.size === 1 && q.length === 1) { + this.#queues.delete(dir); + continue; + } + else if (q0.size === 1) { + q.shift(); + // next one must be a function, + // or else the Set would've been reused + const n = q[0]; + if (typeof n === 'function') { + next.add(n); + } + } + else { + q0.delete(fn); + } + } + this.#running.delete(fn); + next.forEach(fn => this.#run(fn)); + return true; + } +} +exports.PathReservations = PathReservations; +//# sourceMappingURL=path-reservations.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/pax.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/pax.js new file mode 100644 index 00000000000000..d30c0f3efbe9ea --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/pax.js @@ -0,0 +1,158 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pax = void 0; +const node_path_1 = require("node:path"); +const header_js_1 = require("./header.js"); +class Pax { + atime; + mtime; + ctime; + charset; + comment; + gid; + uid; + gname; + uname; + linkpath; + dev; + ino; + nlink; + path; + size; + mode; + global; + constructor(obj, global = false) { + this.atime = obj.atime; + this.charset = obj.charset; + this.comment = obj.comment; + this.ctime = obj.ctime; + this.dev = obj.dev; + this.gid = obj.gid; + this.global = global; + this.gname = obj.gname; + this.ino = obj.ino; + this.linkpath = obj.linkpath; + this.mtime = obj.mtime; + this.nlink = obj.nlink; + this.path = obj.path; + this.size = obj.size; + this.uid = obj.uid; + this.uname = obj.uname; + } + encode() { + const body = this.encodeBody(); + if (body === '') { + return Buffer.allocUnsafe(0); + } + const bodyLen = Buffer.byteLength(body); + // round up to 512 bytes + // add 512 for header + const bufLen = 512 * Math.ceil(1 + bodyLen / 512); + const buf = Buffer.allocUnsafe(bufLen); + // 0-fill the header section, it might not hit every field + for (let i = 0; i < 512; i++) { + buf[i] = 0; + } + new header_js_1.Header({ + // XXX split the path + // then the path should be PaxHeader + basename, but less than 99, + // prepend with the dirname + /* c8 ignore start */ + path: ('PaxHeader/' + (0, node_path_1.basename)(this.path ?? '')).slice(0, 99), + /* c8 ignore stop */ + mode: this.mode || 0o644, + uid: this.uid, + gid: this.gid, + size: bodyLen, + mtime: this.mtime, + type: this.global ? 
'GlobalExtendedHeader' : 'ExtendedHeader', + linkpath: '', + uname: this.uname || '', + gname: this.gname || '', + devmaj: 0, + devmin: 0, + atime: this.atime, + ctime: this.ctime, + }).encode(buf); + buf.write(body, 512, bodyLen, 'utf8'); + // null pad after the body + for (let i = bodyLen + 512; i < buf.length; i++) { + buf[i] = 0; + } + return buf; + } + encodeBody() { + return (this.encodeField('path') + + this.encodeField('ctime') + + this.encodeField('atime') + + this.encodeField('dev') + + this.encodeField('ino') + + this.encodeField('nlink') + + this.encodeField('charset') + + this.encodeField('comment') + + this.encodeField('gid') + + this.encodeField('gname') + + this.encodeField('linkpath') + + this.encodeField('mtime') + + this.encodeField('size') + + this.encodeField('uid') + + this.encodeField('uname')); + } + encodeField(field) { + if (this[field] === undefined) { + return ''; + } + const r = this[field]; + const v = r instanceof Date ? r.getTime() / 1000 : r; + const s = ' ' + + (field === 'dev' || field === 'ino' || field === 'nlink' ? + 'SCHILY.' + : '') + + field + + '=' + + v + + '\n'; + const byteLen = Buffer.byteLength(s); + // the digits includes the length of the digits in ascii base-10 + // so if it's 9 characters, then adding 1 for the 9 makes it 10 + // which makes it 11 chars. + let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1; + if (byteLen + digits >= Math.pow(10, digits)) { + digits += 1; + } + const len = digits + byteLen; + return len + s; + } + static parse(str, ex, g = false) { + return new Pax(merge(parseKV(str), ex), g); + } +} +exports.Pax = Pax; +const merge = (a, b) => b ? Object.assign({}, b, a) : a; +const parseKV = (str) => str + .replace(/\n$/, '') + .split('\n') + .reduce(parseKVLine, Object.create(null)); +const parseKVLine = (set, line) => { + const n = parseInt(line, 10); + // XXX Values with \n in them will fail this. + // Refactor to not be a naive line-by-line parse. + if (n !== Buffer.byteLength(line) + 1) { + return set; + } + line = line.slice((n + ' ').length); + const kv = line.split('='); + const r = kv.shift(); + if (!r) { + return set; + } + const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1'); + const v = kv.join('='); + set[k] = + /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ? + new Date(Number(v) * 1000) + : /^[0-9]+$/.test(v) ? +v + : v; + return set; +}; +//# sourceMappingURL=pax.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/read-entry.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/read-entry.js new file mode 100644 index 00000000000000..15e2d55c938a43 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/read-entry.js @@ -0,0 +1,140 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ReadEntry = void 0; +const minipass_1 = require("minipass"); +const normalize_windows_path_js_1 = require("./normalize-windows-path.js"); +class ReadEntry extends minipass_1.Minipass { + extended; + globalExtended; + header; + startBlockSize; + blockRemain; + remain; + type; + meta = false; + ignore = false; + path; + mode; + uid; + gid; + uname; + gname; + size = 0; + mtime; + atime; + ctime; + linkpath; + dev; + ino; + nlink; + invalid = false; + absolute; + unsupported = false; + constructor(header, ex, gex) { + super({}); + // read entries always start life paused. 
this is to avoid the + // situation where Minipass's auto-ending empty streams results + // in an entry ending before we're ready for it. + this.pause(); + this.extended = ex; + this.globalExtended = gex; + this.header = header; + /* c8 ignore start */ + this.remain = header.size ?? 0; + /* c8 ignore stop */ + this.startBlockSize = 512 * Math.ceil(this.remain / 512); + this.blockRemain = this.startBlockSize; + this.type = header.type; + switch (this.type) { + case 'File': + case 'OldFile': + case 'Link': + case 'SymbolicLink': + case 'CharacterDevice': + case 'BlockDevice': + case 'Directory': + case 'FIFO': + case 'ContiguousFile': + case 'GNUDumpDir': + break; + case 'NextFileHasLongLinkpath': + case 'NextFileHasLongPath': + case 'OldGnuLongPath': + case 'GlobalExtendedHeader': + case 'ExtendedHeader': + case 'OldExtendedHeader': + this.meta = true; + break; + // NOTE: gnutar and bsdtar treat unrecognized types as 'File' + // it may be worth doing the same, but with a warning. + default: + this.ignore = true; + } + /* c8 ignore start */ + if (!header.path) { + throw new Error('no path provided for tar.ReadEntry'); + } + /* c8 ignore stop */ + this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.path); + this.mode = header.mode; + if (this.mode) { + this.mode = this.mode & 0o7777; + } + this.uid = header.uid; + this.gid = header.gid; + this.uname = header.uname; + this.gname = header.gname; + this.size = this.remain; + this.mtime = header.mtime; + this.atime = header.atime; + this.ctime = header.ctime; + /* c8 ignore start */ + this.linkpath = + header.linkpath ? + (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.linkpath) + : undefined; + /* c8 ignore stop */ + this.uname = header.uname; + this.gname = header.gname; + if (ex) { + this.#slurp(ex); + } + if (gex) { + this.#slurp(gex, true); + } + } + write(data) { + const writeLen = data.length; + if (writeLen > this.blockRemain) { + throw new Error('writing more to entry than is appropriate'); + } + const r = this.remain; + const br = this.blockRemain; + this.remain = Math.max(0, r - writeLen); + this.blockRemain = Math.max(0, br - writeLen); + if (this.ignore) { + return true; + } + if (r >= writeLen) { + return super.write(data); + } + // r < writeLen + return super.write(data.subarray(0, r)); + } + #slurp(ex, gex = false) { + if (ex.path) + ex.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.path); + if (ex.linkpath) + ex.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.linkpath); + Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. Also, any + // null/undefined values are ignored. + return !(v === null || + v === undefined || + (k === 'path' && gex)); + }))); + } +} +exports.ReadEntry = ReadEntry; +//# sourceMappingURL=read-entry.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/replace.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/replace.js new file mode 100644 index 00000000000000..22eff246d4d75f --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/replace.js @@ -0,0 +1,231 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.replace = void 0; +// tar -r +const fs_minipass_1 = require("@isaacs/fs-minipass"); +const node_fs_1 = __importDefault(require("node:fs")); +const node_path_1 = __importDefault(require("node:path")); +const header_js_1 = require("./header.js"); +const list_js_1 = require("./list.js"); +const make_command_js_1 = require("./make-command.js"); +const options_js_1 = require("./options.js"); +const pack_js_1 = require("./pack.js"); +// starting at the head of the file, read a Header +// If the checksum is invalid, that's our position to start writing +// If it is, jump forward by the specified size (round up to 512) +// and try again. +// Write the new Pack stream starting there. +const replaceSync = (opt, files) => { + const p = new pack_js_1.PackSync(opt); + let threw = true; + let fd; + let position; + try { + try { + fd = node_fs_1.default.openSync(opt.file, 'r+'); + } + catch (er) { + if (er?.code === 'ENOENT') { + fd = node_fs_1.default.openSync(opt.file, 'w+'); + } + else { + throw er; + } + } + const st = node_fs_1.default.fstatSync(fd); + const headBuf = Buffer.alloc(512); + POSITION: for (position = 0; position < st.size; position += 512) { + for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) { + bytes = node_fs_1.default.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos); + if (position === 0 && + headBuf[0] === 0x1f && + headBuf[1] === 0x8b) { + throw new Error('cannot append to compressed archives'); + } + if (!bytes) { + break POSITION; + } + } + const h = new header_js_1.Header(headBuf); + if (!h.cksumValid) { + break; + } + const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512); + if (position + entryBlockSize + 512 > st.size) { + break; + } + // the 512 for the header we just parsed will be added as well + // also jump ahead all the blocks for the body + position += entryBlockSize; + if (opt.mtimeCache && h.mtime) { + opt.mtimeCache.set(String(h.path), h.mtime); + } + } + threw = false; + streamSync(opt, p, position, fd, files); + } + finally { + if (threw) { + try { + node_fs_1.default.closeSync(fd); + } + catch (er) { } + } + } +}; +const streamSync = (opt, p, position, fd, files) => { + const stream = new fs_minipass_1.WriteStreamSync(opt.file, { + fd: fd, + start: position, + }); + p.pipe(stream); + addFilesSync(p, files); +}; +const replaceAsync = (opt, files) => { + files = Array.from(files); + const p = new pack_js_1.Pack(opt); + const getPos = (fd, size, cb_) => { + const cb = (er, pos) => { + if (er) { + node_fs_1.default.close(fd, _ => cb_(er)); + } + else { + cb_(null, pos); + } + }; + let position = 0; + if (size === 0) { + return cb(null, 0); + } + let bufPos = 0; + const headBuf = Buffer.alloc(512); + const onread = (er, bytes) => { + if (er || typeof bytes === 'undefined') { + return cb(er); + } + bufPos += bytes; + if (bufPos < 512 && bytes) { + return node_fs_1.default.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread); + } + if (position === 0 && + headBuf[0] === 0x1f && + headBuf[1] === 0x8b) { + return cb(new Error('cannot append to compressed archives')); + } + // truncated header + if (bufPos < 512) { + return cb(null, position); + } + const h = new header_js_1.Header(headBuf); + if (!h.cksumValid) { + return cb(null, position); + } + /* c8 ignore next */ + const entryBlockSize = 512 * Math.ceil((h.size ?? 
0) / 512); + if (position + entryBlockSize + 512 > size) { + return cb(null, position); + } + position += entryBlockSize + 512; + if (position >= size) { + return cb(null, position); + } + if (opt.mtimeCache && h.mtime) { + opt.mtimeCache.set(String(h.path), h.mtime); + } + bufPos = 0; + node_fs_1.default.read(fd, headBuf, 0, 512, position, onread); + }; + node_fs_1.default.read(fd, headBuf, 0, 512, position, onread); + }; + const promise = new Promise((resolve, reject) => { + p.on('error', reject); + let flag = 'r+'; + const onopen = (er, fd) => { + if (er && er.code === 'ENOENT' && flag === 'r+') { + flag = 'w+'; + return node_fs_1.default.open(opt.file, flag, onopen); + } + if (er || !fd) { + return reject(er); + } + node_fs_1.default.fstat(fd, (er, st) => { + if (er) { + return node_fs_1.default.close(fd, () => reject(er)); + } + getPos(fd, st.size, (er, position) => { + if (er) { + return reject(er); + } + const stream = new fs_minipass_1.WriteStream(opt.file, { + fd: fd, + start: position, + }); + p.pipe(stream); + stream.on('error', reject); + stream.on('close', resolve); + addFilesAsync(p, files); + }); + }); + }; + node_fs_1.default.open(opt.file, flag, onopen); + }); + return promise; +}; +const addFilesSync = (p, files) => { + files.forEach(file => { + if (file.charAt(0) === '@') { + (0, list_js_1.list)({ + file: node_path_1.default.resolve(p.cwd, file.slice(1)), + sync: true, + noResume: true, + onReadEntry: entry => p.add(entry), + }); + } + else { + p.add(file); + } + }); + p.end(); +}; +const addFilesAsync = async (p, files) => { + for (let i = 0; i < files.length; i++) { + const file = String(files[i]); + if (file.charAt(0) === '@') { + await (0, list_js_1.list)({ + file: node_path_1.default.resolve(String(p.cwd), file.slice(1)), + noResume: true, + onReadEntry: entry => p.add(entry), + }); + } + else { + p.add(file); + } + } + p.end(); +}; +exports.replace = (0, make_command_js_1.makeCommand)(replaceSync, replaceAsync, +/* c8 ignore start */ +() => { + throw new TypeError('file is required'); +}, () => { + throw new TypeError('file is required'); +}, +/* c8 ignore stop */ +(opt, entries) => { + if (!(0, options_js_1.isFile)(opt)) { + throw new TypeError('file is required'); + } + if (opt.gzip || + opt.brotli || + opt.file.endsWith('.br') || + opt.file.endsWith('.tbr')) { + throw new TypeError('cannot append to compressed archives'); + } + if (!entries?.length) { + throw new TypeError('no paths specified to add/replace'); + } +}); +//# sourceMappingURL=replace.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/strip-absolute-path.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/strip-absolute-path.js new file mode 100644 index 00000000000000..bb7639c35a1104 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/strip-absolute-path.js @@ -0,0 +1,29 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.stripAbsolutePath = void 0; +// unix absolute paths are also absolute on win32, so we use this for both +const node_path_1 = require("node:path"); +const { isAbsolute, parse } = node_path_1.win32; +// returns [root, stripped] +// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in +// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip / +// explicitly if it's the first character. 
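+// For example (ours, not upstream): stripAbsolutePath('/a/b/c') returns
+// ['/', 'a/b/c'], and '//x/y/z/a' sheds its leading slashes one at a time,
+// yielding ['//', 'x/y/z/a'].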
+// drive-specific relative paths on Windows get their root stripped off even +// though they are not absolute, so `c:../foo` becomes ['c:', '../foo'] +const stripAbsolutePath = (path) => { + let r = ''; + let parsed = parse(path); + while (isAbsolute(path) || parsed.root) { + // windows will think that //x/y/z has a "root" of //x/y/ + // but strip the //?/C:/ off of //?/C:/path + const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? + '/' + : parsed.root; + path = path.slice(root.length); + r += root; + parsed = parse(path); + } + return [r, path]; +}; +exports.stripAbsolutePath = stripAbsolutePath; +//# sourceMappingURL=strip-absolute-path.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/strip-trailing-slashes.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/strip-trailing-slashes.js new file mode 100644 index 00000000000000..6fa74ad6a4ac93 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/strip-trailing-slashes.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.stripTrailingSlashes = void 0; +// warning: extremely hot code path. +// This has been meticulously optimized for use +// within npm install on large package trees. +// Do not edit without careful benchmarking. +const stripTrailingSlashes = (str) => { + let i = str.length - 1; + let slashesStart = -1; + while (i > -1 && str.charAt(i) === '/') { + slashesStart = i; + i--; + } + return slashesStart === -1 ? str : str.slice(0, slashesStart); +}; +exports.stripTrailingSlashes = stripTrailingSlashes; +//# sourceMappingURL=strip-trailing-slashes.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/symlink-error.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/symlink-error.js new file mode 100644 index 00000000000000..cc19ac1a2e3c6b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/symlink-error.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SymlinkError = void 0; +class SymlinkError extends Error { + path; + symlink; + syscall = 'symlink'; + code = 'TAR_SYMLINK_ERROR'; + constructor(symlink, path) { + super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link'); + this.symlink = symlink; + this.path = path; + } + get name() { + return 'SymlinkError'; + } +} +exports.SymlinkError = SymlinkError; +//# sourceMappingURL=symlink-error.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/types.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/types.js new file mode 100644 index 00000000000000..cb9b684e843b72 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/types.js @@ -0,0 +1,50 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.code = exports.name = exports.isName = exports.isCode = void 0; +const isCode = (c) => exports.name.has(c); +exports.isCode = isCode; +const isName = (c) => exports.code.has(c); +exports.isName = isName; +// map types from key to human-friendly name +exports.name = new Map([ + ['0', 'File'], + // same as File + ['', 'OldFile'], + ['1', 'Link'], + ['2', 'SymbolicLink'], + // Devices and FIFOs aren't fully supported + // they are parsed, but skipped when unpacking + ['3', 'CharacterDevice'], + ['4', 'BlockDevice'], + ['5', 
'Directory'], + ['6', 'FIFO'], + // same as File + ['7', 'ContiguousFile'], + // pax headers + ['g', 'GlobalExtendedHeader'], + ['x', 'ExtendedHeader'], + // vendor-specific stuff + // skip + ['A', 'SolarisACL'], + // like 5, but with data, which should be skipped + ['D', 'GNUDumpDir'], + // metadata only, skip + ['I', 'Inode'], + // data = link path of next file + ['K', 'NextFileHasLongLinkpath'], + // data = path of next file + ['L', 'NextFileHasLongPath'], + // skip + ['M', 'ContinuationFile'], + // like L + ['N', 'OldGnuLongPath'], + // skip + ['S', 'SparseFile'], + // skip + ['V', 'TapeVolumeHeader'], + // like x + ['X', 'OldExtendedHeader'], +]); +// map the other direction +exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]])); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/unpack.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/unpack.js new file mode 100644 index 00000000000000..edf8acbb18c408 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/unpack.js @@ -0,0 +1,919 @@ +"use strict"; +// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet. +// but the path reservations are required to avoid race conditions where +// parallelized unpack ops may mess with one another, due to dependencies +// (like a Link depending on its target) or destructive operations (like +// clobbering an fs object to create one of a different type.) +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.UnpackSync = exports.Unpack = void 0; +const fsm = __importStar(require("@isaacs/fs-minipass")); +const node_assert_1 = __importDefault(require("node:assert")); +const node_crypto_1 = require("node:crypto"); +const node_fs_1 = __importDefault(require("node:fs")); +const node_path_1 = __importDefault(require("node:path")); +const get_write_flag_js_1 = require("./get-write-flag.js"); +const mkdir_js_1 = require("./mkdir.js"); +const normalize_unicode_js_1 = require("./normalize-unicode.js"); +const normalize_windows_path_js_1 = require("./normalize-windows-path.js"); +const parse_js_1 = require("./parse.js"); +const strip_absolute_path_js_1 = require("./strip-absolute-path.js"); +const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js"); +const wc = __importStar(require("./winchars.js")); +const path_reservations_js_1 = require("./path-reservations.js"); +const ONENTRY = Symbol('onEntry'); +const CHECKFS = Symbol('checkFs'); +const CHECKFS2 = Symbol('checkFs2'); +const PRUNECACHE = Symbol('pruneCache'); +const ISREUSABLE = Symbol('isReusable'); +const MAKEFS = Symbol('makeFs'); +const FILE = Symbol('file'); +const DIRECTORY = Symbol('directory'); +const LINK = Symbol('link'); +const SYMLINK = Symbol('symlink'); +const HARDLINK = Symbol('hardlink'); +const UNSUPPORTED = Symbol('unsupported'); +const CHECKPATH = Symbol('checkPath'); +const MKDIR = Symbol('mkdir'); +const ONERROR = Symbol('onError'); +const PENDING = Symbol('pending'); +const PEND = Symbol('pend'); +const UNPEND = Symbol('unpend'); +const ENDED = Symbol('ended'); +const MAYBECLOSE = Symbol('maybeClose'); +const SKIP = Symbol('skip'); +const DOCHOWN = Symbol('doChown'); +const UID = Symbol('uid'); +const GID = Symbol('gid'); +const CHECKED_CWD = Symbol('checkedCwd'); +const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform; +const isWindows = platform === 'win32'; +const DEFAULT_MAX_DEPTH = 1024; +// Unlinks on Windows are not atomic. +// +// This means that if you have a file entry, followed by another +// file entry with an identical name, and you cannot re-use the file +// (because it's a hardlink, or because unlink:true is set, or it's +// Windows, which does not have useful nlink values), then the unlink +// will be committed to the disk AFTER the new file has been written +// over the old one, deleting the new file. +// +// To work around this, on Windows systems, we rename the file and then +// delete the renamed file. It's a sloppy kludge, but frankly, I do not +// know of a better way to do this, given windows' non-atomic unlink +// semantics. +// +// See: https://github.com/npm/node-tar/issues/183 +/* c8 ignore start */ +const unlinkFile = (path, cb) => { + if (!isWindows) { + return node_fs_1.default.unlink(path, cb); + } + const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex'); + node_fs_1.default.rename(path, name, er => { + if (er) { + return cb(er); + } + node_fs_1.default.unlink(name, cb); + }); +}; +/* c8 ignore stop */ +/* c8 ignore start */ +const unlinkFileSync = (path) => { + if (!isWindows) { + return node_fs_1.default.unlinkSync(path); + } + const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex'); + node_fs_1.default.renameSync(path, name); + node_fs_1.default.unlinkSync(name); +}; +/* c8 ignore stop */ +// this.gid, entry.gid, this.processUid +const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? 
a + : b !== undefined && b === b >>> 0 ? b + : c; +// clear the cache if it's a case-insensitive unicode-squashing match. +// we can't know if the current file system is case-sensitive or supports +// unicode fully, so we check for similarity on the maximally compatible +// representation. Err on the side of pruning, since all it's doing is +// preventing lstats, and it's not the end of the world if we get a false +// positive. +// Note that on windows, we always drop the entire cache whenever a +// symbolic link is encountered, because 8.3 filenames are impossible +// to reason about, and collisions are hazards rather than just failures. +const cacheKeyNormalize = (path) => (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, normalize_windows_path_js_1.normalizeWindowsPath)((0, normalize_unicode_js_1.normalizeUnicode)(path))).toLowerCase(); +// remove all cache entries matching ${abs}/** +const pruneCache = (cache, abs) => { + abs = cacheKeyNormalize(abs); + for (const path of cache.keys()) { + const pnorm = cacheKeyNormalize(path); + if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) { + cache.delete(path); + } + } +}; +const dropCache = (cache) => { + for (const key of cache.keys()) { + cache.delete(key); + } +}; +class Unpack extends parse_js_1.Parser { + [ENDED] = false; + [CHECKED_CWD] = false; + [PENDING] = 0; + reservations = new path_reservations_js_1.PathReservations(); + transform; + writable = true; + readable = false; + dirCache; + uid; + gid; + setOwner; + preserveOwner; + processGid; + processUid; + maxDepth; + forceChown; + win32; + newer; + keep; + noMtime; + preservePaths; + unlink; + cwd; + strip; + processUmask; + umask; + dmode; + fmode; + chmod; + constructor(opt = {}) { + opt.ondone = () => { + this[ENDED] = true; + this[MAYBECLOSE](); + }; + super(opt); + this.transform = opt.transform; + this.dirCache = opt.dirCache || new Map(); + this.chmod = !!opt.chmod; + if (typeof opt.uid === 'number' || typeof opt.gid === 'number') { + // need both or neither + if (typeof opt.uid !== 'number' || + typeof opt.gid !== 'number') { + throw new TypeError('cannot set owner without number uid and gid'); + } + if (opt.preserveOwner) { + throw new TypeError('cannot preserve owner in archive and also set owner explicitly'); + } + this.uid = opt.uid; + this.gid = opt.gid; + this.setOwner = true; + } + else { + this.uid = undefined; + this.gid = undefined; + this.setOwner = false; + } + // default true for root + if (opt.preserveOwner === undefined && + typeof opt.uid !== 'number') { + this.preserveOwner = !!(process.getuid && process.getuid() === 0); + } + else { + this.preserveOwner = !!opt.preserveOwner; + } + this.processUid = + (this.preserveOwner || this.setOwner) && process.getuid ? + process.getuid() + : undefined; + this.processGid = + (this.preserveOwner || this.setOwner) && process.getgid ? + process.getgid() + : undefined; + // prevent excessively deep nesting of subfolders + // set to `Infinity` to remove this restriction + this.maxDepth = + typeof opt.maxDepth === 'number' ? + opt.maxDepth + : DEFAULT_MAX_DEPTH; + // mostly just for testing, but useful in some cases. + // Forcibly trigger a chown on every entry, no matter what + this.forceChown = opt.forceChown === true; + // turn ><?| in filenames into 0xf000-higher encoded forms + this.win32 = !!opt.win32 || isWindows; + // do not unpack over files that are newer than what's in the archive + this.newer = !!opt.newer; + // do not unpack over ANY files + this.keep = !!opt.keep; + // do not set mtime/atime of extracted entries + this.noMtime = !!opt.noMtime; + // allow .. paths and absolute paths + this.preservePaths = !!opt.preservePaths; + // unlink files and links before writing. This breaks existing hard + // links, and removes symlink directories rather than erroring + this.unlink = !!opt.unlink; + this.cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(opt.cwd || process.cwd())); + this.strip = Number(opt.strip) || 0; + // if we're not chmodding, then we don't need the process umask + this.processUmask = !this.chmod ? 0 : opt.processUmask ?? process.umask(); + this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask; + // default mode for dirs created as parents + this.dmode = opt.dmode || 0o0777 & ~this.umask; + this.fmode = opt.fmode || 0o0666 & ~this.umask; + this.on('entry', entry => this[ONENTRY](entry)); + } + // a bad or damaged archive is a warning for Parser, but an error + // when extracting. Mark those errors as unrecoverable, because + // the Unpack contract cannot be met.
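The comment above states the contract that the overridden warn() below enforces: most malformed entries surface as recoverable 'warn' events, while TAR_BAD_ARCHIVE and TAR_ABORT are forced unrecoverable so extraction fails hard. A minimal sketch of how a consumer might observe both channels, assuming the package's documented Unpack class and onwarn option (the paths are illustrative):

// Sketch: recoverable problems arrive via onwarn; unrecoverable ones
// (e.g. TAR_BAD_ARCHIVE during extraction) arrive as 'error' events.
const fs = require('node:fs');
const { Unpack } = require('tar'); // assumption: public re-export

const u = new Unpack({
  cwd: '/tmp/extract-target', // illustrative target directory
  onwarn: (code, message, data) => {
    console.warn(`recoverable ${code}: ${message}`, data.path);
  },
});
u.on('error', er => console.error('fatal:', er.message));
fs.createReadStream('archive.tar').pipe(u);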
+ warn(code, msg, data = {}) { + if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') { + data.recoverable = false; + } + return super.warn(code, msg, data); + } + [MAYBECLOSE]() { + if (this[ENDED] && this[PENDING] === 0) { + this.emit('prefinish'); + this.emit('finish'); + this.emit('end'); + } + } + [CHECKPATH](entry) { + const p = (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path); + const parts = p.split('/'); + if (this.strip) { + if (parts.length < this.strip) { + return false; + } + if (entry.type === 'Link') { + const linkparts = (0, normalize_windows_path_js_1.normalizeWindowsPath)(String(entry.linkpath)).split('/'); + if (linkparts.length >= this.strip) { + entry.linkpath = linkparts.slice(this.strip).join('/'); + } + else { + return false; + } + } + parts.splice(0, this.strip); + entry.path = parts.join('/'); + } + if (isFinite(this.maxDepth) && parts.length > this.maxDepth) { + this.warn('TAR_ENTRY_ERROR', 'path excessively deep', { + entry, + path: p, + depth: parts.length, + maxDepth: this.maxDepth, + }); + return false; + } + if (!this.preservePaths) { + if (parts.includes('..') || + /* c8 ignore next */ + (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) { + this.warn('TAR_ENTRY_ERROR', `path contains '..'`, { + entry, + path: p, + }); + return false; + } + // strip off the root + const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(p); + if (root) { + entry.path = String(stripped); + this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, { + entry, + path: p, + }); + } + } + if (node_path_1.default.isAbsolute(entry.path)) { + entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(entry.path)); + } + else { + entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, entry.path)); + } + // if we somehow ended up with a path that escapes the cwd, and we are + // not in preservePaths mode, then something is fishy! This should have + // been prevented above, so ignore this for coverage. + /* c8 ignore start - defense in depth */ + if (!this.preservePaths && + typeof entry.absolute === 'string' && + entry.absolute.indexOf(this.cwd + '/') !== 0 && + entry.absolute !== this.cwd) { + this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', { + entry, + path: (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path), + resolvedPath: entry.absolute, + cwd: this.cwd, + }); + return false; + } + /* c8 ignore stop */ + // an archive can set properties on the extraction directory, but it + // may not replace the cwd with a different kind of thing entirely. 
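[CHECKPATH] above concentrates the path-safety rules: apply strip, refuse '..' segments and paths deeper than maxDepth, and strip absolute roots (with a TAR_ENTRY_INFO warning) unless preservePaths opts out. A standalone sketch of the same vetting, not the library's API (the function name and defaults are mine):

// Sketch: the entry-path vetting logic above, in isolation.
const vetEntryPath = (p, { strip = 0, maxDepth = 1024 } = {}) => {
  const parts = p.replace(/\\/g, '/').split('/');
  if (parts.length < strip) return null;              // consumed by strip
  const rest = parts.slice(strip);
  if (rest.length > maxDepth) return null;            // excessively deep
  if (rest.includes('..')) return null;               // would escape cwd
  while (rest.length && rest[0] === '') rest.shift(); // drop absolute root
  return rest.join('/');
};
console.log(vetEntryPath('/etc/passwd'));   // 'etc/passwd'
console.log(vetEntryPath('a/../../pwned')); // null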
+ if (entry.absolute === this.cwd && + entry.type !== 'Directory' && + entry.type !== 'GNUDumpDir') { + return false; + } + // only encode : chars that aren't drive letter indicators + if (this.win32) { + const { root: aRoot } = node_path_1.default.win32.parse(String(entry.absolute)); + entry.absolute = + aRoot + wc.encode(String(entry.absolute).slice(aRoot.length)); + const { root: pRoot } = node_path_1.default.win32.parse(entry.path); + entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length)); + } + return true; + } + [ONENTRY](entry) { + if (!this[CHECKPATH](entry)) { + return entry.resume(); + } + node_assert_1.default.equal(typeof entry.absolute, 'string'); + switch (entry.type) { + case 'Directory': + case 'GNUDumpDir': + if (entry.mode) { + entry.mode = entry.mode | 0o700; + } + // eslint-disable-next-line no-fallthrough + case 'File': + case 'OldFile': + case 'ContiguousFile': + case 'Link': + case 'SymbolicLink': + return this[CHECKFS](entry); + case 'CharacterDevice': + case 'BlockDevice': + case 'FIFO': + default: + return this[UNSUPPORTED](entry); + } + } + [ONERROR](er, entry) { + // Cwd has to exist, or else nothing works. That's serious. + // Other errors are warnings, which raise the error in strict + // mode, but otherwise continue on. + if (er.name === 'CwdError') { + this.emit('error', er); + } + else { + this.warn('TAR_ENTRY_ERROR', er, { entry }); + this[UNPEND](); + entry.resume(); + } + } + [MKDIR](dir, mode, cb) { + (0, mkdir_js_1.mkdir)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode, + }, cb); + } + [DOCHOWN](entry) { + // in preserve owner mode, chown if the entry doesn't match process + // in set owner mode, chown if setting doesn't match process + return (this.forceChown || + (this.preserveOwner && + ((typeof entry.uid === 'number' && + entry.uid !== this.processUid) || + (typeof entry.gid === 'number' && + entry.gid !== this.processGid))) || + (typeof this.uid === 'number' && + this.uid !== this.processUid) || + (typeof this.gid === 'number' && this.gid !== this.processGid)); + } + [UID](entry) { + return uint32(this.uid, entry.uid, this.processUid); + } + [GID](entry) { + return uint32(this.gid, entry.gid, this.processGid); + } + [FILE](entry, fullyDone) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.fmode; + const stream = new fsm.WriteStream(String(entry.absolute), { + // slight lie, but it can be numeric flags + flags: (0, get_write_flag_js_1.getWriteFlag)(entry.size), + mode: mode, + autoClose: false, + }); + stream.on('error', (er) => { + if (stream.fd) { + node_fs_1.default.close(stream.fd, () => { }); + } + // flush all the data out so that we aren't left hanging + // if the error wasn't actually fatal. otherwise the parse + // is blocked, and we never proceed. 
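The statement that follows implements the comment: after a fatal error the destination's write is swapped for a no-op that claims success, so the paused pipe drains instead of wedging the parser. The same trick in a runnable, self-contained form (the stream names are mine):

// Sketch: neutralizing a dead destination so the producer can drain.
const { PassThrough } = require('node:stream');
const src = new PassThrough();
const dest = new PassThrough({ highWaterMark: 1 });
dest.write = () => true; // accept and discard everything, as above
src.pipe(dest);
src.end(Buffer.alloc(1024 * 1024)); // completes; the data is discarded
src.on('end', () => console.log('producer drained'));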
+ stream.write = () => true; + this[ONERROR](er, entry); + fullyDone(); + }); + let actions = 1; + const done = (er) => { + if (er) { + /* c8 ignore start - we should always have a fd by now */ + if (stream.fd) { + node_fs_1.default.close(stream.fd, () => { }); + } + /* c8 ignore stop */ + this[ONERROR](er, entry); + fullyDone(); + return; + } + if (--actions === 0) { + if (stream.fd !== undefined) { + node_fs_1.default.close(stream.fd, er => { + if (er) { + this[ONERROR](er, entry); + } + else { + this[UNPEND](); + } + fullyDone(); + }); + } + } + }; + stream.on('finish', () => { + // if futimes fails, try utimes + // if utimes fails, fail with the original error + // same for fchown/chown + const abs = String(entry.absolute); + const fd = stream.fd; + if (typeof fd === 'number' && entry.mtime && !this.noMtime) { + actions++; + const atime = entry.atime || new Date(); + const mtime = entry.mtime; + node_fs_1.default.futimes(fd, atime, mtime, er => er ? + node_fs_1.default.utimes(abs, atime, mtime, er2 => done(er2 && er)) + : done()); + } + if (typeof fd === 'number' && this[DOCHOWN](entry)) { + actions++; + const uid = this[UID](entry); + const gid = this[GID](entry); + if (typeof uid === 'number' && typeof gid === 'number') { + node_fs_1.default.fchown(fd, uid, gid, er => er ? + node_fs_1.default.chown(abs, uid, gid, er2 => done(er2 && er)) + : done()); + } + } + done(); + }); + const tx = this.transform ? this.transform(entry) || entry : entry; + if (tx !== entry) { + tx.on('error', (er) => { + this[ONERROR](er, entry); + fullyDone(); + }); + entry.pipe(tx); + } + tx.pipe(stream); + } + [DIRECTORY](entry, fullyDone) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.dmode; + this[MKDIR](String(entry.absolute), mode, er => { + if (er) { + this[ONERROR](er, entry); + fullyDone(); + return; + } + let actions = 1; + const done = () => { + if (--actions === 0) { + fullyDone(); + this[UNPEND](); + entry.resume(); + } + }; + if (entry.mtime && !this.noMtime) { + actions++; + node_fs_1.default.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done); + } + if (this[DOCHOWN](entry)) { + actions++; + node_fs_1.default.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done); + } + done(); + }); + } + [UNSUPPORTED](entry) { + entry.unsupported = true; + this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry }); + entry.resume(); + } + [SYMLINK](entry, done) { + this[LINK](entry, String(entry.linkpath), 'symlink', done); + } + [HARDLINK](entry, done) { + const linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, String(entry.linkpath))); + this[LINK](entry, linkpath, 'link', done); + } + [PEND]() { + this[PENDING]++; + } + [UNPEND]() { + this[PENDING]--; + this[MAYBECLOSE](); + } + [SKIP](entry) { + this[UNPEND](); + entry.resume(); + } + // Check if we can reuse an existing filesystem entry safely and + // overwrite it, rather than unlinking and recreating + // Windows doesn't report a useful nlink, so we just never reuse entries + [ISREUSABLE](entry, st) { + return (entry.type === 'File' && + !this.unlink && + st.isFile() && + st.nlink <= 1 && + !isWindows); + } + // check if a thing is there, and if so, try to clobber it + [CHECKFS](entry) { + this[PEND](); + const paths = [entry.path]; + if (entry.linkpath) { + paths.push(entry.linkpath); + } + this.reservations.reserve(paths, done => this[CHECKFS2](entry, done)); + } + 
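reserve() above is what serializes the work: [CHECKFS2] only runs once every path the entry touches (its own, plus any linkpath) is free of earlier in-flight operations. A tiny sketch of the queueing idea, assuming nothing from the real PathReservations implementation:

// Sketch: per-path FIFO so operations on the same path never overlap.
const queues = new Map();
const reserve = (paths, fn) => {
  // wait for the current tail of every involved path's queue
  const prev = Promise.all(paths.map(p => queues.get(p) ?? Promise.resolve()));
  const job = prev.then(() => new Promise(done => fn(done)));
  for (const p of paths) queues.set(p, job.catch(() => {}));
  return job;
};
// usage: reserve(['a/b', 'a/b-link'], done => { /* fs work */ done(); });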
[PRUNECACHE](entry) { + // if we are not creating a directory, and the path is in the dirCache, + // then that means we are about to delete the directory we created + // previously, and it is no longer going to be a directory, and neither + // is any of its children. + // If a symbolic link is encountered, all bets are off. There is no + // reasonable way to sanitize the cache in such a way we will be able to + // avoid having filesystem collisions. If this happens with a non-symlink + // entry, it'll just fail to unpack, but a symlink to a directory, using an + // 8.3 shortname or certain unicode attacks, can evade detection and lead + // to arbitrary writes to anywhere on the system. + if (entry.type === 'SymbolicLink') { + dropCache(this.dirCache); + } + else if (entry.type !== 'Directory') { + pruneCache(this.dirCache, String(entry.absolute)); + } + } + [CHECKFS2](entry, fullyDone) { + this[PRUNECACHE](entry); + const done = (er) => { + this[PRUNECACHE](entry); + fullyDone(er); + }; + const checkCwd = () => { + this[MKDIR](this.cwd, this.dmode, er => { + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + this[CHECKED_CWD] = true; + start(); + }); + }; + const start = () => { + if (entry.absolute !== this.cwd) { + const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute))); + if (parent !== this.cwd) { + return this[MKDIR](parent, this.dmode, er => { + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + afterMakeParent(); + }); + } + } + afterMakeParent(); + }; + const afterMakeParent = () => { + node_fs_1.default.lstat(String(entry.absolute), (lstatEr, st) => { + if (st && + (this.keep || + /* c8 ignore next */ + (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) { + this[SKIP](entry); + done(); + return; + } + if (lstatEr || this[ISREUSABLE](entry, st)) { + return this[MAKEFS](null, entry, done); + } + if (st.isDirectory()) { + if (entry.type === 'Directory') { + const needChmod = this.chmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode; + const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done); + if (!needChmod) { + return afterChmod(); + } + return node_fs_1.default.chmod(String(entry.absolute), Number(entry.mode), afterChmod); + } + // Not a dir entry, have to remove it. + // NB: the only way to end up with an entry that is the cwd + // itself, in such a way that == does not detect, is a + // tricky windows absolute path with UNC or 8.3 parts (and + // preservePaths:true, or else it will have been stripped). + // In that case, the user has opted out of path protections + // explicitly, so if they blow away the cwd, c'est la vie. + if (entry.absolute !== this.cwd) { + return node_fs_1.default.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done)); + } + } + // not a dir, and not reusable + // don't remove if the cwd, we want that error + if (entry.absolute === this.cwd) { + return this[MAKEFS](null, entry, done); + } + unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? 
null, entry, done)); + }); + }; + if (this[CHECKED_CWD]) { + start(); + } + else { + checkCwd(); + } + } + [MAKEFS](er, entry, done) { + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + switch (entry.type) { + case 'File': + case 'OldFile': + case 'ContiguousFile': + return this[FILE](entry, done); + case 'Link': + return this[HARDLINK](entry, done); + case 'SymbolicLink': + return this[SYMLINK](entry, done); + case 'Directory': + case 'GNUDumpDir': + return this[DIRECTORY](entry, done); + } + } + [LINK](entry, linkpath, link, done) { + // XXX: get the type ('symlink' or 'junction') for windows + node_fs_1.default[link](linkpath, String(entry.absolute), er => { + if (er) { + this[ONERROR](er, entry); + } + else { + this[UNPEND](); + entry.resume(); + } + done(); + }); + } +} +exports.Unpack = Unpack; +const callSync = (fn) => { + try { + return [null, fn()]; + } + catch (er) { + return [er, null]; + } +}; +class UnpackSync extends Unpack { + sync = true; + [MAKEFS](er, entry) { + return super[MAKEFS](er, entry, () => { }); + } + [CHECKFS](entry) { + this[PRUNECACHE](entry); + if (!this[CHECKED_CWD]) { + const er = this[MKDIR](this.cwd, this.dmode); + if (er) { + return this[ONERROR](er, entry); + } + this[CHECKED_CWD] = true; + } + // don't bother to make the parent if the current entry is the cwd, + // we've already checked it. + if (entry.absolute !== this.cwd) { + const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute))); + if (parent !== this.cwd) { + const mkParent = this[MKDIR](parent, this.dmode); + if (mkParent) { + return this[ONERROR](mkParent, entry); + } + } + } + const [lstatEr, st] = callSync(() => node_fs_1.default.lstatSync(String(entry.absolute))); + if (st && + (this.keep || + /* c8 ignore next */ + (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) { + return this[SKIP](entry); + } + if (lstatEr || this[ISREUSABLE](entry, st)) { + return this[MAKEFS](null, entry); + } + if (st.isDirectory()) { + if (entry.type === 'Directory') { + const needChmod = this.chmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode; + const [er] = needChmod ? + callSync(() => { + node_fs_1.default.chmodSync(String(entry.absolute), Number(entry.mode)); + }) + : []; + return this[MAKEFS](er, entry); + } + // not a dir entry, have to remove it + const [er] = callSync(() => node_fs_1.default.rmdirSync(String(entry.absolute))); + this[MAKEFS](er, entry); + } + // not a dir, and not reusable. + // don't remove if it's the cwd, since we want that error. + const [er] = entry.absolute === this.cwd ? + [] + : callSync(() => unlinkFileSync(String(entry.absolute))); + this[MAKEFS](er, entry); + } + [FILE](entry, done) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.fmode; + const oner = (er) => { + let closeError; + try { + node_fs_1.default.closeSync(fd); + } + catch (e) { + closeError = e; + } + if (er || closeError) { + this[ONERROR](er || closeError, entry); + } + done(); + }; + let fd; + try { + fd = node_fs_1.default.openSync(String(entry.absolute), (0, get_write_flag_js_1.getWriteFlag)(entry.size), mode); + } + catch (er) { + return oner(er); + } + const tx = this.transform ? 
this.transform(entry) || entry : entry; + if (tx !== entry) { + tx.on('error', (er) => this[ONERROR](er, entry)); + entry.pipe(tx); + } + tx.on('data', (chunk) => { + try { + node_fs_1.default.writeSync(fd, chunk, 0, chunk.length); + } + catch (er) { + oner(er); + } + }); + tx.on('end', () => { + let er = null; + // try both, falling futimes back to utimes + // if either fails, handle the first error + if (entry.mtime && !this.noMtime) { + const atime = entry.atime || new Date(); + const mtime = entry.mtime; + try { + node_fs_1.default.futimesSync(fd, atime, mtime); + } + catch (futimeser) { + try { + node_fs_1.default.utimesSync(String(entry.absolute), atime, mtime); + } + catch (utimeser) { + er = futimeser; + } + } + } + if (this[DOCHOWN](entry)) { + const uid = this[UID](entry); + const gid = this[GID](entry); + try { + node_fs_1.default.fchownSync(fd, Number(uid), Number(gid)); + } + catch (fchowner) { + try { + node_fs_1.default.chownSync(String(entry.absolute), Number(uid), Number(gid)); + } + catch (chowner) { + er = er || fchowner; + } + } + } + oner(er); + }); + } + [DIRECTORY](entry, done) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.dmode; + const er = this[MKDIR](String(entry.absolute), mode); + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + if (entry.mtime && !this.noMtime) { + try { + node_fs_1.default.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime); + /* c8 ignore next */ + } + catch (er) { } + } + if (this[DOCHOWN](entry)) { + try { + node_fs_1.default.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry))); + } + catch (er) { } + } + done(); + entry.resume(); + } + [MKDIR](dir, mode) { + try { + return (0, mkdir_js_1.mkdirSync)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode, + }); + } + catch (er) { + return er; + } + } + [LINK](entry, linkpath, link, done) { + const ls = `${link}Sync`; + try { + node_fs_1.default[ls](linkpath, String(entry.absolute)); + done(); + entry.resume(); + } + catch (er) { + return this[ONERROR](er, entry); + } + } +} +exports.UnpackSync = UnpackSync; +//# sourceMappingURL=unpack.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/update.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/update.js new file mode 100644 index 00000000000000..7687896f4bfeeb --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/update.js @@ -0,0 +1,33 @@ +"use strict"; +// tar -u +Object.defineProperty(exports, "__esModule", { value: true }); +exports.update = void 0; +const make_command_js_1 = require("./make-command.js"); +const replace_js_1 = require("./replace.js"); +// just call tar.r with the filter and mtimeCache +exports.update = (0, make_command_js_1.makeCommand)(replace_js_1.replace.syncFile, replace_js_1.replace.asyncFile, replace_js_1.replace.syncNoFile, replace_js_1.replace.asyncNoFile, (opt, entries = []) => { + replace_js_1.replace.validate?.(opt, entries); + mtimeFilter(opt); +}); +const mtimeFilter = (opt) => { + const filter = opt.filter; + if (!opt.mtimeCache) { + opt.mtimeCache = new Map(); + } + opt.filter = + filter ? 
+ (path, stat) => filter(path, stat) && + !( + /* c8 ignore start */ + ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) > + (stat.mtime ?? 0)) + /* c8 ignore stop */ + ) + : (path, stat) => !( + /* c8 ignore start */ + ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) > + (stat.mtime ?? 0)) + /* c8 ignore stop */ + ); +}; +//# sourceMappingURL=update.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/warn-method.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/warn-method.js new file mode 100644 index 00000000000000..f25502776e36a3 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/warn-method.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.warnMethod = void 0; +const warnMethod = (self, code, message, data = {}) => { + if (self.file) { + data.file = self.file; + } + if (self.cwd) { + data.cwd = self.cwd; + } + data.code = + (message instanceof Error && + message.code) || + code; + data.tarCode = code; + if (!self.strict && data.recoverable !== false) { + if (message instanceof Error) { + data = Object.assign(message, data); + message = message.message; + } + self.emit('warn', code, message, data); + } + else if (message instanceof Error) { + self.emit('error', Object.assign(message, data)); + } + else { + self.emit('error', Object.assign(new Error(`${code}: ${message}`), data)); + } +}; +exports.warnMethod = warnMethod; +//# sourceMappingURL=warn-method.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/winchars.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/winchars.js new file mode 100644 index 00000000000000..c0a4405812929e --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/winchars.js @@ -0,0 +1,14 @@ +"use strict"; +// When writing files on Windows, translate the characters to their +// 0xf000 higher-encoded versions. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.decode = exports.encode = void 0; +const raw = ['|', '<', '>', '?', ':']; +const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0))); +const toWin = new Map(raw.map((char, i) => [char, win[i]])); +const toRaw = new Map(win.map((char, i) => [char, raw[i]])); +const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s); +exports.encode = encode; +const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s); +exports.decode = decode; +//# sourceMappingURL=winchars.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/write-entry.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/write-entry.js new file mode 100644 index 00000000000000..45b7efeb795027 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/write-entry.js @@ -0,0 +1,689 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.WriteEntryTar = exports.WriteEntrySync = exports.WriteEntry = void 0; +const fs_1 = __importDefault(require("fs")); +const minipass_1 = require("minipass"); +const path_1 = __importDefault(require("path")); +const header_js_1 = require("./header.js"); +const mode_fix_js_1 = require("./mode-fix.js"); +const normalize_windows_path_js_1 = require("./normalize-windows-path.js"); +const options_js_1 = require("./options.js"); +const pax_js_1 = require("./pax.js"); +const strip_absolute_path_js_1 = require("./strip-absolute-path.js"); +const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js"); +const warn_method_js_1 = require("./warn-method.js"); +const winchars = __importStar(require("./winchars.js")); +const prefixPath = (path, prefix) => { + if (!prefix) { + return (0, normalize_windows_path_js_1.normalizeWindowsPath)(path); + } + path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path).replace(/^\.(\/|$)/, ''); + return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)(prefix) + '/' + path; +}; +const maxReadSize = 16 * 1024 * 1024; +const PROCESS = Symbol('process'); +const FILE = Symbol('file'); +const DIRECTORY = Symbol('directory'); +const SYMLINK = Symbol('symlink'); +const HARDLINK = Symbol('hardlink'); +const HEADER = Symbol('header'); +const READ = Symbol('read'); +const LSTAT = Symbol('lstat'); +const ONLSTAT = Symbol('onlstat'); +const ONREAD = Symbol('onread'); +const ONREADLINK = Symbol('onreadlink'); +const OPENFILE = Symbol('openfile'); +const ONOPENFILE = Symbol('onopenfile'); +const CLOSE = Symbol('close'); +const MODE = Symbol('mode'); +const AWAITDRAIN = Symbol('awaitDrain'); +const ONDRAIN = Symbol('ondrain'); +const PREFIX = Symbol('prefix'); +class WriteEntry extends minipass_1.Minipass { + path; + portable; + myuid = (process.getuid && process.getuid()) || 0; + // until node has builtin pwnam functions, this'll have to do + myuser = process.env.USER || ''; + maxReadSize; + linkCache; + statCache; + preservePaths; + cwd; + strict; + mtime; + noPax; + noMtime; + prefix; + fd; + blockLen = 0; + blockRemain = 0; + buf; + pos = 0; + remain = 0; + length = 0; + offset = 0; + win32; + absolute; + header; + type; + linkpath; + stat; + onWriteEntry; + #hadError = false; + constructor(p, opt_ = {}) { + const opt = (0, options_js_1.dealias)(opt_); + super(); + this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(p); + // suppress atime, ctime, uid, gid, uname, gname + this.portable = !!opt.portable; + this.maxReadSize = opt.maxReadSize || maxReadSize; + this.linkCache = opt.linkCache || new Map(); + this.statCache = opt.statCache || new Map(); + this.preservePaths = !!opt.preservePaths; + this.cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd || process.cwd()); + this.strict = !!opt.strict; + this.noPax = !!opt.noPax; + this.noMtime = !!opt.noMtime; + 
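portable (set just above) suppresses everything machine-specific (uid, gid, uname, gname, atime, ctime) so that archives built on different machines can hash identically, and noPax additionally disables the pax extended-header fallback. A sketch of the field suppression as [HEADER] applies it further down (the helper name is mine):

// Sketch: which stat fields survive into a header in portable mode.
const headerFields = (stat, portable) => ({
  mode: stat.mode & 0o7777, // permission bits only, not the type bits
  size: stat.size,
  mtime: stat.mtime,
  uid: portable ? undefined : stat.uid,
  gid: portable ? undefined : stat.gid,
  atime: portable ? undefined : stat.atime,
  ctime: portable ? undefined : stat.ctime,
});
// e.g. headerFields(require('node:fs').statSync('.'), true)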
this.mtime = opt.mtime; + this.prefix = + opt.prefix ? (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix) : undefined; + this.onWriteEntry = opt.onWriteEntry; + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + let pathWarn = false; + if (!this.preservePaths) { + const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(this.path); + if (root && typeof stripped === 'string') { + this.path = stripped; + pathWarn = root; + } + } + this.win32 = !!opt.win32 || process.platform === 'win32'; + if (this.win32) { + // force the \ to / normalization, since we might not *actually* + // be on windows, but want \ to be considered a path separator. + this.path = winchars.decode(this.path.replace(/\\/g, '/')); + p = p.replace(/\\/g, '/'); + } + this.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.absolute || path_1.default.resolve(this.cwd, p)); + if (this.path === '') { + this.path = './'; + } + if (pathWarn) { + this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { + entry: this, + path: pathWarn + this.path, + }); + } + const cs = this.statCache.get(this.absolute); + if (cs) { + this[ONLSTAT](cs); + } + else { + this[LSTAT](); + } + } + warn(code, message, data = {}) { + return (0, warn_method_js_1.warnMethod)(this, code, message, data); + } + emit(ev, ...data) { + if (ev === 'error') { + this.#hadError = true; + } + return super.emit(ev, ...data); + } + [LSTAT]() { + fs_1.default.lstat(this.absolute, (er, stat) => { + if (er) { + return this.emit('error', er); + } + this[ONLSTAT](stat); + }); + } + [ONLSTAT](stat) { + this.statCache.set(this.absolute, stat); + this.stat = stat; + if (!stat.isFile()) { + stat.size = 0; + } + this.type = getType(stat); + this.emit('stat', stat); + this[PROCESS](); + } + [PROCESS]() { + switch (this.type) { + case 'File': + return this[FILE](); + case 'Directory': + return this[DIRECTORY](); + case 'SymbolicLink': + return this[SYMLINK](); + // unsupported types are ignored. + default: + return this.end(); + } + } + [MODE](mode) { + return (0, mode_fix_js_1.modeFix)(mode, this.type === 'Directory', this.portable); + } + [PREFIX](path) { + return prefixPath(path, this.prefix); + } + [HEADER]() { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot write header before stat'); + } + /* c8 ignore stop */ + if (this.type === 'Directory' && this.portable) { + this.noMtime = true; + } + this.onWriteEntry?.(this); + this.header = new header_js_1.Header({ + path: this[PREFIX](this.path), + // only apply the prefix to hard links. + linkpath: this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this[MODE](this.stat.mode), + uid: this.portable ? undefined : this.stat.uid, + gid: this.portable ? undefined : this.stat.gid, + size: this.stat.size, + mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime, + /* c8 ignore next */ + type: this.type === 'Unsupported' ? undefined : this.type, + uname: this.portable ? undefined + : this.stat.uid === this.myuid ? this.myuser + : '', + atime: this.portable ? undefined : this.stat.atime, + ctime: this.portable ? undefined : this.stat.ctime, + }); + if (this.header.encode() && !this.noPax) { + super.write(new pax_js_1.Pax({ + atime: this.portable ? undefined : this.header.atime, + ctime: this.portable ? 
undefined : this.header.ctime, + gid: this.portable ? undefined : this.header.gid, + mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime), + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + size: this.header.size, + uid: this.portable ? undefined : this.header.uid, + uname: this.portable ? undefined : this.header.uname, + dev: this.portable ? undefined : this.stat.dev, + ino: this.portable ? undefined : this.stat.ino, + nlink: this.portable ? undefined : this.stat.nlink, + }).encode()); + } + const block = this.header?.block; + /* c8 ignore start */ + if (!block) { + throw new Error('failed to encode header'); + } + /* c8 ignore stop */ + super.write(block); + } + [DIRECTORY]() { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot create directory entry without stat'); + } + /* c8 ignore stop */ + if (this.path.slice(-1) !== '/') { + this.path += '/'; + } + this.stat.size = 0; + this[HEADER](); + this.end(); + } + [SYMLINK]() { + fs_1.default.readlink(this.absolute, (er, linkpath) => { + if (er) { + return this.emit('error', er); + } + this[ONREADLINK](linkpath); + }); + } + [ONREADLINK](linkpath) { + this.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(linkpath); + this[HEADER](); + this.end(); + } + [HARDLINK](linkpath) { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot create link entry without stat'); + } + /* c8 ignore stop */ + this.type = 'Link'; + this.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.relative(this.cwd, linkpath)); + this.stat.size = 0; + this[HEADER](); + this.end(); + } + [FILE]() { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot create file entry without stat'); + } + /* c8 ignore stop */ + if (this.stat.nlink > 1) { + const linkKey = `${this.stat.dev}:${this.stat.ino}`; + const linkpath = this.linkCache.get(linkKey); + if (linkpath?.indexOf(this.cwd) === 0) { + return this[HARDLINK](linkpath); + } + this.linkCache.set(linkKey, this.absolute); + } + this[HEADER](); + if (this.stat.size === 0) { + return this.end(); + } + this[OPENFILE](); + } + [OPENFILE]() { + fs_1.default.open(this.absolute, 'r', (er, fd) => { + if (er) { + return this.emit('error', er); + } + this[ONOPENFILE](fd); + }); + } + [ONOPENFILE](fd) { + this.fd = fd; + if (this.#hadError) { + return this[CLOSE](); + } + /* c8 ignore start */ + if (!this.stat) { + throw new Error('should stat before calling onopenfile'); + } + /* c8 ignore start */ + this.blockLen = 512 * Math.ceil(this.stat.size / 512); + this.blockRemain = this.blockLen; + const bufLen = Math.min(this.blockLen, this.maxReadSize); + this.buf = Buffer.allocUnsafe(bufLen); + this.offset = 0; + this.pos = 0; + this.remain = this.stat.size; + this.length = this.buf.length; + this[READ](); + } + [READ]() { + const { fd, buf, offset, length, pos } = this; + if (fd === undefined || buf === undefined) { + throw new Error('cannot read file without first opening'); + } + fs_1.default.read(fd, buf, offset, length, pos, (er, bytesRead) => { + if (er) { + // ignoring the error from close(2) is a bad practice, but at + // this point we already have an error, don't need another one + return this[CLOSE](() => this.emit('error', er)); + } + this[ONREAD](bytesRead); + }); + } + /* c8 ignore start */ + [CLOSE](cb = () => { }) { + /* c8 ignore stop */ + if (this.fd !== undefined) + fs_1.default.close(this.fd, cb); + } + 
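[FILE] above also handles hardlinks: any entry with nlink > 1 is remembered under a `${dev}:${ino}` key, and a later sighting of the same inode inside the cwd is written as a Link to the first path rather than a second copy of the data. The lookup in isolation (a standalone sketch, my naming):

// Sketch: inode-keyed hardlink detection, as in [FILE] above.
const linkCache = new Map();
const classify = (path, stat) => {
  if (stat.nlink > 1) {
    const key = `${stat.dev}:${stat.ino}`;
    const first = linkCache.get(key);
    if (first) return { type: 'Link', linkpath: first };
    linkCache.set(key, path); // first sighting: remember, emit as File
  }
  return { type: 'File' };
};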
[ONREAD](bytesRead) { + if (bytesRead <= 0 && this.remain > 0) { + const er = Object.assign(new Error('encountered unexpected EOF'), { + path: this.absolute, + syscall: 'read', + code: 'EOF', + }); + return this[CLOSE](() => this.emit('error', er)); + } + if (bytesRead > this.remain) { + const er = Object.assign(new Error('did not encounter expected EOF'), { + path: this.absolute, + syscall: 'read', + code: 'EOF', + }); + return this[CLOSE](() => this.emit('error', er)); + } + /* c8 ignore start */ + if (!this.buf) { + throw new Error('should have created buffer prior to reading'); + } + /* c8 ignore stop */ + // null out the rest of the buffer, if we could fit the block padding + // at the end of this loop, we've incremented bytesRead and this.remain + // to be incremented up to the blockRemain level, as if we had expected + // to get a null-padded file, and read it until the end. then we will + // decrement both remain and blockRemain by bytesRead, and know that we + // reached the expected EOF, without any null buffer to append. + if (bytesRead === this.remain) { + for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) { + this.buf[i + this.offset] = 0; + bytesRead++; + this.remain++; + } + } + const chunk = this.offset === 0 && bytesRead === this.buf.length ? + this.buf + : this.buf.subarray(this.offset, this.offset + bytesRead); + const flushed = this.write(chunk); + if (!flushed) { + this[AWAITDRAIN](() => this[ONDRAIN]()); + } + else { + this[ONDRAIN](); + } + } + [AWAITDRAIN](cb) { + this.once('drain', cb); + } + write(chunk, encoding, cb) { + /* c8 ignore start - just junk to comply with NodeJS.WritableStream */ + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8'); + } + /* c8 ignore stop */ + if (this.blockRemain < chunk.length) { + const er = Object.assign(new Error('writing more data than expected'), { + path: this.absolute, + }); + return this.emit('error', er); + } + this.remain -= chunk.length; + this.blockRemain -= chunk.length; + this.pos += chunk.length; + this.offset += chunk.length; + return super.write(chunk, null, cb); + } + [ONDRAIN]() { + if (!this.remain) { + if (this.blockRemain) { + super.write(Buffer.alloc(this.blockRemain)); + } + return this[CLOSE](er => er ? this.emit('error', er) : this.end()); + } + /* c8 ignore start */ + if (!this.buf) { + throw new Error('buffer lost somehow in ONDRAIN'); + } + /* c8 ignore stop */ + if (this.offset >= this.length) { + // if we only have a smaller bit left to read, alloc a smaller buffer + // otherwise, keep it the same length it was before. 
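The padding loop in [ONREAD] above is the other half of the block math: a file's final short read is zeroed out to the 512-byte boundary so the archive stays block-aligned. Equivalent logic as a pure function (a sketch, not the in-place buffer version used above):

// Sketch: zero-fill a final chunk out to the tar block boundary.
const padToBlock = (chunk) => {
  const rem = chunk.length % 512;
  return rem === 0 ? chunk
    : Buffer.concat([chunk, Buffer.alloc(512 - rem)]);
};
console.log(padToBlock(Buffer.from('hello')).length); // 512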
+ this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length)); + this.offset = 0; + } + this.length = this.buf.length - this.offset; + this[READ](); + } +} +exports.WriteEntry = WriteEntry; +class WriteEntrySync extends WriteEntry { + sync = true; + [LSTAT]() { + this[ONLSTAT](fs_1.default.lstatSync(this.absolute)); + } + [SYMLINK]() { + this[ONREADLINK](fs_1.default.readlinkSync(this.absolute)); + } + [OPENFILE]() { + this[ONOPENFILE](fs_1.default.openSync(this.absolute, 'r')); + } + [READ]() { + let threw = true; + try { + const { fd, buf, offset, length, pos } = this; + /* c8 ignore start */ + if (fd === undefined || buf === undefined) { + throw new Error('fd and buf must be set in READ method'); + } + /* c8 ignore stop */ + const bytesRead = fs_1.default.readSync(fd, buf, offset, length, pos); + this[ONREAD](bytesRead); + threw = false; + } + finally { + // ignoring the error from close(2) is a bad practice, but at + // this point we already have an error, don't need another one + if (threw) { + try { + this[CLOSE](() => { }); + } + catch (er) { } + } + } + } + [AWAITDRAIN](cb) { + cb(); + } + /* c8 ignore start */ + [CLOSE](cb = () => { }) { + /* c8 ignore stop */ + if (this.fd !== undefined) + fs_1.default.closeSync(this.fd); + cb(); + } +} +exports.WriteEntrySync = WriteEntrySync; +class WriteEntryTar extends minipass_1.Minipass { + blockLen = 0; + blockRemain = 0; + buf = 0; + pos = 0; + remain = 0; + length = 0; + preservePaths; + portable; + strict; + noPax; + noMtime; + readEntry; + type; + prefix; + path; + mode; + uid; + gid; + uname; + gname; + header; + mtime; + atime; + ctime; + linkpath; + size; + onWriteEntry; + warn(code, message, data = {}) { + return (0, warn_method_js_1.warnMethod)(this, code, message, data); + } + constructor(readEntry, opt_ = {}) { + const opt = (0, options_js_1.dealias)(opt_); + super(); + this.preservePaths = !!opt.preservePaths; + this.portable = !!opt.portable; + this.strict = !!opt.strict; + this.noPax = !!opt.noPax; + this.noMtime = !!opt.noMtime; + this.onWriteEntry = opt.onWriteEntry; + this.readEntry = readEntry; + const { type } = readEntry; + /* c8 ignore start */ + if (type === 'Unsupported') { + throw new Error('writing entry that should be ignored'); + } + /* c8 ignore stop */ + this.type = type; + if (this.type === 'Directory' && this.portable) { + this.noMtime = true; + } + this.prefix = opt.prefix; + this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(readEntry.path); + this.mode = + readEntry.mode !== undefined ? + this[MODE](readEntry.mode) + : undefined; + this.uid = this.portable ? undefined : readEntry.uid; + this.gid = this.portable ? undefined : readEntry.gid; + this.uname = this.portable ? undefined : readEntry.uname; + this.gname = this.portable ? undefined : readEntry.gname; + this.size = readEntry.size; + this.mtime = + this.noMtime ? undefined : opt.mtime || readEntry.mtime; + this.atime = this.portable ? undefined : readEntry.atime; + this.ctime = this.portable ? undefined : readEntry.ctime; + this.linkpath = + readEntry.linkpath !== undefined ? 
+ (0, normalize_windows_path_js_1.normalizeWindowsPath)(readEntry.linkpath) + : undefined; + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + let pathWarn = false; + if (!this.preservePaths) { + const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(this.path); + if (root && typeof stripped === 'string') { + this.path = stripped; + pathWarn = root; + } + } + this.remain = readEntry.size; + this.blockRemain = readEntry.startBlockSize; + this.onWriteEntry?.(this); + this.header = new header_js_1.Header({ + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this.mode, + uid: this.portable ? undefined : this.uid, + gid: this.portable ? undefined : this.gid, + size: this.size, + mtime: this.noMtime ? undefined : this.mtime, + type: this.type, + uname: this.portable ? undefined : this.uname, + atime: this.portable ? undefined : this.atime, + ctime: this.portable ? undefined : this.ctime, + }); + if (pathWarn) { + this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { + entry: this, + path: pathWarn + this.path, + }); + } + if (this.header.encode() && !this.noPax) { + super.write(new pax_js_1.Pax({ + atime: this.portable ? undefined : this.atime, + ctime: this.portable ? undefined : this.ctime, + gid: this.portable ? undefined : this.gid, + mtime: this.noMtime ? undefined : this.mtime, + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + size: this.size, + uid: this.portable ? undefined : this.uid, + uname: this.portable ? undefined : this.uname, + dev: this.portable ? undefined : this.readEntry.dev, + ino: this.portable ? undefined : this.readEntry.ino, + nlink: this.portable ? undefined : this.readEntry.nlink, + }).encode()); + } + const b = this.header?.block; + /* c8 ignore start */ + if (!b) + throw new Error('failed to encode header'); + /* c8 ignore stop */ + super.write(b); + readEntry.pipe(this); + } + [PREFIX](path) { + return prefixPath(path, this.prefix); + } + [MODE](mode) { + return (0, mode_fix_js_1.modeFix)(mode, this.type === 'Directory', this.portable); + } + write(chunk, encoding, cb) { + /* c8 ignore start - just junk to comply with NodeJS.WritableStream */ + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8'); + } + /* c8 ignore stop */ + const writeLen = chunk.length; + if (writeLen > this.blockRemain) { + throw new Error('writing more to entry than is appropriate'); + } + this.blockRemain -= writeLen; + return super.write(chunk, cb); + } + end(chunk, encoding, cb) { + if (this.blockRemain) { + super.write(Buffer.alloc(this.blockRemain)); + } + /* c8 ignore start - just junk to comply with NodeJS.WritableStream */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding ?? 'utf8'); + } + if (cb) + this.once('finish', cb); + chunk ? 
super.end(chunk, cb) : super.end(cb); + /* c8 ignore stop */ + return this; + } +} +exports.WriteEntryTar = WriteEntryTar; +const getType = (stat) => stat.isFile() ? 'File' + : stat.isDirectory() ? 'Directory' + : stat.isSymbolicLink() ? 'SymbolicLink' + : 'Unsupported'; +//# sourceMappingURL=write-entry.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/create.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/create.js new file mode 100644 index 00000000000000..512a9911d70d5b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/create.js @@ -0,0 +1,77 @@ +import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass'; +import path from 'node:path'; +import { list } from './list.js'; +import { makeCommand } from './make-command.js'; +import { Pack, PackSync } from './pack.js'; +const createFileSync = (opt, files) => { + const p = new PackSync(opt); + const stream = new WriteStreamSync(opt.file, { + mode: opt.mode || 0o666, + }); + p.pipe(stream); + addFilesSync(p, files); +}; +const createFile = (opt, files) => { + const p = new Pack(opt); + const stream = new WriteStream(opt.file, { + mode: opt.mode || 0o666, + }); + p.pipe(stream); + const promise = new Promise((res, rej) => { + stream.on('error', rej); + stream.on('close', res); + p.on('error', rej); + }); + addFilesAsync(p, files); + return promise; +}; +const addFilesSync = (p, files) => { + files.forEach(file => { + if (file.charAt(0) === '@') { + list({ + file: path.resolve(p.cwd, file.slice(1)), + sync: true, + noResume: true, + onReadEntry: entry => p.add(entry), + }); + } + else { + p.add(file); + } + }); + p.end(); +}; +const addFilesAsync = async (p, files) => { + for (let i = 0; i < files.length; i++) { + const file = String(files[i]); + if (file.charAt(0) === '@') { + await list({ + file: path.resolve(String(p.cwd), file.slice(1)), + noResume: true, + onReadEntry: entry => { + p.add(entry); + }, + }); + } + else { + p.add(file); + } + } + p.end(); +}; +const createSync = (opt, files) => { + const p = new PackSync(opt); + addFilesSync(p, files); + return p; +}; +const createAsync = (opt, files) => { + const p = new Pack(opt); + addFilesAsync(p, files); + return p; +}; +export const create = makeCommand(createFileSync, createFile, createSync, createAsync, (_opt, files) => { + if (!files?.length) { + throw new TypeError('no paths specified to add to archive'); + } +}); +//# sourceMappingURL=create.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/cwd-error.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/cwd-error.js new file mode 100644 index 00000000000000..289a066b8e0317 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/cwd-error.js @@ -0,0 +1,14 @@ +export class CwdError extends Error { + path; + code; + syscall = 'chdir'; + constructor(path, code) { + super(`${code}: Cannot cd into '${path}'`); + this.path = path; + this.code = code; + } + get name() { + return 'CwdError'; + } +} +//# sourceMappingURL=cwd-error.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/extract.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/extract.js new file mode 100644 index 00000000000000..2274feef26e78f --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/extract.js @@ -0,0 +1,49 @@ +// tar -x +import * as fsm from '@isaacs/fs-minipass'; +import fs from 'node:fs'; +import { 
filesFilter } from './list.js'; +import { makeCommand } from './make-command.js'; +import { Unpack, UnpackSync } from './unpack.js'; +const extractFileSync = (opt) => { + const u = new UnpackSync(opt); + const file = opt.file; + const stat = fs.statSync(file); + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + const stream = new fsm.ReadStreamSync(file, { + readSize: readSize, + size: stat.size, + }); + stream.pipe(u); +}; +const extractFile = (opt, _) => { + const u = new Unpack(opt); + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + const file = opt.file; + const p = new Promise((resolve, reject) => { + u.on('error', reject); + u.on('close', resolve); + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + fs.stat(file, (er, stat) => { + if (er) { + reject(er); + } + else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size, + }); + stream.on('error', reject); + stream.pipe(u); + } + }); + }); + return p; +}; +export const extract = makeCommand(extractFileSync, extractFile, opt => new UnpackSync(opt), opt => new Unpack(opt), (opt, files) => { + if (files?.length) + filesFilter(opt, files); +}); +//# sourceMappingURL=extract.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/get-write-flag.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/get-write-flag.js new file mode 100644 index 00000000000000..2c7f3e8b28fdaf --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/get-write-flag.js @@ -0,0 +1,23 @@ +// Get the appropriate flag to use for creating files +// We use fmap on Windows platforms for files less than +// 512kb. This is a fairly low limit, but avoids making +// things slower in some cases. Since most of what this +// library is used for is extracting tarballs of many +// relatively small files in npm packages and the like, +// it can be a big boost on Windows platforms. +import fs from 'fs'; +const platform = process.env.__FAKE_PLATFORM__ || process.platform; +const isWindows = platform === 'win32'; +/* c8 ignore start */ +const { O_CREAT, O_TRUNC, O_WRONLY } = fs.constants; +const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) || + fs.constants.UV_FS_O_FILEMAP || + 0; +/* c8 ignore stop */ +const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP; +const fMapLimit = 512 * 1024; +const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY; +export const getWriteFlag = !fMapEnabled ? + () => 'w' + : (size) => (size < fMapLimit ? fMapFlag : 'w'); +//# sourceMappingURL=get-write-flag.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/header.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/header.js new file mode 100644 index 00000000000000..e15192b14b16e1 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/header.js @@ -0,0 +1,279 @@ +// parse a 512-byte header block to a data object, or vice-versa +// encode returns `true` if a pax extended header is needed, because +// the data could not be faithfully encoded in a simple header. +// (Also, check header.needPax to see if it needs a pax header.) 
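For readers tracing the field offsets in the code that follows, the class round-trips between plain objects and 512-byte ustar blocks. A minimal usage sketch (field values are illustrative; the import path assumes this esm build):

```js
import { Header } from './header.js'

// encode() fills a 512-byte ustar block and returns true only if the
// data could not be represented faithfully and needs a pax header
const h = new Header({
  path: 'hello.txt',
  mode: 0o644,
  size: 5,
  mtime: new Date(0),
  type: 'File',
})
console.log(h.encode())      // false: everything fit in the plain header
console.log(h.block?.length) // 512

// decode: the constructor accepts a buffer and recovers the fields
const h2 = new Header(h.block)
console.log(h2.path, h2.size, h2.cksumValid) // 'hello.txt' 5 true
```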
+import { posix as pathModule } from 'node:path'; +import * as large from './large-numbers.js'; +import * as types from './types.js'; +export class Header { + cksumValid = false; + needPax = false; + nullBlock = false; + block; + path; + mode; + uid; + gid; + size; + cksum; + #type = 'Unsupported'; + linkpath; + uname; + gname; + devmaj = 0; + devmin = 0; + atime; + ctime; + mtime; + charset; + comment; + constructor(data, off = 0, ex, gex) { + if (Buffer.isBuffer(data)) { + this.decode(data, off || 0, ex, gex); + } + else if (data) { + this.#slurp(data); + } + } + decode(buf, off, ex, gex) { + if (!off) { + off = 0; + } + if (!buf || !(buf.length >= off + 512)) { + throw new Error('need 512 bytes for header'); + } + this.path = decString(buf, off, 100); + this.mode = decNumber(buf, off + 100, 8); + this.uid = decNumber(buf, off + 108, 8); + this.gid = decNumber(buf, off + 116, 8); + this.size = decNumber(buf, off + 124, 12); + this.mtime = decDate(buf, off + 136, 12); + this.cksum = decNumber(buf, off + 148, 12); + // if we have extended or global extended headers, apply them now + // See https://github.com/npm/node-tar/pull/187 + // Apply global before local, so it overrides + if (gex) + this.#slurp(gex, true); + if (ex) + this.#slurp(ex); + // old tar versions marked dirs as a file with a trailing / + const t = decString(buf, off + 156, 1); + if (types.isCode(t)) { + this.#type = t || '0'; + } + if (this.#type === '0' && this.path.slice(-1) === '/') { + this.#type = '5'; + } + // tar implementations sometimes incorrectly put the stat(dir).size + // as the size in the tarball, even though Directory entries are + // not able to have any body at all. In the very rare chance that + // it actually DOES have a body, we weren't going to do anything with + // it anyway, and it'll just be a warning about an invalid header. + if (this.#type === '5') { + this.size = 0; + } + this.linkpath = decString(buf, off + 157, 100); + if (buf.subarray(off + 257, off + 265).toString() === + 'ustar\u000000') { + this.uname = decString(buf, off + 265, 32); + this.gname = decString(buf, off + 297, 32); + /* c8 ignore start */ + this.devmaj = decNumber(buf, off + 329, 8) ?? 0; + this.devmin = decNumber(buf, off + 337, 8) ?? 0; + /* c8 ignore stop */ + if (buf[off + 475] !== 0) { + // definitely a prefix, definitely >130 chars. + const prefix = decString(buf, off + 345, 155); + this.path = prefix + '/' + this.path; + } + else { + const prefix = decString(buf, off + 345, 130); + if (prefix) { + this.path = prefix + '/' + this.path; + } + this.atime = decDate(buf, off + 476, 12); + this.ctime = decDate(buf, off + 488, 12); + } + } + let sum = 8 * 0x20; + for (let i = off; i < off + 148; i++) { + sum += buf[i]; + } + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i]; + } + this.cksumValid = sum === this.cksum; + if (this.cksum === undefined && sum === 8 * 0x20) { + this.nullBlock = true; + } + } + #slurp(ex, gex = false) { + Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. Also, any + // null/undefined values are ignored. 
+ return !(v === null || + v === undefined || + (k === 'path' && gex) || + (k === 'linkpath' && gex) || + k === 'global'); + }))); + } + encode(buf, off = 0) { + if (!buf) { + buf = this.block = Buffer.alloc(512); + } + if (this.#type === 'Unsupported') { + this.#type = '0'; + } + if (!(buf.length >= off + 512)) { + throw new Error('need 512 bytes for header'); + } + const prefixSize = this.ctime || this.atime ? 130 : 155; + const split = splitPrefix(this.path || '', prefixSize); + const path = split[0]; + const prefix = split[1]; + this.needPax = !!split[2]; + this.needPax = encString(buf, off, 100, path) || this.needPax; + this.needPax = + encNumber(buf, off + 100, 8, this.mode) || this.needPax; + this.needPax = + encNumber(buf, off + 108, 8, this.uid) || this.needPax; + this.needPax = + encNumber(buf, off + 116, 8, this.gid) || this.needPax; + this.needPax = + encNumber(buf, off + 124, 12, this.size) || this.needPax; + this.needPax = + encDate(buf, off + 136, 12, this.mtime) || this.needPax; + buf[off + 156] = this.#type.charCodeAt(0); + this.needPax = + encString(buf, off + 157, 100, this.linkpath) || this.needPax; + buf.write('ustar\u000000', off + 257, 8); + this.needPax = + encString(buf, off + 265, 32, this.uname) || this.needPax; + this.needPax = + encString(buf, off + 297, 32, this.gname) || this.needPax; + this.needPax = + encNumber(buf, off + 329, 8, this.devmaj) || this.needPax; + this.needPax = + encNumber(buf, off + 337, 8, this.devmin) || this.needPax; + this.needPax = + encString(buf, off + 345, prefixSize, prefix) || this.needPax; + if (buf[off + 475] !== 0) { + this.needPax = + encString(buf, off + 345, 155, prefix) || this.needPax; + } + else { + this.needPax = + encString(buf, off + 345, 130, prefix) || this.needPax; + this.needPax = + encDate(buf, off + 476, 12, this.atime) || this.needPax; + this.needPax = + encDate(buf, off + 488, 12, this.ctime) || this.needPax; + } + let sum = 8 * 0x20; + for (let i = off; i < off + 148; i++) { + sum += buf[i]; + } + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i]; + } + this.cksum = sum; + encNumber(buf, off + 148, 8, this.cksum); + this.cksumValid = true; + return this.needPax; + } + get type() { + return (this.#type === 'Unsupported' ? + this.#type + : types.name.get(this.#type)); + } + get typeKey() { + return this.#type; + } + set type(type) { + const c = String(types.code.get(type)); + if (types.isCode(c) || c === 'Unsupported') { + this.#type = c; + } + else if (types.isCode(type)) { + this.#type = type; + } + else { + throw new TypeError('invalid entry type: ' + type); + } + } +} +const splitPrefix = (p, prefixSize) => { + const pathSize = 100; + let pp = p; + let prefix = ''; + let ret = undefined; + const root = pathModule.parse(p).root || '.'; + if (Buffer.byteLength(pp) < pathSize) { + ret = [pp, prefix, false]; + } + else { + // first set prefix to the dir, and path to the base + prefix = pathModule.dirname(pp); + pp = pathModule.basename(pp); + do { + if (Buffer.byteLength(pp) <= pathSize && + Buffer.byteLength(prefix) <= prefixSize) { + // both fit! 
+ ret = [pp, prefix, false]; + } + else if (Buffer.byteLength(pp) > pathSize && + Buffer.byteLength(prefix) <= prefixSize) { + // prefix fits in prefix, but path doesn't fit in path + ret = [pp.slice(0, pathSize - 1), prefix, true]; + } + else { + // make path take a bit from prefix + pp = pathModule.join(pathModule.basename(prefix), pp); + prefix = pathModule.dirname(prefix); + } + } while (prefix !== root && ret === undefined); + // at this point, found no resolution, just truncate + if (!ret) { + ret = [p.slice(0, pathSize - 1), '', true]; + } + } + return ret; +}; +const decString = (buf, off, size) => buf + .subarray(off, off + size) + .toString('utf8') + .replace(/\0.*/, ''); +const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size)); +const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000); +const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ? + large.parse(buf.subarray(off, off + size)) + : decSmallNumber(buf, off, size); +const nanUndef = (value) => (isNaN(value) ? undefined : value); +const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf + .subarray(off, off + size) + .toString('utf8') + .replace(/\0.*$/, '') + .trim(), 8)); +// the maximum encodable as a null-terminated octal, by field size +const MAXNUM = { + 12: 0o77777777777, + 8: 0o7777777, +}; +const encNumber = (buf, off, size, num) => num === undefined ? false + : num > MAXNUM[size] || num < 0 ? + (large.encode(num, buf.subarray(off, off + size)), true) + : (encSmallNumber(buf, off, size, num), false); +const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii'); +const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size); +const padOctal = (str, size) => (str.length === size - 1 ? + str + : new Array(size - str.length - 1).join('0') + str + ' ') + '\0'; +const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000)); +// enough to fill the longest string we've got +const NULLS = new Array(156).join('\0'); +// pad with nulls, return true if it's longer or non-ascii +const encString = (buf, off, size, str) => str === undefined ? 
false : ((buf.write(str + NULLS, off, size, 'utf8'), + str.length !== Buffer.byteLength(str) || str.length > size)); +//# sourceMappingURL=header.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/index.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/index.js new file mode 100644 index 00000000000000..1bac6415c8d732 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/index.js @@ -0,0 +1,20 @@ +export * from './create.js'; +export { create as c } from './create.js'; +export * from './extract.js'; +export { extract as x } from './extract.js'; +export * from './header.js'; +export * from './list.js'; +export { list as t } from './list.js'; +// classes +export * from './pack.js'; +export * from './parse.js'; +export * from './pax.js'; +export * from './read-entry.js'; +export * from './replace.js'; +export { replace as r } from './replace.js'; +export * as types from './types.js'; +export * from './unpack.js'; +export * from './update.js'; +export { update as u } from './update.js'; +export * from './write-entry.js'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/large-numbers.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/large-numbers.js new file mode 100644 index 00000000000000..4f2f7e5f14fc1b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/large-numbers.js @@ -0,0 +1,94 @@ +// Tar can encode large and negative numbers using a leading byte of +// 0xff for negative, and 0x80 for positive. +export const encode = (num, buf) => { + if (!Number.isSafeInteger(num)) { + // The number is so large that javascript cannot represent it with integer + // precision. + throw Error('cannot encode number outside of javascript safe integer range'); + } + else if (num < 0) { + encodeNegative(num, buf); + } + else { + encodePositive(num, buf); + } + return buf; +}; +const encodePositive = (num, buf) => { + buf[0] = 0x80; + for (var i = buf.length; i > 1; i--) { + buf[i - 1] = num & 0xff; + num = Math.floor(num / 0x100); + } +}; +const encodeNegative = (num, buf) => { + buf[0] = 0xff; + var flipped = false; + num = num * -1; + for (var i = buf.length; i > 1; i--) { + var byte = num & 0xff; + num = Math.floor(num / 0x100); + if (flipped) { + buf[i - 1] = onesComp(byte); + } + else if (byte === 0) { + buf[i - 1] = 0; + } + else { + flipped = true; + buf[i - 1] = twosComp(byte); + } + } +}; +export const parse = (buf) => { + const pre = buf[0]; + const value = pre === 0x80 ? pos(buf.subarray(1, buf.length)) + : pre === 0xff ? twos(buf) + : null; + if (value === null) { + throw Error('invalid base256 encoding'); + } + if (!Number.isSafeInteger(value)) { + // The number is so large that javascript cannot represent it with integer + // precision. 
+ throw Error('parsed number outside of javascript safe integer range'); + } + return value; +}; +const twos = (buf) => { + var len = buf.length; + var sum = 0; + var flipped = false; + for (var i = len - 1; i > -1; i--) { + var byte = Number(buf[i]); + var f; + if (flipped) { + f = onesComp(byte); + } + else if (byte === 0) { + f = byte; + } + else { + flipped = true; + f = twosComp(byte); + } + if (f !== 0) { + sum -= f * Math.pow(256, len - i - 1); + } + } + return sum; +}; +const pos = (buf) => { + var len = buf.length; + var sum = 0; + for (var i = len - 1; i > -1; i--) { + var byte = Number(buf[i]); + if (byte !== 0) { + sum += byte * Math.pow(256, len - i - 1); + } + } + return sum; +}; +const onesComp = (byte) => (0xff ^ byte) & 0xff; +const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff; +//# sourceMappingURL=large-numbers.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/list.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/list.js new file mode 100644 index 00000000000000..f49068400b6c92 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/list.js @@ -0,0 +1,106 @@ +// tar -t +import * as fsm from '@isaacs/fs-minipass'; +import fs from 'node:fs'; +import { dirname, parse } from 'path'; +import { makeCommand } from './make-command.js'; +import { Parser } from './parse.js'; +import { stripTrailingSlashes } from './strip-trailing-slashes.js'; +const onReadEntryFunction = (opt) => { + const onReadEntry = opt.onReadEntry; + opt.onReadEntry = + onReadEntry ? + e => { + onReadEntry(e); + e.resume(); + } + : e => e.resume(); +}; +// construct a filter that limits the file entries listed +// include child entries if a dir is included +export const filesFilter = (opt, files) => { + const map = new Map(files.map(f => [stripTrailingSlashes(f), true])); + const filter = opt.filter; + const mapHas = (file, r = '') => { + const root = r || parse(file).root || '.'; + let ret; + if (file === root) + ret = false; + else { + const m = map.get(file); + if (m !== undefined) { + ret = m; + } + else { + ret = mapHas(dirname(file), root); + } + } + map.set(file, ret); + return ret; + }; + opt.filter = + filter ? 
+ (file, entry) => filter(file, entry) && mapHas(stripTrailingSlashes(file)) + : file => mapHas(stripTrailingSlashes(file)); +}; +const listFileSync = (opt) => { + const p = new Parser(opt); + const file = opt.file; + let fd; + try { + const stat = fs.statSync(file); + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + if (stat.size < readSize) { + p.end(fs.readFileSync(file)); + } + else { + let pos = 0; + const buf = Buffer.allocUnsafe(readSize); + fd = fs.openSync(file, 'r'); + while (pos < stat.size) { + const bytesRead = fs.readSync(fd, buf, 0, readSize, pos); + pos += bytesRead; + p.write(buf.subarray(0, bytesRead)); + } + p.end(); + } + } + finally { + if (typeof fd === 'number') { + try { + fs.closeSync(fd); + /* c8 ignore next */ + } + catch (er) { } + } + } +}; +const listFile = (opt, _files) => { + const parse = new Parser(opt); + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + const file = opt.file; + const p = new Promise((resolve, reject) => { + parse.on('error', reject); + parse.on('end', resolve); + fs.stat(file, (er, stat) => { + if (er) { + reject(er); + } + else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size, + }); + stream.on('error', reject); + stream.pipe(parse); + } + }); + }); + return p; +}; +export const list = makeCommand(listFileSync, listFile, opt => new Parser(opt), opt => new Parser(opt), (opt, files) => { + if (files?.length) + filesFilter(opt, files); + if (!opt.noResume) + onReadEntryFunction(opt); +}); +//# sourceMappingURL=list.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/make-command.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/make-command.js new file mode 100644 index 00000000000000..f2f737bca78fd7 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/make-command.js @@ -0,0 +1,57 @@ +import { dealias, isAsyncFile, isAsyncNoFile, isSyncFile, isSyncNoFile, } from './options.js'; +export const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => { + return Object.assign((opt_ = [], entries, cb) => { + if (Array.isArray(opt_)) { + entries = opt_; + opt_ = {}; + } + if (typeof entries === 'function') { + cb = entries; + entries = undefined; + } + if (!entries) { + entries = []; + } + else { + entries = Array.from(entries); + } + const opt = dealias(opt_); + validate?.(opt, entries); + if (isSyncFile(opt)) { + if (typeof cb === 'function') { + throw new TypeError('callback not supported for sync tar functions'); + } + return syncFile(opt, entries); + } + else if (isAsyncFile(opt)) { + const p = asyncFile(opt, entries); + // weirdness to make TS happy + const c = cb ? cb : undefined; + return c ? 
p.then(() => c(), c) : p; + } + else if (isSyncNoFile(opt)) { + if (typeof cb === 'function') { + throw new TypeError('callback not supported for sync tar functions'); + } + return syncNoFile(opt, entries); + } + else if (isAsyncNoFile(opt)) { + if (typeof cb === 'function') { + throw new TypeError('callback only supported with file option'); + } + return asyncNoFile(opt, entries); + /* c8 ignore start */ + } + else { + throw new Error('impossible options??'); + } + /* c8 ignore stop */ + }, { + syncFile, + asyncFile, + syncNoFile, + asyncNoFile, + validate, + }); +}; +//# sourceMappingURL=make-command.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/mkdir.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/mkdir.js new file mode 100644 index 00000000000000..13498ef0082f0b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/mkdir.js @@ -0,0 +1,201 @@ +import { chownr, chownrSync } from 'chownr'; +import fs from 'fs'; +import { mkdirp, mkdirpSync } from 'mkdirp'; +import path from 'node:path'; +import { CwdError } from './cwd-error.js'; +import { normalizeWindowsPath } from './normalize-windows-path.js'; +import { SymlinkError } from './symlink-error.js'; +const cGet = (cache, key) => cache.get(normalizeWindowsPath(key)); +const cSet = (cache, key, val) => cache.set(normalizeWindowsPath(key), val); +const checkCwd = (dir, cb) => { + fs.stat(dir, (er, st) => { + if (er || !st.isDirectory()) { + er = new CwdError(dir, er?.code || 'ENOTDIR'); + } + cb(er); + }); +}; +/** + * Wrapper around mkdirp for tar's needs. + * + * The main purpose is to avoid creating directories if we know that + * they already exist (and track which ones exist for this purpose), + * and prevent entries from being extracted into symlinked folders, + * if `preservePaths` is not set. + */ +export const mkdir = (dir, opt, cb) => { + dir = normalizeWindowsPath(dir); + // if there's any overlap between mask and mode, + // then we'll need an explicit chmod + /* c8 ignore next */ + const umask = opt.umask ?? 0o22; + const mode = opt.mode | 0o0700; + const needChmod = (mode & umask) !== 0; + const uid = opt.uid; + const gid = opt.gid; + const doChown = typeof uid === 'number' && + typeof gid === 'number' && + (uid !== opt.processUid || gid !== opt.processGid); + const preserve = opt.preserve; + const unlink = opt.unlink; + const cache = opt.cache; + const cwd = normalizeWindowsPath(opt.cwd); + const done = (er, created) => { + if (er) { + cb(er); + } + else { + cSet(cache, dir, true); + if (created && doChown) { + chownr(created, uid, gid, er => done(er)); + } + else if (needChmod) { + fs.chmod(dir, mode, cb); + } + else { + cb(); + } + } + }; + if (cache && cGet(cache, dir) === true) { + return done(); + } + if (dir === cwd) { + return checkCwd(dir, done); + } + if (preserve) { + return mkdirp(dir, { mode }).then(made => done(null, made ?? 
undefined), // oh, ts + done); + } + const sub = normalizeWindowsPath(path.relative(cwd, dir)); + const parts = sub.split('/'); + mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done); +}; +const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { + if (!parts.length) { + return cb(null, created); + } + const p = parts.shift(); + const part = normalizeWindowsPath(path.resolve(base + '/' + p)); + if (cGet(cache, part)) { + return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb); + } + fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)); +}; +const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => { + if (er) { + fs.lstat(part, (statEr, st) => { + if (statEr) { + statEr.path = + statEr.path && normalizeWindowsPath(statEr.path); + cb(statEr); + } + else if (st.isDirectory()) { + mkdir_(part, parts, mode, cache, unlink, cwd, created, cb); + } + else if (unlink) { + fs.unlink(part, er => { + if (er) { + return cb(er); + } + fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)); + }); + } + else if (st.isSymbolicLink()) { + return cb(new SymlinkError(part, part + '/' + parts.join('/'))); + } + else { + cb(er); + } + }); + } + else { + created = created || part; + mkdir_(part, parts, mode, cache, unlink, cwd, created, cb); + } +}; +const checkCwdSync = (dir) => { + let ok = false; + let code = undefined; + try { + ok = fs.statSync(dir).isDirectory(); + } + catch (er) { + code = er?.code; + } + finally { + if (!ok) { + throw new CwdError(dir, code ?? 'ENOTDIR'); + } + } +}; +export const mkdirSync = (dir, opt) => { + dir = normalizeWindowsPath(dir); + // if there's any overlap between mask and mode, + // then we'll need an explicit chmod + /* c8 ignore next */ + const umask = opt.umask ?? 0o22; + const mode = opt.mode | 0o700; + const needChmod = (mode & umask) !== 0; + const uid = opt.uid; + const gid = opt.gid; + const doChown = typeof uid === 'number' && + typeof gid === 'number' && + (uid !== opt.processUid || gid !== opt.processGid); + const preserve = opt.preserve; + const unlink = opt.unlink; + const cache = opt.cache; + const cwd = normalizeWindowsPath(opt.cwd); + const done = (created) => { + cSet(cache, dir, true); + if (created && doChown) { + chownrSync(created, uid, gid); + } + if (needChmod) { + fs.chmodSync(dir, mode); + } + }; + if (cache && cGet(cache, dir) === true) { + return done(); + } + if (dir === cwd) { + checkCwdSync(cwd); + return done(); + } + if (preserve) { + return done(mkdirpSync(dir, mode) ?? 
undefined); + } + const sub = normalizeWindowsPath(path.relative(cwd, dir)); + const parts = sub.split('/'); + let created = undefined; + for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) { + part = normalizeWindowsPath(path.resolve(part)); + if (cGet(cache, part)) { + continue; + } + try { + fs.mkdirSync(part, mode); + created = created || part; + cSet(cache, part, true); + } + catch (er) { + const st = fs.lstatSync(part); + if (st.isDirectory()) { + cSet(cache, part, true); + continue; + } + else if (unlink) { + fs.unlinkSync(part); + fs.mkdirSync(part, mode); + created = created || part; + cSet(cache, part, true); + continue; + } + else if (st.isSymbolicLink()) { + return new SymlinkError(part, part + '/' + parts.join('/')); + } + } + } + return done(created); +}; +//# sourceMappingURL=mkdir.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/mode-fix.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/mode-fix.js new file mode 100644 index 00000000000000..5fd3bb88c1cb25 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/mode-fix.js @@ -0,0 +1,25 @@ +export const modeFix = (mode, isDir, portable) => { + mode &= 0o7777; + // in portable mode, use the minimum reasonable umask + // if this system creates files with 0o664 by default + // (as some linux distros do), then we'll write the + // archive with 0o644 instead. Also, don't ever create + // a file that is not readable/writable by the owner. + if (portable) { + mode = (mode | 0o600) & ~0o22; + } + // if dirs are readable, then they should be listable + if (isDir) { + if (mode & 0o400) { + mode |= 0o100; + } + if (mode & 0o40) { + mode |= 0o10; + } + if (mode & 0o4) { + mode |= 0o1; + } + } + return mode; +}; +//# sourceMappingURL=mode-fix.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/normalize-unicode.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/normalize-unicode.js new file mode 100644 index 00000000000000..94e5095476d6e0 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/normalize-unicode.js @@ -0,0 +1,13 @@ +// warning: extremely hot code path. +// This has been meticulously optimized for use +// within npm install on large package trees. +// Do not edit without careful benchmarking. +const normalizeCache = Object.create(null); +const { hasOwnProperty } = Object.prototype; +export const normalizeUnicode = (s) => { + if (!hasOwnProperty.call(normalizeCache, s)) { + normalizeCache[s] = s.normalize('NFD'); + } + return normalizeCache[s]; +}; +//# sourceMappingURL=normalize-unicode.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/normalize-windows-path.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/normalize-windows-path.js new file mode 100644 index 00000000000000..2d97d2b884e627 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/normalize-windows-path.js @@ -0,0 +1,9 @@ +// on windows, either \ or / are valid directory separators. +// on unix, \ is a valid character in filenames. +// so, on windows, and only on windows, we replace all \ chars with /, +// so that we can use / as our one and only directory separator char. +const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform; +export const normalizeWindowsPath = platform !== 'win32' ? 
+ (p) => p + : (p) => p && p.replace(/\\/g, '/'); +//# sourceMappingURL=normalize-windows-path.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/options.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/options.js new file mode 100644 index 00000000000000..a006d36c23c923 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/options.js @@ -0,0 +1,54 @@ +// turn tar(1) style args like `C` into the more verbose things like `cwd` +const argmap = new Map([ + ['C', 'cwd'], + ['f', 'file'], + ['z', 'gzip'], + ['P', 'preservePaths'], + ['U', 'unlink'], + ['strip-components', 'strip'], + ['stripComponents', 'strip'], + ['keep-newer', 'newer'], + ['keepNewer', 'newer'], + ['keep-newer-files', 'newer'], + ['keepNewerFiles', 'newer'], + ['k', 'keep'], + ['keep-existing', 'keep'], + ['keepExisting', 'keep'], + ['m', 'noMtime'], + ['no-mtime', 'noMtime'], + ['p', 'preserveOwner'], + ['L', 'follow'], + ['h', 'follow'], + ['onentry', 'onReadEntry'], +]); +export const isSyncFile = (o) => !!o.sync && !!o.file; +export const isAsyncFile = (o) => !o.sync && !!o.file; +export const isSyncNoFile = (o) => !!o.sync && !o.file; +export const isAsyncNoFile = (o) => !o.sync && !o.file; +export const isSync = (o) => !!o.sync; +export const isAsync = (o) => !o.sync; +export const isFile = (o) => !!o.file; +export const isNoFile = (o) => !o.file; +const dealiasKey = (k) => { + const d = argmap.get(k); + if (d) + return d; + return k; +}; +export const dealias = (opt = {}) => { + if (!opt) + return {}; + const result = {}; + for (const [key, v] of Object.entries(opt)) { + // TS doesn't know that aliases are going to always be the same type + const k = dealiasKey(key); + result[k] = v; + } + // affordance for deprecated noChmod -> chmod + if (result.chmod === undefined && result.noChmod === false) { + result.chmod = true; + } + delete result.noChmod; + return result; +}; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/pack.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/pack.js new file mode 100644 index 00000000000000..f59f32f94201fa --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/pack.js @@ -0,0 +1,445 @@ +// A readable tar stream creator +// Technically, this is a transform stream that you write paths into, +// and tar format comes out of. 
+// The `add()` method is like `write()` but returns this, +// and end() return `this` as well, so you can +// do `new Pack(opt).add('files').add('dir').end().pipe(output) +// You could also do something like: +// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar')) +import fs from 'fs'; +import { WriteEntry, WriteEntrySync, WriteEntryTar, } from './write-entry.js'; +export class PackJob { + path; + absolute; + entry; + stat; + readdir; + pending = false; + ignore = false; + piped = false; + constructor(path, absolute) { + this.path = path || './'; + this.absolute = absolute; + } +} +import { Minipass } from 'minipass'; +import * as zlib from 'minizlib'; +import { Yallist } from 'yallist'; +import { ReadEntry } from './read-entry.js'; +import { warnMethod, } from './warn-method.js'; +const EOF = Buffer.alloc(1024); +const ONSTAT = Symbol('onStat'); +const ENDED = Symbol('ended'); +const QUEUE = Symbol('queue'); +const CURRENT = Symbol('current'); +const PROCESS = Symbol('process'); +const PROCESSING = Symbol('processing'); +const PROCESSJOB = Symbol('processJob'); +const JOBS = Symbol('jobs'); +const JOBDONE = Symbol('jobDone'); +const ADDFSENTRY = Symbol('addFSEntry'); +const ADDTARENTRY = Symbol('addTarEntry'); +const STAT = Symbol('stat'); +const READDIR = Symbol('readdir'); +const ONREADDIR = Symbol('onreaddir'); +const PIPE = Symbol('pipe'); +const ENTRY = Symbol('entry'); +const ENTRYOPT = Symbol('entryOpt'); +const WRITEENTRYCLASS = Symbol('writeEntryClass'); +const WRITE = Symbol('write'); +const ONDRAIN = Symbol('ondrain'); +import path from 'path'; +import { normalizeWindowsPath } from './normalize-windows-path.js'; +export class Pack extends Minipass { + opt; + cwd; + maxReadSize; + preservePaths; + strict; + noPax; + prefix; + linkCache; + statCache; + file; + portable; + zip; + readdirCache; + noDirRecurse; + follow; + noMtime; + mtime; + filter; + jobs; + [WRITEENTRYCLASS]; + onWriteEntry; + [QUEUE]; + [JOBS] = 0; + [PROCESSING] = false; + [ENDED] = false; + constructor(opt = {}) { + //@ts-ignore + super(); + this.opt = opt; + this.file = opt.file || ''; + this.cwd = opt.cwd || process.cwd(); + this.maxReadSize = opt.maxReadSize; + this.preservePaths = !!opt.preservePaths; + this.strict = !!opt.strict; + this.noPax = !!opt.noPax; + this.prefix = normalizeWindowsPath(opt.prefix || ''); + this.linkCache = opt.linkCache || new Map(); + this.statCache = opt.statCache || new Map(); + this.readdirCache = opt.readdirCache || new Map(); + this.onWriteEntry = opt.onWriteEntry; + this[WRITEENTRYCLASS] = WriteEntry; + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + this.portable = !!opt.portable; + if (opt.gzip || opt.brotli) { + if (opt.gzip && opt.brotli) { + throw new TypeError('gzip and brotli are mutually exclusive'); + } + if (opt.gzip) { + if (typeof opt.gzip !== 'object') { + opt.gzip = {}; + } + if (this.portable) { + opt.gzip.portable = true; + } + this.zip = new zlib.Gzip(opt.gzip); + } + if (opt.brotli) { + if (typeof opt.brotli !== 'object') { + opt.brotli = {}; + } + this.zip = new zlib.BrotliCompress(opt.brotli); + } + /* c8 ignore next */ + if (!this.zip) + throw new Error('impossible'); + const zip = this.zip; + zip.on('data', chunk => super.write(chunk)); + zip.on('end', () => super.end()); + zip.on('drain', () => this[ONDRAIN]()); + this.on('resume', () => zip.resume()); + } + else { + this.on('drain', this[ONDRAIN]); + } + this.noDirRecurse = !!opt.noDirRecurse; + this.follow = !!opt.follow; + this.noMtime = !!opt.noMtime; 
+ if (opt.mtime) + this.mtime = opt.mtime; + this.filter = + typeof opt.filter === 'function' ? opt.filter : () => true; + this[QUEUE] = new Yallist(); + this[JOBS] = 0; + this.jobs = Number(opt.jobs) || 4; + this[PROCESSING] = false; + this[ENDED] = false; + } + [WRITE](chunk) { + return super.write(chunk); + } + add(path) { + this.write(path); + return this; + } + end(path, encoding, cb) { + /* c8 ignore start */ + if (typeof path === 'function') { + cb = path; + path = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (path) { + this.add(path); + } + this[ENDED] = true; + this[PROCESS](); + /* c8 ignore next */ + if (cb) + cb(); + return this; + } + write(path) { + if (this[ENDED]) { + throw new Error('write after end'); + } + if (path instanceof ReadEntry) { + this[ADDTARENTRY](path); + } + else { + this[ADDFSENTRY](path); + } + return this.flowing; + } + [ADDTARENTRY](p) { + const absolute = normalizeWindowsPath(path.resolve(this.cwd, p.path)); + // in this case, we don't have to wait for the stat + if (!this.filter(p.path, p)) { + p.resume(); + } + else { + const job = new PackJob(p.path, absolute); + job.entry = new WriteEntryTar(p, this[ENTRYOPT](job)); + job.entry.on('end', () => this[JOBDONE](job)); + this[JOBS] += 1; + this[QUEUE].push(job); + } + this[PROCESS](); + } + [ADDFSENTRY](p) { + const absolute = normalizeWindowsPath(path.resolve(this.cwd, p)); + this[QUEUE].push(new PackJob(p, absolute)); + this[PROCESS](); + } + [STAT](job) { + job.pending = true; + this[JOBS] += 1; + const stat = this.follow ? 'stat' : 'lstat'; + fs[stat](job.absolute, (er, stat) => { + job.pending = false; + this[JOBS] -= 1; + if (er) { + this.emit('error', er); + } + else { + this[ONSTAT](job, stat); + } + }); + } + [ONSTAT](job, stat) { + this.statCache.set(job.absolute, stat); + job.stat = stat; + // now we have the stat, we can filter it. + if (!this.filter(job.path, stat)) { + job.ignore = true; + } + this[PROCESS](); + } + [READDIR](job) { + job.pending = true; + this[JOBS] += 1; + fs.readdir(job.absolute, (er, entries) => { + job.pending = false; + this[JOBS] -= 1; + if (er) { + return this.emit('error', er); + } + this[ONREADDIR](job, entries); + }); + } + [ONREADDIR](job, entries) { + this.readdirCache.set(job.absolute, entries); + job.readdir = entries; + this[PROCESS](); + } + [PROCESS]() { + if (this[PROCESSING]) { + return; + } + this[PROCESSING] = true; + for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) { + this[PROCESSJOB](w.value); + if (w.value.ignore) { + const p = w.next; + this[QUEUE].removeNode(w); + w.next = p; + } + } + this[PROCESSING] = false; + if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) { + if (this.zip) { + this.zip.end(EOF); + } + else { + super.write(EOF); + super.end(); + } + } + } + get [CURRENT]() { + return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value; + } + [JOBDONE](_job) { + this[QUEUE].shift(); + this[JOBS] -= 1; + this[PROCESS](); + } + [PROCESSJOB](job) { + if (job.pending) { + return; + } + if (job.entry) { + if (job === this[CURRENT] && !job.piped) { + this[PIPE](job); + } + return; + } + if (!job.stat) { + const sc = this.statCache.get(job.absolute); + if (sc) { + this[ONSTAT](job, sc); + } + else { + this[STAT](job); + } + } + if (!job.stat) { + return; + } + // filtered out! 
+ if (job.ignore) { + return; + } + if (!this.noDirRecurse && + job.stat.isDirectory() && + !job.readdir) { + const rc = this.readdirCache.get(job.absolute); + if (rc) { + this[ONREADDIR](job, rc); + } + else { + this[READDIR](job); + } + if (!job.readdir) { + return; + } + } + // we know it doesn't have an entry, because that got checked above + job.entry = this[ENTRY](job); + if (!job.entry) { + job.ignore = true; + return; + } + if (job === this[CURRENT] && !job.piped) { + this[PIPE](job); + } + } + [ENTRYOPT](job) { + return { + onwarn: (code, msg, data) => this.warn(code, msg, data), + noPax: this.noPax, + cwd: this.cwd, + absolute: job.absolute, + preservePaths: this.preservePaths, + maxReadSize: this.maxReadSize, + strict: this.strict, + portable: this.portable, + linkCache: this.linkCache, + statCache: this.statCache, + noMtime: this.noMtime, + mtime: this.mtime, + prefix: this.prefix, + onWriteEntry: this.onWriteEntry, + }; + } + [ENTRY](job) { + this[JOBS] += 1; + try { + const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job)); + return e + .on('end', () => this[JOBDONE](job)) + .on('error', er => this.emit('error', er)); + } + catch (er) { + this.emit('error', er); + } + } + [ONDRAIN]() { + if (this[CURRENT] && this[CURRENT].entry) { + this[CURRENT].entry.resume(); + } + } + // like .pipe() but using super, because our write() is special + [PIPE](job) { + job.piped = true; + if (job.readdir) { + job.readdir.forEach(entry => { + const p = job.path; + const base = p === './' ? '' : p.replace(/\/*$/, '/'); + this[ADDFSENTRY](base + entry); + }); + } + const source = job.entry; + const zip = this.zip; + /* c8 ignore start */ + if (!source) + throw new Error('cannot pipe without source'); + /* c8 ignore stop */ + if (zip) { + source.on('data', chunk => { + if (!zip.write(chunk)) { + source.pause(); + } + }); + } + else { + source.on('data', chunk => { + if (!super.write(chunk)) { + source.pause(); + } + }); + } + } + pause() { + if (this.zip) { + this.zip.pause(); + } + return super.pause(); + } + warn(code, message, data = {}) { + warnMethod(this, code, message, data); + } +} +export class PackSync extends Pack { + sync = true; + constructor(opt) { + super(opt); + this[WRITEENTRYCLASS] = WriteEntrySync; + } + // pause/resume are no-ops in sync streams. + pause() { } + resume() { } + [STAT](job) { + const stat = this.follow ? 'statSync' : 'lstatSync'; + this[ONSTAT](job, fs[stat](job.absolute)); + } + [READDIR](job) { + this[ONREADDIR](job, fs.readdirSync(job.absolute)); + } + // gotta get it all in this tick + [PIPE](job) { + const source = job.entry; + const zip = this.zip; + if (job.readdir) { + job.readdir.forEach(entry => { + const p = job.path; + const base = p === './' ? 
'' : p.replace(/\/*$/, '/'); + this[ADDFSENTRY](base + entry); + }); + } + /* c8 ignore start */ + if (!source) + throw new Error('Cannot pipe without source'); + /* c8 ignore stop */ + if (zip) { + source.on('data', chunk => { + zip.write(chunk); + }); + } + else { + source.on('data', chunk => { + super[WRITE](chunk); + }); + } + } +} +//# sourceMappingURL=pack.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/package.json b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/parse.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/parse.js new file mode 100644 index 00000000000000..f2c802e6eef04d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/parse.js @@ -0,0 +1,595 @@ +// this[BUFFER] is the remainder of a chunk if we're waiting for +// the full 512 bytes of a header to come in. We will Buffer.concat() +// it to the next write(), which is a mem copy, but a small one. +// +// this[QUEUE] is a Yallist of entries that haven't been emitted +// yet this can only get filled up if the user keeps write()ing after +// a write() returns false, or does a write() with more than one entry +// +// We don't buffer chunks, we always parse them and either create an +// entry, or push it into the active entry. The ReadEntry class knows +// to throw data away if .ignore=true +// +// Shift entry off the buffer when it emits 'end', and emit 'entry' for +// the next one in the list. +// +// At any time, we're pushing body chunks into the entry at WRITEENTRY, +// and waiting for 'end' on the entry at READENTRY +// +// ignored entries get .resume() called on them straight away +import { EventEmitter as EE } from 'events'; +import { BrotliDecompress, Unzip } from 'minizlib'; +import { Yallist } from 'yallist'; +import { Header } from './header.js'; +import { Pax } from './pax.js'; +import { ReadEntry } from './read-entry.js'; +import { warnMethod, } from './warn-method.js'; +const maxMetaEntrySize = 1024 * 1024; +const gzipHeader = Buffer.from([0x1f, 0x8b]); +const STATE = Symbol('state'); +const WRITEENTRY = Symbol('writeEntry'); +const READENTRY = Symbol('readEntry'); +const NEXTENTRY = Symbol('nextEntry'); +const PROCESSENTRY = Symbol('processEntry'); +const EX = Symbol('extendedHeader'); +const GEX = Symbol('globalExtendedHeader'); +const META = Symbol('meta'); +const EMITMETA = Symbol('emitMeta'); +const BUFFER = Symbol('buffer'); +const QUEUE = Symbol('queue'); +const ENDED = Symbol('ended'); +const EMITTEDEND = Symbol('emittedEnd'); +const EMIT = Symbol('emit'); +const UNZIP = Symbol('unzip'); +const CONSUMECHUNK = Symbol('consumeChunk'); +const CONSUMECHUNKSUB = Symbol('consumeChunkSub'); +const CONSUMEBODY = Symbol('consumeBody'); +const CONSUMEMETA = Symbol('consumeMeta'); +const CONSUMEHEADER = Symbol('consumeHeader'); +const CONSUMING = Symbol('consuming'); +const BUFFERCONCAT = Symbol('bufferConcat'); +const MAYBEEND = Symbol('maybeEnd'); +const WRITING = Symbol('writing'); +const ABORTED = Symbol('aborted'); +const DONE = Symbol('onDone'); +const SAW_VALID_ENTRY = Symbol('sawValidEntry'); +const SAW_NULL_BLOCK = Symbol('sawNullBlock'); +const SAW_EOF = Symbol('sawEOF'); +const CLOSESTREAM = Symbol('closeStream'); +const noop = () => 
true; +export class Parser extends EE { + file; + strict; + maxMetaEntrySize; + filter; + brotli; + writable = true; + readable = false; + [QUEUE] = new Yallist(); + [BUFFER]; + [READENTRY]; + [WRITEENTRY]; + [STATE] = 'begin'; + [META] = ''; + [EX]; + [GEX]; + [ENDED] = false; + [UNZIP]; + [ABORTED] = false; + [SAW_VALID_ENTRY]; + [SAW_NULL_BLOCK] = false; + [SAW_EOF] = false; + [WRITING] = false; + [CONSUMING] = false; + [EMITTEDEND] = false; + constructor(opt = {}) { + super(); + this.file = opt.file || ''; + // these BADARCHIVE errors can't be detected early. listen on DONE. + this.on(DONE, () => { + if (this[STATE] === 'begin' || + this[SAW_VALID_ENTRY] === false) { + // either less than 1 block of data, or all entries were invalid. + // Either way, probably not even a tarball. + this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format'); + } + }); + if (opt.ondone) { + this.on(DONE, opt.ondone); + } + else { + this.on(DONE, () => { + this.emit('prefinish'); + this.emit('finish'); + this.emit('end'); + }); + } + this.strict = !!opt.strict; + this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize; + this.filter = typeof opt.filter === 'function' ? opt.filter : noop; + // Unlike gzip, brotli doesn't have any magic bytes to identify it + // Users need to explicitly tell us they're extracting a brotli file + // Or we infer from the file extension + const isTBR = opt.file && + (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr')); + // if it's a tbr file it MIGHT be brotli, but we don't know until + // we look at it and verify it's not a valid tar file. + this.brotli = + !opt.gzip && opt.brotli !== undefined ? opt.brotli + : isTBR ? undefined + : false; + // have to set this so that streams are ok piping into it + this.on('end', () => this[CLOSESTREAM]()); + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + if (typeof opt.onReadEntry === 'function') { + this.on('entry', opt.onReadEntry); + } + } + warn(code, message, data = {}) { + warnMethod(this, code, message, data); + } + [CONSUMEHEADER](chunk, position) { + if (this[SAW_VALID_ENTRY] === undefined) { + this[SAW_VALID_ENTRY] = false; + } + let header; + try { + header = new Header(chunk, position, this[EX], this[GEX]); + } + catch (er) { + return this.warn('TAR_ENTRY_INVALID', er); + } + if (header.nullBlock) { + if (this[SAW_NULL_BLOCK]) { + this[SAW_EOF] = true; + // ending an archive with no entries. pointless, but legal. + if (this[STATE] === 'begin') { + this[STATE] = 'header'; + } + this[EMIT]('eof'); + } + else { + this[SAW_NULL_BLOCK] = true; + this[EMIT]('nullBlock'); + } + } + else { + this[SAW_NULL_BLOCK] = false; + if (!header.cksumValid) { + this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header }); + } + else if (!header.path) { + this.warn('TAR_ENTRY_INVALID', 'path is required', { header }); + } + else { + const type = header.type; + if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) { + this.warn('TAR_ENTRY_INVALID', 'linkpath required', { + header, + }); + } + else if (!/^(Symbolic)?Link$/.test(type) && + !/^(Global)?ExtendedHeader$/.test(type) && + header.linkpath) { + this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', { + header, + }); + } + else { + const entry = (this[WRITEENTRY] = new ReadEntry(header, this[EX], this[GEX])); + // we do this for meta & ignored entries as well, because they + // are still valid tar, or else we wouldn't know to ignore them + if (!this[SAW_VALID_ENTRY]) { + if (entry.remain) { + // this might be the one! 
+ const onend = () => { + if (!entry.invalid) { + this[SAW_VALID_ENTRY] = true; + } + }; + entry.on('end', onend); + } + else { + this[SAW_VALID_ENTRY] = true; + } + } + if (entry.meta) { + if (entry.size > this.maxMetaEntrySize) { + entry.ignore = true; + this[EMIT]('ignoredEntry', entry); + this[STATE] = 'ignore'; + entry.resume(); + } + else if (entry.size > 0) { + this[META] = ''; + entry.on('data', c => (this[META] += c)); + this[STATE] = 'meta'; + } + } + else { + this[EX] = undefined; + entry.ignore = + entry.ignore || !this.filter(entry.path, entry); + if (entry.ignore) { + // probably valid, just not something we care about + this[EMIT]('ignoredEntry', entry); + this[STATE] = entry.remain ? 'ignore' : 'header'; + entry.resume(); + } + else { + if (entry.remain) { + this[STATE] = 'body'; + } + else { + this[STATE] = 'header'; + entry.end(); + } + if (!this[READENTRY]) { + this[QUEUE].push(entry); + this[NEXTENTRY](); + } + else { + this[QUEUE].push(entry); + } + } + } + } + } + } + } + [CLOSESTREAM]() { + queueMicrotask(() => this.emit('close')); + } + [PROCESSENTRY](entry) { + let go = true; + if (!entry) { + this[READENTRY] = undefined; + go = false; + } + else if (Array.isArray(entry)) { + const [ev, ...args] = entry; + this.emit(ev, ...args); + } + else { + this[READENTRY] = entry; + this.emit('entry', entry); + if (!entry.emittedEnd) { + entry.on('end', () => this[NEXTENTRY]()); + go = false; + } + } + return go; + } + [NEXTENTRY]() { + do { } while (this[PROCESSENTRY](this[QUEUE].shift())); + if (!this[QUEUE].length) { + // At this point, there's nothing in the queue, but we may have an + // entry which is being consumed (readEntry). + // If we don't, then we definitely can handle more data. + // If we do, and either it's flowing, or it has never had any data + // written to it, then it needs more. + // The only other possibility is that it has returned false from a + // write() call, so we wait for the next drain to continue. + const re = this[READENTRY]; + const drainNow = !re || re.flowing || re.size === re.remain; + if (drainNow) { + if (!this[WRITING]) { + this.emit('drain'); + } + } + else { + re.once('drain', () => this.emit('drain')); + } + } + } + [CONSUMEBODY](chunk, position) { + // write up to but no more than writeEntry.blockRemain + const entry = this[WRITEENTRY]; + /* c8 ignore start */ + if (!entry) { + throw new Error('attempt to consume body without entry??'); + } + const br = entry.blockRemain ?? 0; + /* c8 ignore stop */ + const c = br >= chunk.length && position === 0 ? + chunk + : chunk.subarray(position, position + br); + entry.write(c); + if (!entry.blockRemain) { + this[STATE] = 'header'; + this[WRITEENTRY] = undefined; + entry.end(); + } + return c.length; + } + [CONSUMEMETA](chunk, position) { + const entry = this[WRITEENTRY]; + const ret = this[CONSUMEBODY](chunk, position); + // if we finished, then the entry is reset + if (!this[WRITEENTRY] && entry) { + this[EMITMETA](entry); + } + return ret; + } + [EMIT](ev, data, extra) { + if (!this[QUEUE].length && !this[READENTRY]) { + this.emit(ev, data, extra); + } + else { + this[QUEUE].push([ev, data, extra]); + } + } + [EMITMETA](entry) { + this[EMIT]('meta', this[META]); + switch (entry.type) { + case 'ExtendedHeader': + case 'OldExtendedHeader': + this[EX] = Pax.parse(this[META], this[EX], false); + break; + case 'GlobalExtendedHeader': + this[GEX] = Pax.parse(this[META], this[GEX], true); + break; + case 'NextFileHasLongPath': + case 'OldGnuLongPath': { + const ex = this[EX] ?? 
Object.create(null); + this[EX] = ex; + ex.path = this[META].replace(/\0.*/, ''); + break; + } + case 'NextFileHasLongLinkpath': { + const ex = this[EX] || Object.create(null); + this[EX] = ex; + ex.linkpath = this[META].replace(/\0.*/, ''); + break; + } + /* c8 ignore start */ + default: + throw new Error('unknown meta: ' + entry.type); + /* c8 ignore stop */ + } + } + abort(error) { + this[ABORTED] = true; + this.emit('abort', error); + // always throws, even in non-strict mode + this.warn('TAR_ABORT', error, { recoverable: false }); + } + write(chunk, encoding, cb) { + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, + /* c8 ignore next */ + typeof encoding === 'string' ? encoding : 'utf8'); + } + if (this[ABORTED]) { + /* c8 ignore next */ + cb?.(); + return false; + } + // first write, might be gzipped + const needSniff = this[UNZIP] === undefined || + (this.brotli === undefined && this[UNZIP] === false); + if (needSniff && chunk) { + if (this[BUFFER]) { + chunk = Buffer.concat([this[BUFFER], chunk]); + this[BUFFER] = undefined; + } + if (chunk.length < gzipHeader.length) { + this[BUFFER] = chunk; + /* c8 ignore next */ + cb?.(); + return true; + } + // look for gzip header + for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) { + if (chunk[i] !== gzipHeader[i]) { + this[UNZIP] = false; + } + } + const maybeBrotli = this.brotli === undefined; + if (this[UNZIP] === false && maybeBrotli) { + // read the first header to see if it's a valid tar file. If so, + // we can safely assume that it's not actually brotli, despite the + // .tbr or .tar.br file extension. + // if we ended before getting a full chunk, yes, def brotli + if (chunk.length < 512) { + if (this[ENDED]) { + this.brotli = true; + } + else { + this[BUFFER] = chunk; + /* c8 ignore next */ + cb?.(); + return true; + } + } + else { + // if it's tar, it's pretty reliably not brotli, chances of + // that happening are astronomical. + try { + new Header(chunk.subarray(0, 512)); + this.brotli = false; + } + catch (_) { + this.brotli = true; + } + } + } + if (this[UNZIP] === undefined || + (this[UNZIP] === false && this.brotli)) { + const ended = this[ENDED]; + this[ENDED] = false; + this[UNZIP] = + this[UNZIP] === undefined ? + new Unzip({}) + : new BrotliDecompress({}); + this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk)); + this[UNZIP].on('error', er => this.abort(er)); + this[UNZIP].on('end', () => { + this[ENDED] = true; + this[CONSUMECHUNK](); + }); + this[WRITING] = true; + const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk); + this[WRITING] = false; + cb?.(); + return ret; + } + } + this[WRITING] = true; + if (this[UNZIP]) { + this[UNZIP].write(chunk); + } + else { + this[CONSUMECHUNK](chunk); + } + this[WRITING] = false; + // return false if there's a queue, or if the current entry isn't flowing + const ret = this[QUEUE].length ? false + : this[READENTRY] ? this[READENTRY].flowing + : true; + // if we have no queue, then that means a clogged READENTRY + if (!ret && !this[QUEUE].length) { + this[READENTRY]?.once('drain', () => this.emit('drain')); + } + /* c8 ignore next */ + cb?.(); + return ret; + } + [BUFFERCONCAT](c) { + if (c && !this[ABORTED]) { + this[BUFFER] = + this[BUFFER] ? 
Buffer.concat([this[BUFFER], c]) : c; + } + } + [MAYBEEND]() { + if (this[ENDED] && + !this[EMITTEDEND] && + !this[ABORTED] && + !this[CONSUMING]) { + this[EMITTEDEND] = true; + const entry = this[WRITEENTRY]; + if (entry && entry.blockRemain) { + // truncated, likely a damaged file + const have = this[BUFFER] ? this[BUFFER].length : 0; + this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry }); + if (this[BUFFER]) { + entry.write(this[BUFFER]); + } + entry.end(); + } + this[EMIT](DONE); + } + } + [CONSUMECHUNK](chunk) { + if (this[CONSUMING] && chunk) { + this[BUFFERCONCAT](chunk); + } + else if (!chunk && !this[BUFFER]) { + this[MAYBEEND](); + } + else if (chunk) { + this[CONSUMING] = true; + if (this[BUFFER]) { + this[BUFFERCONCAT](chunk); + const c = this[BUFFER]; + this[BUFFER] = undefined; + this[CONSUMECHUNKSUB](c); + } + else { + this[CONSUMECHUNKSUB](chunk); + } + while (this[BUFFER] && + this[BUFFER]?.length >= 512 && + !this[ABORTED] && + !this[SAW_EOF]) { + const c = this[BUFFER]; + this[BUFFER] = undefined; + this[CONSUMECHUNKSUB](c); + } + this[CONSUMING] = false; + } + if (!this[BUFFER] || this[ENDED]) { + this[MAYBEEND](); + } + } + [CONSUMECHUNKSUB](chunk) { + // we know that we are in CONSUMING mode, so anything written goes into + // the buffer. Advance the position and put any remainder in the buffer. + let position = 0; + const length = chunk.length; + while (position + 512 <= length && + !this[ABORTED] && + !this[SAW_EOF]) { + switch (this[STATE]) { + case 'begin': + case 'header': + this[CONSUMEHEADER](chunk, position); + position += 512; + break; + case 'ignore': + case 'body': + position += this[CONSUMEBODY](chunk, position); + break; + case 'meta': + position += this[CONSUMEMETA](chunk, position); + break; + /* c8 ignore start */ + default: + throw new Error('invalid state: ' + this[STATE]); + /* c8 ignore stop */ + } + } + if (position < length) { + if (this[BUFFER]) { + this[BUFFER] = Buffer.concat([ + chunk.subarray(position), + this[BUFFER], + ]); + } + else { + this[BUFFER] = chunk.subarray(position); + } + } + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + if (cb) + this.once('finish', cb); + if (!this[ABORTED]) { + if (this[UNZIP]) { + /* c8 ignore start */ + if (chunk) + this[UNZIP].write(chunk); + /* c8 ignore stop */ + this[UNZIP].end(); + } + else { + this[ENDED] = true; + if (this.brotli === undefined) + chunk = chunk || Buffer.alloc(0); + if (chunk) + this.write(chunk); + this[MAYBEEND](); + } + } + return this; + } +} +//# sourceMappingURL=parse.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/path-reservations.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/path-reservations.js new file mode 100644 index 00000000000000..e63b9c91e9a808 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/path-reservations.js @@ -0,0 +1,166 @@ +// A path exclusive reservation system +// reserve([list, of, paths], fn) +// When the fn is first in line for all its paths, it +// is called with a cb that clears the reservation. +// +// Used by async unpack to avoid clobbering paths in use, +// while still allowing maximal safe parallelization. 
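A minimal sketch of how the reservation queue behaves, assuming the PathReservations class defined below. The function passed to reserve() receives a release callback; note that on Windows all reservations collapse to a single key, so everything serializes:

```js
import { PathReservations } from './path-reservations.js'

const r = new PathReservations()

// a direct reservation on the path 'a/b' itself (say, creating the dir)
r.reserve(['a/b'], done => {
  setTimeout(() => {
    console.log('a/b claim released')
    done() // clears the reservation and wakes anything queued behind it
  }, 50)
})

// 'a/b/file' implies a directory reservation on 'a/b', which queues
// behind the exclusive claim above, so this runs only after done()
r.reserve(['a/b/file'], done => {
  console.log('runs second')
  done()
})
```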
+import { join } from 'node:path'; +import { normalizeUnicode } from './normalize-unicode.js'; +import { stripTrailingSlashes } from './strip-trailing-slashes.js'; +const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform; +const isWindows = platform === 'win32'; +// return a set of parent dirs for a given path +// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d'] +const getDirs = (path) => { + const dirs = path + .split('/') + .slice(0, -1) + .reduce((set, path) => { + const s = set[set.length - 1]; + if (s !== undefined) { + path = join(s, path); + } + set.push(path || '/'); + return set; + }, []); + return dirs; +}; +export class PathReservations { + // path => [function or Set] + // A Set object means a directory reservation + // A fn is a direct reservation on that path + #queues = new Map(); + // fn => {paths:[path,...], dirs:[path, ...]} + #reservations = new Map(); + // functions currently running + #running = new Set(); + reserve(paths, fn) { + paths = + isWindows ? + ['win32 parallelization disabled'] + : paths.map(p => { + // don't need normPath, because we skip this entirely for windows + return stripTrailingSlashes(join(normalizeUnicode(p))).toLowerCase(); + }); + const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))); + this.#reservations.set(fn, { dirs, paths }); + for (const p of paths) { + const q = this.#queues.get(p); + if (!q) { + this.#queues.set(p, [fn]); + } + else { + q.push(fn); + } + } + for (const dir of dirs) { + const q = this.#queues.get(dir); + if (!q) { + this.#queues.set(dir, [new Set([fn])]); + } + else { + const l = q[q.length - 1]; + if (l instanceof Set) { + l.add(fn); + } + else { + q.push(new Set([fn])); + } + } + } + return this.#run(fn); + } + // return the queues for each path the function cares about + // fn => {paths, dirs} + #getQueues(fn) { + const res = this.#reservations.get(fn); + /* c8 ignore start */ + if (!res) { + throw new Error('function does not have any path reservations'); + } + /* c8 ignore stop */ + return { + paths: res.paths.map((path) => this.#queues.get(path)), + dirs: [...res.dirs].map(path => this.#queues.get(path)), + }; + } + // check if fn is first in line for all its paths, and is + // included in the first set for all its dir queues + check(fn) { + const { paths, dirs } = this.#getQueues(fn); + return (paths.every(q => q && q[0] === fn) && + dirs.every(q => q && q[0] instanceof Set && q[0].has(fn))); + } + // run the function if it's first in line and not already running + #run(fn) { + if (this.#running.has(fn) || !this.check(fn)) { + return false; + } + this.#running.add(fn); + fn(() => this.#clear(fn)); + return true; + } + #clear(fn) { + if (!this.#running.has(fn)) { + return false; + } + const res = this.#reservations.get(fn); + /* c8 ignore start */ + if (!res) { + throw new Error('invalid reservation'); + } + /* c8 ignore stop */ + const { paths, dirs } = res; + const next = new Set(); + for (const path of paths) { + const q = this.#queues.get(path); + /* c8 ignore start */ + if (!q || q?.[0] !== fn) { + continue; + } + /* c8 ignore stop */ + const q0 = q[1]; + if (!q0) { + this.#queues.delete(path); + continue; + } + q.shift(); + if (typeof q0 === 'function') { + next.add(q0); + } + else { + for (const f of q0) { + next.add(f); + } + } + } + for (const dir of dirs) { + const q = this.#queues.get(dir); + const q0 = q?.[0]; + /* c8 ignore next - type safety only */ + if (!q || !(q0 instanceof Set)) + continue; + if (q0.size === 1 && q.length === 1) { + 
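// (gloss added for clarity: fn was the only member of the only Set queued
// on this directory, so the entire queue entry can be dropped)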
this.#queues.delete(dir); + continue; + } + else if (q0.size === 1) { + q.shift(); + // next one must be a function, + // or else the Set would've been reused + const n = q[0]; + if (typeof n === 'function') { + next.add(n); + } + } + else { + q0.delete(fn); + } + } + this.#running.delete(fn); + next.forEach(fn => this.#run(fn)); + return true; + } +} +//# sourceMappingURL=path-reservations.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/pax.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/pax.js new file mode 100644 index 00000000000000..832808f344da53 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/pax.js @@ -0,0 +1,154 @@ +import { basename } from 'node:path'; +import { Header } from './header.js'; +export class Pax { + atime; + mtime; + ctime; + charset; + comment; + gid; + uid; + gname; + uname; + linkpath; + dev; + ino; + nlink; + path; + size; + mode; + global; + constructor(obj, global = false) { + this.atime = obj.atime; + this.charset = obj.charset; + this.comment = obj.comment; + this.ctime = obj.ctime; + this.dev = obj.dev; + this.gid = obj.gid; + this.global = global; + this.gname = obj.gname; + this.ino = obj.ino; + this.linkpath = obj.linkpath; + this.mtime = obj.mtime; + this.nlink = obj.nlink; + this.path = obj.path; + this.size = obj.size; + this.uid = obj.uid; + this.uname = obj.uname; + } + encode() { + const body = this.encodeBody(); + if (body === '') { + return Buffer.allocUnsafe(0); + } + const bodyLen = Buffer.byteLength(body); + // round up to 512 bytes + // add 512 for header + const bufLen = 512 * Math.ceil(1 + bodyLen / 512); + const buf = Buffer.allocUnsafe(bufLen); + // 0-fill the header section, it might not hit every field + for (let i = 0; i < 512; i++) { + buf[i] = 0; + } + new Header({ + // XXX split the path + // then the path should be PaxHeader + basename, but less than 99, + // prepend with the dirname + /* c8 ignore start */ + path: ('PaxHeader/' + basename(this.path ?? '')).slice(0, 99), + /* c8 ignore stop */ + mode: this.mode || 0o644, + uid: this.uid, + gid: this.gid, + size: bodyLen, + mtime: this.mtime, + type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader', + linkpath: '', + uname: this.uname || '', + gname: this.gname || '', + devmaj: 0, + devmin: 0, + atime: this.atime, + ctime: this.ctime, + }).encode(buf); + buf.write(body, 512, bodyLen, 'utf8'); + // null pad after the body + for (let i = bodyLen + 512; i < buf.length; i++) { + buf[i] = 0; + } + return buf; + } + encodeBody() { + return (this.encodeField('path') + + this.encodeField('ctime') + + this.encodeField('atime') + + this.encodeField('dev') + + this.encodeField('ino') + + this.encodeField('nlink') + + this.encodeField('charset') + + this.encodeField('comment') + + this.encodeField('gid') + + this.encodeField('gname') + + this.encodeField('linkpath') + + this.encodeField('mtime') + + this.encodeField('size') + + this.encodeField('uid') + + this.encodeField('uname')); + } + encodeField(field) { + if (this[field] === undefined) { + return ''; + } + const r = this[field]; + const v = r instanceof Date ? r.getTime() / 1000 : r; + const s = ' ' + + (field === 'dev' || field === 'ino' || field === 'nlink' ? + 'SCHILY.' + : '') + + field + + '=' + + v + + '\n'; + const byteLen = Buffer.byteLength(s); + // the digits includes the length of the digits in ascii base-10 + // so if it's 9 characters, then adding 1 for the 9 makes it 10 + // which makes it 11 chars. 
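// Worked example (illustrative, not upstream commentary): the body
// ' path=foo\n' is 10 bytes; digits = floor(log10(10)) + 1 = 2, and
// 10 + 2 = 12 < 10**2, so no extra digit is needed and the record
// becomes '12 path=foo\n' -- exactly 12 bytes counting the length
// field itself.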
+ let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1; + if (byteLen + digits >= Math.pow(10, digits)) { + digits += 1; + } + const len = digits + byteLen; + return len + s; + } + static parse(str, ex, g = false) { + return new Pax(merge(parseKV(str), ex), g); + } +} +const merge = (a, b) => b ? Object.assign({}, b, a) : a; +const parseKV = (str) => str + .replace(/\n$/, '') + .split('\n') + .reduce(parseKVLine, Object.create(null)); +const parseKVLine = (set, line) => { + const n = parseInt(line, 10); + // XXX Values with \n in them will fail this. + // Refactor to not be a naive line-by-line parse. + if (n !== Buffer.byteLength(line) + 1) { + return set; + } + line = line.slice((n + ' ').length); + const kv = line.split('='); + const r = kv.shift(); + if (!r) { + return set; + } + const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1'); + const v = kv.join('='); + set[k] = + /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ? + new Date(Number(v) * 1000) + : /^[0-9]+$/.test(v) ? +v + : v; + return set; +}; +//# sourceMappingURL=pax.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/read-entry.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/read-entry.js new file mode 100644 index 00000000000000..23cc673e610879 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/read-entry.js @@ -0,0 +1,136 @@ +import { Minipass } from 'minipass'; +import { normalizeWindowsPath } from './normalize-windows-path.js'; +export class ReadEntry extends Minipass { + extended; + globalExtended; + header; + startBlockSize; + blockRemain; + remain; + type; + meta = false; + ignore = false; + path; + mode; + uid; + gid; + uname; + gname; + size = 0; + mtime; + atime; + ctime; + linkpath; + dev; + ino; + nlink; + invalid = false; + absolute; + unsupported = false; + constructor(header, ex, gex) { + super({}); + // read entries always start life paused. this is to avoid the + // situation where Minipass's auto-ending empty streams results + // in an entry ending before we're ready for it. + this.pause(); + this.extended = ex; + this.globalExtended = gex; + this.header = header; + /* c8 ignore start */ + this.remain = header.size ?? 0; + /* c8 ignore stop */ + this.startBlockSize = 512 * Math.ceil(this.remain / 512); + this.blockRemain = this.startBlockSize; + this.type = header.type; + switch (this.type) { + case 'File': + case 'OldFile': + case 'Link': + case 'SymbolicLink': + case 'CharacterDevice': + case 'BlockDevice': + case 'Directory': + case 'FIFO': + case 'ContiguousFile': + case 'GNUDumpDir': + break; + case 'NextFileHasLongLinkpath': + case 'NextFileHasLongPath': + case 'OldGnuLongPath': + case 'GlobalExtendedHeader': + case 'ExtendedHeader': + case 'OldExtendedHeader': + this.meta = true; + break; + // NOTE: gnutar and bsdtar treat unrecognized types as 'File' + // it may be worth doing the same, but with a warning. + default: + this.ignore = true; + } + /* c8 ignore start */ + if (!header.path) { + throw new Error('no path provided for tar.ReadEntry'); + } + /* c8 ignore stop */ + this.path = normalizeWindowsPath(header.path); + this.mode = header.mode; + if (this.mode) { + this.mode = this.mode & 0o7777; + } + this.uid = header.uid; + this.gid = header.gid; + this.uname = header.uname; + this.gname = header.gname; + this.size = this.remain; + this.mtime = header.mtime; + this.atime = header.atime; + this.ctime = header.ctime; + /* c8 ignore start */ + this.linkpath = + header.linkpath ? 
+ normalizeWindowsPath(header.linkpath) + : undefined; + /* c8 ignore stop */ + this.uname = header.uname; + this.gname = header.gname; + if (ex) { + this.#slurp(ex); + } + if (gex) { + this.#slurp(gex, true); + } + } + write(data) { + const writeLen = data.length; + if (writeLen > this.blockRemain) { + throw new Error('writing more to entry than is appropriate'); + } + const r = this.remain; + const br = this.blockRemain; + this.remain = Math.max(0, r - writeLen); + this.blockRemain = Math.max(0, br - writeLen); + if (this.ignore) { + return true; + } + if (r >= writeLen) { + return super.write(data); + } + // r < writeLen + return super.write(data.subarray(0, r)); + } + #slurp(ex, gex = false) { + if (ex.path) + ex.path = normalizeWindowsPath(ex.path); + if (ex.linkpath) + ex.linkpath = normalizeWindowsPath(ex.linkpath); + Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. Also, any + // null/undefined values are ignored. + return !(v === null || + v === undefined || + (k === 'path' && gex)); + }))); + } +} +//# sourceMappingURL=read-entry.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/replace.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/replace.js new file mode 100644 index 00000000000000..c461a4c7d8b63c --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/replace.js @@ -0,0 +1,225 @@ +// tar -r +import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass'; +import fs from 'node:fs'; +import path from 'node:path'; +import { Header } from './header.js'; +import { list } from './list.js'; +import { makeCommand } from './make-command.js'; +import { isFile, } from './options.js'; +import { Pack, PackSync } from './pack.js'; +// starting at the head of the file, read a Header +// If the checksum is invalid, that's our position to start writing +// If it is, jump forward by the specified size (round up to 512) +// and try again. +// Write the new Pack stream starting there. 
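// Worked example (illustrative, not upstream commentary): for an archive
// holding a single 1000-byte file, the header at offset 0 parses with a
// valid checksum; its body occupies 512 * Math.ceil(1000 / 512) === 1024
// bytes, so the scan resumes at offset 1536, finds the zeroed
// end-of-archive blocks there (checksum invalid), and the new Pack stream
// is written starting at 1536.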
+const replaceSync = (opt, files) => { + const p = new PackSync(opt); + let threw = true; + let fd; + let position; + try { + try { + fd = fs.openSync(opt.file, 'r+'); + } + catch (er) { + if (er?.code === 'ENOENT') { + fd = fs.openSync(opt.file, 'w+'); + } + else { + throw er; + } + } + const st = fs.fstatSync(fd); + const headBuf = Buffer.alloc(512); + POSITION: for (position = 0; position < st.size; position += 512) { + for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) { + bytes = fs.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos); + if (position === 0 && + headBuf[0] === 0x1f && + headBuf[1] === 0x8b) { + throw new Error('cannot append to compressed archives'); + } + if (!bytes) { + break POSITION; + } + } + const h = new Header(headBuf); + if (!h.cksumValid) { + break; + } + const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512); + if (position + entryBlockSize + 512 > st.size) { + break; + } + // the 512 for the header we just parsed will be added as well + // also jump ahead all the blocks for the body + position += entryBlockSize; + if (opt.mtimeCache && h.mtime) { + opt.mtimeCache.set(String(h.path), h.mtime); + } + } + threw = false; + streamSync(opt, p, position, fd, files); + } + finally { + if (threw) { + try { + fs.closeSync(fd); + } + catch (er) { } + } + } +}; +const streamSync = (opt, p, position, fd, files) => { + const stream = new WriteStreamSync(opt.file, { + fd: fd, + start: position, + }); + p.pipe(stream); + addFilesSync(p, files); +}; +const replaceAsync = (opt, files) => { + files = Array.from(files); + const p = new Pack(opt); + const getPos = (fd, size, cb_) => { + const cb = (er, pos) => { + if (er) { + fs.close(fd, _ => cb_(er)); + } + else { + cb_(null, pos); + } + }; + let position = 0; + if (size === 0) { + return cb(null, 0); + } + let bufPos = 0; + const headBuf = Buffer.alloc(512); + const onread = (er, bytes) => { + if (er || typeof bytes === 'undefined') { + return cb(er); + } + bufPos += bytes; + if (bufPos < 512 && bytes) { + return fs.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread); + } + if (position === 0 && + headBuf[0] === 0x1f && + headBuf[1] === 0x8b) { + return cb(new Error('cannot append to compressed archives')); + } + // truncated header + if (bufPos < 512) { + return cb(null, position); + } + const h = new Header(headBuf); + if (!h.cksumValid) { + return cb(null, position); + } + /* c8 ignore next */ + const entryBlockSize = 512 * Math.ceil((h.size ?? 
0) / 512); + if (position + entryBlockSize + 512 > size) { + return cb(null, position); + } + position += entryBlockSize + 512; + if (position >= size) { + return cb(null, position); + } + if (opt.mtimeCache && h.mtime) { + opt.mtimeCache.set(String(h.path), h.mtime); + } + bufPos = 0; + fs.read(fd, headBuf, 0, 512, position, onread); + }; + fs.read(fd, headBuf, 0, 512, position, onread); + }; + const promise = new Promise((resolve, reject) => { + p.on('error', reject); + let flag = 'r+'; + const onopen = (er, fd) => { + if (er && er.code === 'ENOENT' && flag === 'r+') { + flag = 'w+'; + return fs.open(opt.file, flag, onopen); + } + if (er || !fd) { + return reject(er); + } + fs.fstat(fd, (er, st) => { + if (er) { + return fs.close(fd, () => reject(er)); + } + getPos(fd, st.size, (er, position) => { + if (er) { + return reject(er); + } + const stream = new WriteStream(opt.file, { + fd: fd, + start: position, + }); + p.pipe(stream); + stream.on('error', reject); + stream.on('close', resolve); + addFilesAsync(p, files); + }); + }); + }; + fs.open(opt.file, flag, onopen); + }); + return promise; +}; +const addFilesSync = (p, files) => { + files.forEach(file => { + if (file.charAt(0) === '@') { + list({ + file: path.resolve(p.cwd, file.slice(1)), + sync: true, + noResume: true, + onReadEntry: entry => p.add(entry), + }); + } + else { + p.add(file); + } + }); + p.end(); +}; +const addFilesAsync = async (p, files) => { + for (let i = 0; i < files.length; i++) { + const file = String(files[i]); + if (file.charAt(0) === '@') { + await list({ + file: path.resolve(String(p.cwd), file.slice(1)), + noResume: true, + onReadEntry: entry => p.add(entry), + }); + } + else { + p.add(file); + } + } + p.end(); +}; +export const replace = makeCommand(replaceSync, replaceAsync, +/* c8 ignore start */ +() => { + throw new TypeError('file is required'); +}, () => { + throw new TypeError('file is required'); +}, +/* c8 ignore stop */ +(opt, entries) => { + if (!isFile(opt)) { + throw new TypeError('file is required'); + } + if (opt.gzip || + opt.brotli || + opt.file.endsWith('.br') || + opt.file.endsWith('.tbr')) { + throw new TypeError('cannot append to compressed archives'); + } + if (!entries?.length) { + throw new TypeError('no paths specified to add/replace'); + } +}); +//# sourceMappingURL=replace.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/strip-absolute-path.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/strip-absolute-path.js new file mode 100644 index 00000000000000..cce5ff80b00db3 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/strip-absolute-path.js @@ -0,0 +1,25 @@ +// unix absolute paths are also absolute on win32, so we use this for both +import { win32 } from 'node:path'; +const { isAbsolute, parse } = win32; +// returns [root, stripped] +// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in +// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip / +// explicitly if it's the first character. +// drive-specific relative paths on Windows get their root stripped off even +// though they are not absolute, so `c:../foo` becomes ['c:', '../foo'] +export const stripAbsolutePath = (path) => { + let r = ''; + let parsed = parse(path); + while (isAbsolute(path) || parsed.root) { + // windows will think that //x/y/z has a "root" of //x/y/ + // but strip the //?/C:/ off of //?/C:/path + const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? 
+ '/' + : parsed.root; + path = path.slice(root.length); + r += root; + parsed = parse(path); + } + return [r, path]; +}; +//# sourceMappingURL=strip-absolute-path.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/strip-trailing-slashes.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/strip-trailing-slashes.js new file mode 100644 index 00000000000000..ace4218a7547bf --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/strip-trailing-slashes.js @@ -0,0 +1,14 @@ +// warning: extremely hot code path. +// This has been meticulously optimized for use +// within npm install on large package trees. +// Do not edit without careful benchmarking. +export const stripTrailingSlashes = (str) => { + let i = str.length - 1; + let slashesStart = -1; + while (i > -1 && str.charAt(i) === '/') { + slashesStart = i; + i--; + } + return slashesStart === -1 ? str : str.slice(0, slashesStart); +}; +//# sourceMappingURL=strip-trailing-slashes.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/symlink-error.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/symlink-error.js new file mode 100644 index 00000000000000..d31766e2e0afa0 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/symlink-error.js @@ -0,0 +1,15 @@ +export class SymlinkError extends Error { + path; + symlink; + syscall = 'symlink'; + code = 'TAR_SYMLINK_ERROR'; + constructor(symlink, path) { + super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link'); + this.symlink = symlink; + this.path = path; + } + get name() { + return 'SymlinkError'; + } +} +//# sourceMappingURL=symlink-error.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/types.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/types.js new file mode 100644 index 00000000000000..27b982ae1e0922 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/types.js @@ -0,0 +1,45 @@ +export const isCode = (c) => name.has(c); +export const isName = (c) => code.has(c); +// map types from key to human-friendly name +export const name = new Map([ + ['0', 'File'], + // same as File + ['', 'OldFile'], + ['1', 'Link'], + ['2', 'SymbolicLink'], + // Devices and FIFOs aren't fully supported + // they are parsed, but skipped when unpacking + ['3', 'CharacterDevice'], + ['4', 'BlockDevice'], + ['5', 'Directory'], + ['6', 'FIFO'], + // same as File + ['7', 'ContiguousFile'], + // pax headers + ['g', 'GlobalExtendedHeader'], + ['x', 'ExtendedHeader'], + // vendor-specific stuff + // skip + ['A', 'SolarisACL'], + // like 5, but with data, which should be skipped + ['D', 'GNUDumpDir'], + // metadata only, skip + ['I', 'Inode'], + // data = link path of next file + ['K', 'NextFileHasLongLinkpath'], + // data = path of next file + ['L', 'NextFileHasLongPath'], + // skip + ['M', 'ContinuationFile'], + // like L + ['N', 'OldGnuLongPath'], + // skip + ['S', 'SparseFile'], + // skip + ['V', 'TapeVolumeHeader'], + // like x + ['X', 'OldExtendedHeader'], +]); +// map the other direction +export const code = new Map(Array.from(name).map(kv => [kv[1], kv[0]])); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/unpack.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/unpack.js new file mode 100644 index 00000000000000..6e744cfc1a6f9f --- /dev/null +++ 
b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/unpack.js @@ -0,0 +1,888 @@ +// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet. +// but the path reservations are required to avoid race conditions where +// parallelized unpack ops may mess with one another, due to dependencies +// (like a Link depending on its target) or destructive operations (like +// clobbering an fs object to create one of a different type.) +import * as fsm from '@isaacs/fs-minipass'; +import assert from 'node:assert'; +import { randomBytes } from 'node:crypto'; +import fs from 'node:fs'; +import path from 'node:path'; +import { getWriteFlag } from './get-write-flag.js'; +import { mkdir, mkdirSync } from './mkdir.js'; +import { normalizeUnicode } from './normalize-unicode.js'; +import { normalizeWindowsPath } from './normalize-windows-path.js'; +import { Parser } from './parse.js'; +import { stripAbsolutePath } from './strip-absolute-path.js'; +import { stripTrailingSlashes } from './strip-trailing-slashes.js'; +import * as wc from './winchars.js'; +import { PathReservations } from './path-reservations.js'; +const ONENTRY = Symbol('onEntry'); +const CHECKFS = Symbol('checkFs'); +const CHECKFS2 = Symbol('checkFs2'); +const PRUNECACHE = Symbol('pruneCache'); +const ISREUSABLE = Symbol('isReusable'); +const MAKEFS = Symbol('makeFs'); +const FILE = Symbol('file'); +const DIRECTORY = Symbol('directory'); +const LINK = Symbol('link'); +const SYMLINK = Symbol('symlink'); +const HARDLINK = Symbol('hardlink'); +const UNSUPPORTED = Symbol('unsupported'); +const CHECKPATH = Symbol('checkPath'); +const MKDIR = Symbol('mkdir'); +const ONERROR = Symbol('onError'); +const PENDING = Symbol('pending'); +const PEND = Symbol('pend'); +const UNPEND = Symbol('unpend'); +const ENDED = Symbol('ended'); +const MAYBECLOSE = Symbol('maybeClose'); +const SKIP = Symbol('skip'); +const DOCHOWN = Symbol('doChown'); +const UID = Symbol('uid'); +const GID = Symbol('gid'); +const CHECKED_CWD = Symbol('checkedCwd'); +const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform; +const isWindows = platform === 'win32'; +const DEFAULT_MAX_DEPTH = 1024; +// Unlinks on Windows are not atomic. +// +// This means that if you have a file entry, followed by another +// file entry with an identical name, and you cannot re-use the file +// (because it's a hardlink, or because unlink:true is set, or it's +// Windows, which does not have useful nlink values), then the unlink +// will be committed to the disk AFTER the new file has been written +// over the old one, deleting the new file. +// +// To work around this, on Windows systems, we rename the file and then +// delete the renamed file. It's a sloppy kludge, but frankly, I do not +// know of a better way to do this, given windows' non-atomic unlink +// semantics. +// +// See: https://github.com/npm/node-tar/issues/183 +/* c8 ignore start */ +const unlinkFile = (path, cb) => { + if (!isWindows) { + return fs.unlink(path, cb); + } + const name = path + '.DELETE.' + randomBytes(16).toString('hex'); + fs.rename(path, name, er => { + if (er) { + return cb(er); + } + fs.unlink(name, cb); + }); +}; +/* c8 ignore stop */ +/* c8 ignore start */ +const unlinkFileSync = (path) => { + if (!isWindows) { + return fs.unlinkSync(path); + } + const name = path + '.DELETE.' 
+ randomBytes(16).toString('hex'); + fs.renameSync(path, name); + fs.unlinkSync(name); +}; +/* c8 ignore stop */ +// this.gid, entry.gid, this.processUid +const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a + : b !== undefined && b === b >>> 0 ? b + : c; +// clear the cache if it's a case-insensitive unicode-squashing match. +// we can't know if the current file system is case-sensitive or supports +// unicode fully, so we check for similarity on the maximally compatible +// representation. Err on the side of pruning, since all it's doing is +// preventing lstats, and it's not the end of the world if we get a false +// positive. +// Note that on windows, we always drop the entire cache whenever a +// symbolic link is encountered, because 8.3 filenames are impossible +// to reason about, and collisions are hazards rather than just failures. +const cacheKeyNormalize = (path) => stripTrailingSlashes(normalizeWindowsPath(normalizeUnicode(path))).toLowerCase(); +// remove all cache entries matching ${abs}/** +const pruneCache = (cache, abs) => { + abs = cacheKeyNormalize(abs); + for (const path of cache.keys()) { + const pnorm = cacheKeyNormalize(path); + if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) { + cache.delete(path); + } + } +}; +const dropCache = (cache) => { + for (const key of cache.keys()) { + cache.delete(key); + } +}; +export class Unpack extends Parser { + [ENDED] = false; + [CHECKED_CWD] = false; + [PENDING] = 0; + reservations = new PathReservations(); + transform; + writable = true; + readable = false; + dirCache; + uid; + gid; + setOwner; + preserveOwner; + processGid; + processUid; + maxDepth; + forceChown; + win32; + newer; + keep; + noMtime; + preservePaths; + unlink; + cwd; + strip; + processUmask; + umask; + dmode; + fmode; + chmod; + constructor(opt = {}) { + opt.ondone = () => { + this[ENDED] = true; + this[MAYBECLOSE](); + }; + super(opt); + this.transform = opt.transform; + this.dirCache = opt.dirCache || new Map(); + this.chmod = !!opt.chmod; + if (typeof opt.uid === 'number' || typeof opt.gid === 'number') { + // need both or neither + if (typeof opt.uid !== 'number' || + typeof opt.gid !== 'number') { + throw new TypeError('cannot set owner without number uid and gid'); + } + if (opt.preserveOwner) { + throw new TypeError('cannot preserve owner in archive and also set owner explicitly'); + } + this.uid = opt.uid; + this.gid = opt.gid; + this.setOwner = true; + } + else { + this.uid = undefined; + this.gid = undefined; + this.setOwner = false; + } + // default true for root + if (opt.preserveOwner === undefined && + typeof opt.uid !== 'number') { + this.preserveOwner = !!(process.getuid && process.getuid() === 0); + } + else { + this.preserveOwner = !!opt.preserveOwner; + } + this.processUid = + (this.preserveOwner || this.setOwner) && process.getuid ? + process.getuid() + : undefined; + this.processGid = + (this.preserveOwner || this.setOwner) && process.getgid ? + process.getgid() + : undefined; + // prevent excessively deep nesting of subfolders + // set to `Infinity` to remove this restriction + this.maxDepth = + typeof opt.maxDepth === 'number' ? + opt.maxDepth + : DEFAULT_MAX_DEPTH; + // mostly just for testing, but useful in some cases. + // Forcibly trigger a chown on every entry, no matter what + this.forceChown = opt.forceChown === true; + // turn ><?| in filenames into 0xf000-higher encoded forms + this.win32 = !!opt.win32 || isWindows; + // do not unpack over files that are newer than what's in the archive + this.newer = !!opt.newer; + // do not unpack over ANY files + this.keep = !!opt.keep; + // do not set mtime/atime of extracted entries + this.noMtime = !!opt.noMtime; + // allow .., absolute path entries, and writing through symlinks + this.preservePaths = !!opt.preservePaths; + // unlink files and links before writing. This breaks existing hard + // links, and removes symlink directories rather than erroring + this.unlink = !!opt.unlink; + this.cwd = normalizeWindowsPath(path.resolve(opt.cwd || process.cwd())); + this.strip = opt.strip ?? 0; + // if we're not chmodding, then we don't need the process umask + this.processUmask = !this.chmod ? 0 + : typeof opt.processUmask === 'number' ? opt.processUmask + : process.umask(); + this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask; + // default mode for dirs created as parents + this.dmode = opt.dmode || 0o0777 & ~this.umask; + this.fmode = opt.fmode || 0o0666 & ~this.umask; + this.on('entry', entry => this[ONENTRY](entry)); + } + // a bad or damaged archive is a warning for Parser, but an error + // when extracting.
Mark those errors as unrecoverable, because + // the Unpack contract cannot be met. + warn(code, msg, data = {}) { + if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') { + data.recoverable = false; + } + return super.warn(code, msg, data); + } + [MAYBECLOSE]() { + if (this[ENDED] && this[PENDING] === 0) { + this.emit('prefinish'); + this.emit('finish'); + this.emit('end'); + } + } + [CHECKPATH](entry) { + const p = normalizeWindowsPath(entry.path); + const parts = p.split('/'); + if (this.strip) { + if (parts.length < this.strip) { + return false; + } + if (entry.type === 'Link') { + const linkparts = normalizeWindowsPath(String(entry.linkpath)).split('/'); + if (linkparts.length >= this.strip) { + entry.linkpath = linkparts.slice(this.strip).join('/'); + } + else { + return false; + } + } + parts.splice(0, this.strip); + entry.path = parts.join('/'); + } + if (isFinite(this.maxDepth) && parts.length > this.maxDepth) { + this.warn('TAR_ENTRY_ERROR', 'path excessively deep', { + entry, + path: p, + depth: parts.length, + maxDepth: this.maxDepth, + }); + return false; + } + if (!this.preservePaths) { + if (parts.includes('..') || + /* c8 ignore next */ + (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) { + this.warn('TAR_ENTRY_ERROR', `path contains '..'`, { + entry, + path: p, + }); + return false; + } + // strip off the root + const [root, stripped] = stripAbsolutePath(p); + if (root) { + entry.path = String(stripped); + this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, { + entry, + path: p, + }); + } + } + if (path.isAbsolute(entry.path)) { + entry.absolute = normalizeWindowsPath(path.resolve(entry.path)); + } + else { + entry.absolute = normalizeWindowsPath(path.resolve(this.cwd, entry.path)); + } + // if we somehow ended up with a path that escapes the cwd, and we are + // not in preservePaths mode, then something is fishy! This should have + // been prevented above, so ignore this for coverage. + /* c8 ignore start - defense in depth */ + if (!this.preservePaths && + typeof entry.absolute === 'string' && + entry.absolute.indexOf(this.cwd + '/') !== 0 && + entry.absolute !== this.cwd) { + this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', { + entry, + path: normalizeWindowsPath(entry.path), + resolvedPath: entry.absolute, + cwd: this.cwd, + }); + return false; + } + /* c8 ignore stop */ + // an archive can set properties on the extraction directory, but it + // may not replace the cwd with a different kind of thing entirely. 
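// (gloss added for clarity: a Directory entry for the extraction root may
// still apply its mode and mtime below, but a File or Link entry aimed at
// the cwd itself is rejected here and simply resumed without extraction)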
+ if (entry.absolute === this.cwd && + entry.type !== 'Directory' && + entry.type !== 'GNUDumpDir') { + return false; + } + // only encode : chars that aren't drive letter indicators + if (this.win32) { + const { root: aRoot } = path.win32.parse(String(entry.absolute)); + entry.absolute = + aRoot + wc.encode(String(entry.absolute).slice(aRoot.length)); + const { root: pRoot } = path.win32.parse(entry.path); + entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length)); + } + return true; + } + [ONENTRY](entry) { + if (!this[CHECKPATH](entry)) { + return entry.resume(); + } + assert.equal(typeof entry.absolute, 'string'); + switch (entry.type) { + case 'Directory': + case 'GNUDumpDir': + if (entry.mode) { + entry.mode = entry.mode | 0o700; + } + // eslint-disable-next-line no-fallthrough + case 'File': + case 'OldFile': + case 'ContiguousFile': + case 'Link': + case 'SymbolicLink': + return this[CHECKFS](entry); + case 'CharacterDevice': + case 'BlockDevice': + case 'FIFO': + default: + return this[UNSUPPORTED](entry); + } + } + [ONERROR](er, entry) { + // Cwd has to exist, or else nothing works. That's serious. + // Other errors are warnings, which raise the error in strict + // mode, but otherwise continue on. + if (er.name === 'CwdError') { + this.emit('error', er); + } + else { + this.warn('TAR_ENTRY_ERROR', er, { entry }); + this[UNPEND](); + entry.resume(); + } + } + [MKDIR](dir, mode, cb) { + mkdir(normalizeWindowsPath(dir), { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode, + }, cb); + } + [DOCHOWN](entry) { + // in preserve owner mode, chown if the entry doesn't match process + // in set owner mode, chown if setting doesn't match process + return (this.forceChown || + (this.preserveOwner && + ((typeof entry.uid === 'number' && + entry.uid !== this.processUid) || + (typeof entry.gid === 'number' && + entry.gid !== this.processGid))) || + (typeof this.uid === 'number' && + this.uid !== this.processUid) || + (typeof this.gid === 'number' && this.gid !== this.processGid)); + } + [UID](entry) { + return uint32(this.uid, entry.uid, this.processUid); + } + [GID](entry) { + return uint32(this.gid, entry.gid, this.processGid); + } + [FILE](entry, fullyDone) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.fmode; + const stream = new fsm.WriteStream(String(entry.absolute), { + // slight lie, but it can be numeric flags + flags: getWriteFlag(entry.size), + mode: mode, + autoClose: false, + }); + stream.on('error', (er) => { + if (stream.fd) { + fs.close(stream.fd, () => { }); + } + // flush all the data out so that we aren't left hanging + // if the error wasn't actually fatal. otherwise the parse + // is blocked, and we never proceed. 
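// (gloss added for clarity: turning write into a no-op lets the rest of
// this entry's body drain away, so the parser can keep consuming the
// archive instead of stalling behind the failed file)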
+ stream.write = () => true; + this[ONERROR](er, entry); + fullyDone(); + }); + let actions = 1; + const done = (er) => { + if (er) { + /* c8 ignore start - we should always have a fd by now */ + if (stream.fd) { + fs.close(stream.fd, () => { }); + } + /* c8 ignore stop */ + this[ONERROR](er, entry); + fullyDone(); + return; + } + if (--actions === 0) { + if (stream.fd !== undefined) { + fs.close(stream.fd, er => { + if (er) { + this[ONERROR](er, entry); + } + else { + this[UNPEND](); + } + fullyDone(); + }); + } + } + }; + stream.on('finish', () => { + // if futimes fails, try utimes + // if utimes fails, fail with the original error + // same for fchown/chown + const abs = String(entry.absolute); + const fd = stream.fd; + if (typeof fd === 'number' && entry.mtime && !this.noMtime) { + actions++; + const atime = entry.atime || new Date(); + const mtime = entry.mtime; + fs.futimes(fd, atime, mtime, er => er ? + fs.utimes(abs, atime, mtime, er2 => done(er2 && er)) + : done()); + } + if (typeof fd === 'number' && this[DOCHOWN](entry)) { + actions++; + const uid = this[UID](entry); + const gid = this[GID](entry); + if (typeof uid === 'number' && typeof gid === 'number') { + fs.fchown(fd, uid, gid, er => er ? + fs.chown(abs, uid, gid, er2 => done(er2 && er)) + : done()); + } + } + done(); + }); + const tx = this.transform ? this.transform(entry) || entry : entry; + if (tx !== entry) { + tx.on('error', (er) => { + this[ONERROR](er, entry); + fullyDone(); + }); + entry.pipe(tx); + } + tx.pipe(stream); + } + [DIRECTORY](entry, fullyDone) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.dmode; + this[MKDIR](String(entry.absolute), mode, er => { + if (er) { + this[ONERROR](er, entry); + fullyDone(); + return; + } + let actions = 1; + const done = () => { + if (--actions === 0) { + fullyDone(); + this[UNPEND](); + entry.resume(); + } + }; + if (entry.mtime && !this.noMtime) { + actions++; + fs.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done); + } + if (this[DOCHOWN](entry)) { + actions++; + fs.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done); + } + done(); + }); + } + [UNSUPPORTED](entry) { + entry.unsupported = true; + this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry }); + entry.resume(); + } + [SYMLINK](entry, done) { + this[LINK](entry, String(entry.linkpath), 'symlink', done); + } + [HARDLINK](entry, done) { + const linkpath = normalizeWindowsPath(path.resolve(this.cwd, String(entry.linkpath))); + this[LINK](entry, linkpath, 'link', done); + } + [PEND]() { + this[PENDING]++; + } + [UNPEND]() { + this[PENDING]--; + this[MAYBECLOSE](); + } + [SKIP](entry) { + this[UNPEND](); + entry.resume(); + } + // Check if we can reuse an existing filesystem entry safely and + // overwrite it, rather than unlinking and recreating + // Windows doesn't report a useful nlink, so we just never reuse entries + [ISREUSABLE](entry, st) { + return (entry.type === 'File' && + !this.unlink && + st.isFile() && + st.nlink <= 1 && + !isWindows); + } + // check if a thing is there, and if so, try to clobber it + [CHECKFS](entry) { + this[PEND](); + const paths = [entry.path]; + if (entry.linkpath) { + paths.push(entry.linkpath); + } + this.reservations.reserve(paths, done => this[CHECKFS2](entry, done)); + } + [PRUNECACHE](entry) { + // if we are not creating a directory, and the path is in the dirCache, + // then that means we are about to delete the directory we created + // previously, and 
it is no longer going to be a directory, and neither + // is any of its children. + // If a symbolic link is encountered, all bets are off. There is no + // reasonable way to sanitize the cache in such a way we will be able to + // avoid having filesystem collisions. If this happens with a non-symlink + // entry, it'll just fail to unpack, but a symlink to a directory, using an + // 8.3 shortname or certain unicode attacks, can evade detection and lead + // to arbitrary writes to anywhere on the system. + if (entry.type === 'SymbolicLink') { + dropCache(this.dirCache); + } + else if (entry.type !== 'Directory') { + pruneCache(this.dirCache, String(entry.absolute)); + } + } + [CHECKFS2](entry, fullyDone) { + this[PRUNECACHE](entry); + const done = (er) => { + this[PRUNECACHE](entry); + fullyDone(er); + }; + const checkCwd = () => { + this[MKDIR](this.cwd, this.dmode, er => { + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + this[CHECKED_CWD] = true; + start(); + }); + }; + const start = () => { + if (entry.absolute !== this.cwd) { + const parent = normalizeWindowsPath(path.dirname(String(entry.absolute))); + if (parent !== this.cwd) { + return this[MKDIR](parent, this.dmode, er => { + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + afterMakeParent(); + }); + } + } + afterMakeParent(); + }; + const afterMakeParent = () => { + fs.lstat(String(entry.absolute), (lstatEr, st) => { + if (st && + (this.keep || + /* c8 ignore next */ + (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) { + this[SKIP](entry); + done(); + return; + } + if (lstatEr || this[ISREUSABLE](entry, st)) { + return this[MAKEFS](null, entry, done); + } + if (st.isDirectory()) { + if (entry.type === 'Directory') { + const needChmod = this.chmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode; + const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done); + if (!needChmod) { + return afterChmod(); + } + return fs.chmod(String(entry.absolute), Number(entry.mode), afterChmod); + } + // Not a dir entry, have to remove it. + // NB: the only way to end up with an entry that is the cwd + // itself, in such a way that == does not detect, is a + // tricky windows absolute path with UNC or 8.3 parts (and + // preservePaths:true, or else it will have been stripped). + // In that case, the user has opted out of path protections + // explicitly, so if they blow away the cwd, c'est la vie. + if (entry.absolute !== this.cwd) { + return fs.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done)); + } + } + // not a dir, and not reusable + // don't remove if the cwd, we want that error + if (entry.absolute === this.cwd) { + return this[MAKEFS](null, entry, done); + } + unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? 
null, entry, done)); + }); + }; + if (this[CHECKED_CWD]) { + start(); + } + else { + checkCwd(); + } + } + [MAKEFS](er, entry, done) { + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + switch (entry.type) { + case 'File': + case 'OldFile': + case 'ContiguousFile': + return this[FILE](entry, done); + case 'Link': + return this[HARDLINK](entry, done); + case 'SymbolicLink': + return this[SYMLINK](entry, done); + case 'Directory': + case 'GNUDumpDir': + return this[DIRECTORY](entry, done); + } + } + [LINK](entry, linkpath, link, done) { + // XXX: get the type ('symlink' or 'junction') for windows + fs[link](linkpath, String(entry.absolute), er => { + if (er) { + this[ONERROR](er, entry); + } + else { + this[UNPEND](); + entry.resume(); + } + done(); + }); + } +} +const callSync = (fn) => { + try { + return [null, fn()]; + } + catch (er) { + return [er, null]; + } +}; +export class UnpackSync extends Unpack { + sync = true; + [MAKEFS](er, entry) { + return super[MAKEFS](er, entry, () => { }); + } + [CHECKFS](entry) { + this[PRUNECACHE](entry); + if (!this[CHECKED_CWD]) { + const er = this[MKDIR](this.cwd, this.dmode); + if (er) { + return this[ONERROR](er, entry); + } + this[CHECKED_CWD] = true; + } + // don't bother to make the parent if the current entry is the cwd, + // we've already checked it. + if (entry.absolute !== this.cwd) { + const parent = normalizeWindowsPath(path.dirname(String(entry.absolute))); + if (parent !== this.cwd) { + const mkParent = this[MKDIR](parent, this.dmode); + if (mkParent) { + return this[ONERROR](mkParent, entry); + } + } + } + const [lstatEr, st] = callSync(() => fs.lstatSync(String(entry.absolute))); + if (st && + (this.keep || + /* c8 ignore next */ + (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) { + return this[SKIP](entry); + } + if (lstatEr || this[ISREUSABLE](entry, st)) { + return this[MAKEFS](null, entry); + } + if (st.isDirectory()) { + if (entry.type === 'Directory') { + const needChmod = this.chmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode; + const [er] = needChmod ? + callSync(() => { + fs.chmodSync(String(entry.absolute), Number(entry.mode)); + }) + : []; + return this[MAKEFS](er, entry); + } + // not a dir entry, have to remove it + const [er] = callSync(() => fs.rmdirSync(String(entry.absolute))); + this[MAKEFS](er, entry); + } + // not a dir, and not reusable. + // don't remove if it's the cwd, since we want that error. + const [er] = entry.absolute === this.cwd ? + [] + : callSync(() => unlinkFileSync(String(entry.absolute))); + this[MAKEFS](er, entry); + } + [FILE](entry, done) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.fmode; + const oner = (er) => { + let closeError; + try { + fs.closeSync(fd); + } + catch (e) { + closeError = e; + } + if (er || closeError) { + this[ONERROR](er || closeError, entry); + } + done(); + }; + let fd; + try { + fd = fs.openSync(String(entry.absolute), getWriteFlag(entry.size), mode); + } + catch (er) { + return oner(er); + } + const tx = this.transform ? 
this.transform(entry) || entry : entry; + if (tx !== entry) { + tx.on('error', (er) => this[ONERROR](er, entry)); + entry.pipe(tx); + } + tx.on('data', (chunk) => { + try { + fs.writeSync(fd, chunk, 0, chunk.length); + } + catch (er) { + oner(er); + } + }); + tx.on('end', () => { + let er = null; + // try both, falling futimes back to utimes + // if either fails, handle the first error + if (entry.mtime && !this.noMtime) { + const atime = entry.atime || new Date(); + const mtime = entry.mtime; + try { + fs.futimesSync(fd, atime, mtime); + } + catch (futimeser) { + try { + fs.utimesSync(String(entry.absolute), atime, mtime); + } + catch (utimeser) { + er = futimeser; + } + } + } + if (this[DOCHOWN](entry)) { + const uid = this[UID](entry); + const gid = this[GID](entry); + try { + fs.fchownSync(fd, Number(uid), Number(gid)); + } + catch (fchowner) { + try { + fs.chownSync(String(entry.absolute), Number(uid), Number(gid)); + } + catch (chowner) { + er = er || fchowner; + } + } + } + oner(er); + }); + } + [DIRECTORY](entry, done) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.dmode; + const er = this[MKDIR](String(entry.absolute), mode); + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + if (entry.mtime && !this.noMtime) { + try { + fs.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime); + /* c8 ignore next */ + } + catch (er) { } + } + if (this[DOCHOWN](entry)) { + try { + fs.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry))); + } + catch (er) { } + } + done(); + entry.resume(); + } + [MKDIR](dir, mode) { + try { + return mkdirSync(normalizeWindowsPath(dir), { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode, + }); + } + catch (er) { + return er; + } + } + [LINK](entry, linkpath, link, done) { + const ls = `${link}Sync`; + try { + fs[ls](linkpath, String(entry.absolute)); + done(); + entry.resume(); + } + catch (er) { + return this[ONERROR](er, entry); + } + } +} +//# sourceMappingURL=unpack.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/update.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/update.js new file mode 100644 index 00000000000000..21398e9766663d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/update.js @@ -0,0 +1,30 @@ +// tar -u +import { makeCommand } from './make-command.js'; +import { replace as r } from './replace.js'; +// just call tar.r with the filter and mtimeCache +export const update = makeCommand(r.syncFile, r.asyncFile, r.syncNoFile, r.asyncNoFile, (opt, entries = []) => { + r.validate?.(opt, entries); + mtimeFilter(opt); +}); +const mtimeFilter = (opt) => { + const filter = opt.filter; + if (!opt.mtimeCache) { + opt.mtimeCache = new Map(); + } + opt.filter = + filter ? + (path, stat) => filter(path, stat) && + !( + /* c8 ignore start */ + ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) > + (stat.mtime ?? 0)) + /* c8 ignore stop */ + ) + : (path, stat) => !( + /* c8 ignore start */ + ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) > + (stat.mtime ?? 
0)) + /* c8 ignore stop */ + ); +}; +//# sourceMappingURL=update.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/warn-method.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/warn-method.js new file mode 100644 index 00000000000000..13e798afefc85e --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/warn-method.js @@ -0,0 +1,27 @@ +export const warnMethod = (self, code, message, data = {}) => { + if (self.file) { + data.file = self.file; + } + if (self.cwd) { + data.cwd = self.cwd; + } + data.code = + (message instanceof Error && + message.code) || + code; + data.tarCode = code; + if (!self.strict && data.recoverable !== false) { + if (message instanceof Error) { + data = Object.assign(message, data); + message = message.message; + } + self.emit('warn', code, message, data); + } + else if (message instanceof Error) { + self.emit('error', Object.assign(message, data)); + } + else { + self.emit('error', Object.assign(new Error(`${code}: ${message}`), data)); + } +}; +//# sourceMappingURL=warn-method.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/winchars.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/winchars.js new file mode 100644 index 00000000000000..c41eb86d69a4bb --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/winchars.js @@ -0,0 +1,9 @@ +// When writing files on Windows, translate the characters to their +// 0xf000 higher-encoded versions. +const raw = ['|', '<', '>', '?', ':']; +const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0))); +const toWin = new Map(raw.map((char, i) => [char, win[i]])); +const toRaw = new Map(win.map((char, i) => [char, raw[i]])); +export const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s); +export const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s); +//# sourceMappingURL=winchars.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/write-entry.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/write-entry.js new file mode 100644 index 00000000000000..9028cd676b4cd2 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/write-entry.js @@ -0,0 +1,657 @@ +import fs from 'fs'; +import { Minipass } from 'minipass'; +import path from 'path'; +import { Header } from './header.js'; +import { modeFix } from './mode-fix.js'; +import { normalizeWindowsPath } from './normalize-windows-path.js'; +import { dealias, } from './options.js'; +import { Pax } from './pax.js'; +import { stripAbsolutePath } from './strip-absolute-path.js'; +import { stripTrailingSlashes } from './strip-trailing-slashes.js'; +import { warnMethod, } from './warn-method.js'; +import * as winchars from './winchars.js'; +const prefixPath = (path, prefix) => { + if (!prefix) { + return normalizeWindowsPath(path); + } + path = normalizeWindowsPath(path).replace(/^\.(\/|$)/, ''); + return stripTrailingSlashes(prefix) + '/' + path; +}; +const maxReadSize = 16 * 1024 * 1024; +const PROCESS = Symbol('process'); +const FILE = Symbol('file'); +const DIRECTORY = Symbol('directory'); +const SYMLINK = Symbol('symlink'); +const HARDLINK = Symbol('hardlink'); +const HEADER = Symbol('header'); +const READ = Symbol('read'); +const LSTAT = Symbol('lstat'); +const ONLSTAT = Symbol('onlstat'); +const ONREAD = Symbol('onread'); +const ONREADLINK = Symbol('onreadlink'); +const 
OPENFILE = Symbol('openfile'); +const ONOPENFILE = Symbol('onopenfile'); +const CLOSE = Symbol('close'); +const MODE = Symbol('mode'); +const AWAITDRAIN = Symbol('awaitDrain'); +const ONDRAIN = Symbol('ondrain'); +const PREFIX = Symbol('prefix'); +export class WriteEntry extends Minipass { + path; + portable; + myuid = (process.getuid && process.getuid()) || 0; + // until node has builtin pwnam functions, this'll have to do + myuser = process.env.USER || ''; + maxReadSize; + linkCache; + statCache; + preservePaths; + cwd; + strict; + mtime; + noPax; + noMtime; + prefix; + fd; + blockLen = 0; + blockRemain = 0; + buf; + pos = 0; + remain = 0; + length = 0; + offset = 0; + win32; + absolute; + header; + type; + linkpath; + stat; + onWriteEntry; + #hadError = false; + constructor(p, opt_ = {}) { + const opt = dealias(opt_); + super(); + this.path = normalizeWindowsPath(p); + // suppress atime, ctime, uid, gid, uname, gname + this.portable = !!opt.portable; + this.maxReadSize = opt.maxReadSize || maxReadSize; + this.linkCache = opt.linkCache || new Map(); + this.statCache = opt.statCache || new Map(); + this.preservePaths = !!opt.preservePaths; + this.cwd = normalizeWindowsPath(opt.cwd || process.cwd()); + this.strict = !!opt.strict; + this.noPax = !!opt.noPax; + this.noMtime = !!opt.noMtime; + this.mtime = opt.mtime; + this.prefix = + opt.prefix ? normalizeWindowsPath(opt.prefix) : undefined; + this.onWriteEntry = opt.onWriteEntry; + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + let pathWarn = false; + if (!this.preservePaths) { + const [root, stripped] = stripAbsolutePath(this.path); + if (root && typeof stripped === 'string') { + this.path = stripped; + pathWarn = root; + } + } + this.win32 = !!opt.win32 || process.platform === 'win32'; + if (this.win32) { + // force the \ to / normalization, since we might not *actually* + // be on windows, but want \ to be considered a path separator. + this.path = winchars.decode(this.path.replace(/\\/g, '/')); + p = p.replace(/\\/g, '/'); + } + this.absolute = normalizeWindowsPath(opt.absolute || path.resolve(this.cwd, p)); + if (this.path === '') { + this.path = './'; + } + if (pathWarn) { + this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { + entry: this, + path: pathWarn + this.path, + }); + } + const cs = this.statCache.get(this.absolute); + if (cs) { + this[ONLSTAT](cs); + } + else { + this[LSTAT](); + } + } + warn(code, message, data = {}) { + return warnMethod(this, code, message, data); + } + emit(ev, ...data) { + if (ev === 'error') { + this.#hadError = true; + } + return super.emit(ev, ...data); + } + [LSTAT]() { + fs.lstat(this.absolute, (er, stat) => { + if (er) { + return this.emit('error', er); + } + this[ONLSTAT](stat); + }); + } + [ONLSTAT](stat) { + this.statCache.set(this.absolute, stat); + this.stat = stat; + if (!stat.isFile()) { + stat.size = 0; + } + this.type = getType(stat); + this.emit('stat', stat); + this[PROCESS](); + } + [PROCESS]() { + switch (this.type) { + case 'File': + return this[FILE](); + case 'Directory': + return this[DIRECTORY](); + case 'SymbolicLink': + return this[SYMLINK](); + // unsupported types are ignored. 
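// (gloss added for clarity: FIFOs, sockets, and device files stat as
// neither file, directory, nor symlink, so they land here and the
// entry ends without ever emitting a header)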
+ default: + return this.end(); + } + } + [MODE](mode) { + return modeFix(mode, this.type === 'Directory', this.portable); + } + [PREFIX](path) { + return prefixPath(path, this.prefix); + } + [HEADER]() { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot write header before stat'); + } + /* c8 ignore stop */ + if (this.type === 'Directory' && this.portable) { + this.noMtime = true; + } + this.onWriteEntry?.(this); + this.header = new Header({ + path: this[PREFIX](this.path), + // only apply the prefix to hard links. + linkpath: this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this[MODE](this.stat.mode), + uid: this.portable ? undefined : this.stat.uid, + gid: this.portable ? undefined : this.stat.gid, + size: this.stat.size, + mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime, + /* c8 ignore next */ + type: this.type === 'Unsupported' ? undefined : this.type, + uname: this.portable ? undefined + : this.stat.uid === this.myuid ? this.myuser + : '', + atime: this.portable ? undefined : this.stat.atime, + ctime: this.portable ? undefined : this.stat.ctime, + }); + if (this.header.encode() && !this.noPax) { + super.write(new Pax({ + atime: this.portable ? undefined : this.header.atime, + ctime: this.portable ? undefined : this.header.ctime, + gid: this.portable ? undefined : this.header.gid, + mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime), + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + size: this.header.size, + uid: this.portable ? undefined : this.header.uid, + uname: this.portable ? undefined : this.header.uname, + dev: this.portable ? undefined : this.stat.dev, + ino: this.portable ? undefined : this.stat.ino, + nlink: this.portable ? 
undefined : this.stat.nlink,
+ }).encode());
+ }
+ const block = this.header?.block;
+ /* c8 ignore start */
+ if (!block) {
+ throw new Error('failed to encode header');
+ }
+ /* c8 ignore stop */
+ super.write(block);
+ }
+ [DIRECTORY]() {
+ /* c8 ignore start */
+ if (!this.stat) {
+ throw new Error('cannot create directory entry without stat');
+ }
+ /* c8 ignore stop */
+ if (this.path.slice(-1) !== '/') {
+ this.path += '/';
+ }
+ this.stat.size = 0;
+ this[HEADER]();
+ this.end();
+ }
+ [SYMLINK]() {
+ fs.readlink(this.absolute, (er, linkpath) => {
+ if (er) {
+ return this.emit('error', er);
+ }
+ this[ONREADLINK](linkpath);
+ });
+ }
+ [ONREADLINK](linkpath) {
+ this.linkpath = normalizeWindowsPath(linkpath);
+ this[HEADER]();
+ this.end();
+ }
+ [HARDLINK](linkpath) {
+ /* c8 ignore start */
+ if (!this.stat) {
+ throw new Error('cannot create link entry without stat');
+ }
+ /* c8 ignore stop */
+ this.type = 'Link';
+ this.linkpath = normalizeWindowsPath(path.relative(this.cwd, linkpath));
+ this.stat.size = 0;
+ this[HEADER]();
+ this.end();
+ }
+ [FILE]() {
+ /* c8 ignore start */
+ if (!this.stat) {
+ throw new Error('cannot create file entry without stat');
+ }
+ /* c8 ignore stop */
+ if (this.stat.nlink > 1) {
+ const linkKey = `${this.stat.dev}:${this.stat.ino}`;
+ const linkpath = this.linkCache.get(linkKey);
+ if (linkpath?.indexOf(this.cwd) === 0) {
+ return this[HARDLINK](linkpath);
+ }
+ this.linkCache.set(linkKey, this.absolute);
+ }
+ this[HEADER]();
+ if (this.stat.size === 0) {
+ return this.end();
+ }
+ this[OPENFILE]();
+ }
+ [OPENFILE]() {
+ fs.open(this.absolute, 'r', (er, fd) => {
+ if (er) {
+ return this.emit('error', er);
+ }
+ this[ONOPENFILE](fd);
+ });
+ }
+ [ONOPENFILE](fd) {
+ this.fd = fd;
+ if (this.#hadError) {
+ return this[CLOSE]();
+ }
+ /* c8 ignore start */
+ if (!this.stat) {
+ throw new Error('should stat before calling onopenfile');
+ }
+ /* c8 ignore stop */
+ this.blockLen = 512 * Math.ceil(this.stat.size / 512);
+ this.blockRemain = this.blockLen;
+ const bufLen = Math.min(this.blockLen, this.maxReadSize);
+ this.buf = Buffer.allocUnsafe(bufLen);
+ this.offset = 0;
+ this.pos = 0;
+ this.remain = this.stat.size;
+ this.length = this.buf.length;
+ this[READ]();
+ }
+ [READ]() {
+ const { fd, buf, offset, length, pos } = this;
+ if (fd === undefined || buf === undefined) {
+ throw new Error('cannot read file without first opening');
+ }
+ fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
+ if (er) {
+ // ignoring the error from close(2) is a bad practice, but at
+ // this point we already have an error, don't need another one
+ return this[CLOSE](() => this.emit('error', er));
+ }
+ this[ONREAD](bytesRead);
+ });
+ }
+ /* c8 ignore start */
+ [CLOSE](cb = () => { }) {
+ /* c8 ignore stop */
+ if (this.fd !== undefined)
+ fs.close(this.fd, cb);
+ }
+ [ONREAD](bytesRead) {
+ if (bytesRead <= 0 && this.remain > 0) {
+ const er = Object.assign(new Error('encountered unexpected EOF'), {
+ path: this.absolute,
+ syscall: 'read',
+ code: 'EOF',
+ });
+ return this[CLOSE](() => this.emit('error', er));
+ }
+ if (bytesRead > this.remain) {
+ const er = Object.assign(new Error('did not encounter expected EOF'), {
+ path: this.absolute,
+ syscall: 'read',
+ code: 'EOF',
+ });
+ return this[CLOSE](() => this.emit('error', er));
+ }
+ /* c8 ignore start */
+ if (!this.buf) {
+ throw new Error('should have created buffer prior to reading');
+ }
+ /* c8 ignore stop */
+ // null out the rest of the buffer, if we could fit the block padding.
+ // at the end of this loop, bytesRead and this.remain will both have
+ // been advanced up to the blockRemain level, as if we had expected to
+ // read a null-padded file through to its end. remain and blockRemain
+ // are then decremented by bytesRead in write(), so the accounting
+ // shows that we reached the expected EOF without any null buffer left
+ // to append.
+ if (bytesRead === this.remain) {
+ for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
+ this.buf[i + this.offset] = 0;
+ bytesRead++;
+ this.remain++;
+ }
+ }
+ const chunk = this.offset === 0 && bytesRead === this.buf.length ?
+ this.buf
+ : this.buf.subarray(this.offset, this.offset + bytesRead);
+ const flushed = this.write(chunk);
+ if (!flushed) {
+ this[AWAITDRAIN](() => this[ONDRAIN]());
+ }
+ else {
+ this[ONDRAIN]();
+ }
+ }
+ [AWAITDRAIN](cb) {
+ this.once('drain', cb);
+ }
+ write(chunk, encoding, cb) {
+ /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
+ if (typeof encoding === 'function') {
+ cb = encoding;
+ encoding = undefined;
+ }
+ if (typeof chunk === 'string') {
+ chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
+ }
+ /* c8 ignore stop */
+ if (this.blockRemain < chunk.length) {
+ const er = Object.assign(new Error('writing more data than expected'), {
+ path: this.absolute,
+ });
+ return this.emit('error', er);
+ }
+ this.remain -= chunk.length;
+ this.blockRemain -= chunk.length;
+ this.pos += chunk.length;
+ this.offset += chunk.length;
+ return super.write(chunk, null, cb);
+ }
+ [ONDRAIN]() {
+ if (!this.remain) {
+ if (this.blockRemain) {
+ super.write(Buffer.alloc(this.blockRemain));
+ }
+ return this[CLOSE](er => er ? this.emit('error', er) : this.end());
+ }
+ /* c8 ignore start */
+ if (!this.buf) {
+ throw new Error('buffer lost somehow in ONDRAIN');
+ }
+ /* c8 ignore stop */
+ if (this.offset >= this.length) {
+ // if we only have a smaller bit left to read, alloc a smaller buffer
+ // otherwise, keep it the same length it was before.
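+ // (editorial note, not in the upstream tar source: allocUnsafe is safe
+ // here because every byte that is later written out is either filled by
+ // fs.read or explicitly zeroed by the padding loop in ONREAD before the
+ // chunk is sliced off.)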
+ this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length)); + this.offset = 0; + } + this.length = this.buf.length - this.offset; + this[READ](); + } +} +export class WriteEntrySync extends WriteEntry { + sync = true; + [LSTAT]() { + this[ONLSTAT](fs.lstatSync(this.absolute)); + } + [SYMLINK]() { + this[ONREADLINK](fs.readlinkSync(this.absolute)); + } + [OPENFILE]() { + this[ONOPENFILE](fs.openSync(this.absolute, 'r')); + } + [READ]() { + let threw = true; + try { + const { fd, buf, offset, length, pos } = this; + /* c8 ignore start */ + if (fd === undefined || buf === undefined) { + throw new Error('fd and buf must be set in READ method'); + } + /* c8 ignore stop */ + const bytesRead = fs.readSync(fd, buf, offset, length, pos); + this[ONREAD](bytesRead); + threw = false; + } + finally { + // ignoring the error from close(2) is a bad practice, but at + // this point we already have an error, don't need another one + if (threw) { + try { + this[CLOSE](() => { }); + } + catch (er) { } + } + } + } + [AWAITDRAIN](cb) { + cb(); + } + /* c8 ignore start */ + [CLOSE](cb = () => { }) { + /* c8 ignore stop */ + if (this.fd !== undefined) + fs.closeSync(this.fd); + cb(); + } +} +export class WriteEntryTar extends Minipass { + blockLen = 0; + blockRemain = 0; + buf = 0; + pos = 0; + remain = 0; + length = 0; + preservePaths; + portable; + strict; + noPax; + noMtime; + readEntry; + type; + prefix; + path; + mode; + uid; + gid; + uname; + gname; + header; + mtime; + atime; + ctime; + linkpath; + size; + onWriteEntry; + warn(code, message, data = {}) { + return warnMethod(this, code, message, data); + } + constructor(readEntry, opt_ = {}) { + const opt = dealias(opt_); + super(); + this.preservePaths = !!opt.preservePaths; + this.portable = !!opt.portable; + this.strict = !!opt.strict; + this.noPax = !!opt.noPax; + this.noMtime = !!opt.noMtime; + this.onWriteEntry = opt.onWriteEntry; + this.readEntry = readEntry; + const { type } = readEntry; + /* c8 ignore start */ + if (type === 'Unsupported') { + throw new Error('writing entry that should be ignored'); + } + /* c8 ignore stop */ + this.type = type; + if (this.type === 'Directory' && this.portable) { + this.noMtime = true; + } + this.prefix = opt.prefix; + this.path = normalizeWindowsPath(readEntry.path); + this.mode = + readEntry.mode !== undefined ? + this[MODE](readEntry.mode) + : undefined; + this.uid = this.portable ? undefined : readEntry.uid; + this.gid = this.portable ? undefined : readEntry.gid; + this.uname = this.portable ? undefined : readEntry.uname; + this.gname = this.portable ? undefined : readEntry.gname; + this.size = readEntry.size; + this.mtime = + this.noMtime ? undefined : opt.mtime || readEntry.mtime; + this.atime = this.portable ? undefined : readEntry.atime; + this.ctime = this.portable ? undefined : readEntry.ctime; + this.linkpath = + readEntry.linkpath !== undefined ? + normalizeWindowsPath(readEntry.linkpath) + : undefined; + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + let pathWarn = false; + if (!this.preservePaths) { + const [root, stripped] = stripAbsolutePath(this.path); + if (root && typeof stripped === 'string') { + this.path = stripped; + pathWarn = root; + } + } + this.remain = readEntry.size; + this.blockRemain = readEntry.startBlockSize; + this.onWriteEntry?.(this); + this.header = new Header({ + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' && this.linkpath !== undefined ? 
+ this[PREFIX](this.linkpath) + : this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this.mode, + uid: this.portable ? undefined : this.uid, + gid: this.portable ? undefined : this.gid, + size: this.size, + mtime: this.noMtime ? undefined : this.mtime, + type: this.type, + uname: this.portable ? undefined : this.uname, + atime: this.portable ? undefined : this.atime, + ctime: this.portable ? undefined : this.ctime, + }); + if (pathWarn) { + this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { + entry: this, + path: pathWarn + this.path, + }); + } + if (this.header.encode() && !this.noPax) { + super.write(new Pax({ + atime: this.portable ? undefined : this.atime, + ctime: this.portable ? undefined : this.ctime, + gid: this.portable ? undefined : this.gid, + mtime: this.noMtime ? undefined : this.mtime, + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + size: this.size, + uid: this.portable ? undefined : this.uid, + uname: this.portable ? undefined : this.uname, + dev: this.portable ? undefined : this.readEntry.dev, + ino: this.portable ? undefined : this.readEntry.ino, + nlink: this.portable ? undefined : this.readEntry.nlink, + }).encode()); + } + const b = this.header?.block; + /* c8 ignore start */ + if (!b) + throw new Error('failed to encode header'); + /* c8 ignore stop */ + super.write(b); + readEntry.pipe(this); + } + [PREFIX](path) { + return prefixPath(path, this.prefix); + } + [MODE](mode) { + return modeFix(mode, this.type === 'Directory', this.portable); + } + write(chunk, encoding, cb) { + /* c8 ignore start - just junk to comply with NodeJS.WritableStream */ + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8'); + } + /* c8 ignore stop */ + const writeLen = chunk.length; + if (writeLen > this.blockRemain) { + throw new Error('writing more to entry than is appropriate'); + } + this.blockRemain -= writeLen; + return super.write(chunk, cb); + } + end(chunk, encoding, cb) { + if (this.blockRemain) { + super.write(Buffer.alloc(this.blockRemain)); + } + /* c8 ignore start - just junk to comply with NodeJS.WritableStream */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding ?? 'utf8'); + } + if (cb) + this.once('finish', cb); + chunk ? super.end(chunk, cb) : super.end(cb); + /* c8 ignore stop */ + return this; + } +} +const getType = (stat) => stat.isFile() ? 'File' + : stat.isDirectory() ? 'Directory' + : stat.isSymbolicLink() ? 'SymbolicLink' + : 'Unsupported'; +//# sourceMappingURL=write-entry.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/package.json b/deps/npm/node_modules/cacache/node_modules/tar/package.json new file mode 100644 index 00000000000000..0283103ee9eaf9 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/package.json @@ -0,0 +1,325 @@ +{ + "author": "Isaac Z. 
Schlueter",
+ "name": "tar",
+ "description": "tar for node",
+ "version": "7.4.3",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/isaacs/node-tar.git"
+ },
+ "scripts": {
+ "genparse": "node scripts/generate-parse-fixtures.js",
+ "snap": "tap",
+ "test": "tap",
+ "pretest": "npm run prepare",
+ "presnap": "npm run prepare",
+ "prepare": "tshy",
+ "preversion": "npm test",
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags",
+ "format": "prettier --write . --log-level warn",
+ "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
+ },
+ "dependencies": {
+ "@isaacs/fs-minipass": "^4.0.0",
+ "chownr": "^3.0.0",
+ "minipass": "^7.1.2",
+ "minizlib": "^3.0.1",
+ "mkdirp": "^3.0.1",
+ "yallist": "^5.0.0"
+ },
+ "devDependencies": {
+ "chmodr": "^1.2.0",
+ "end-of-stream": "^1.4.3",
+ "events-to-array": "^2.0.3",
+ "mutate-fs": "^2.1.1",
+ "nock": "^13.5.4",
+ "prettier": "^3.2.5",
+ "rimraf": "^5.0.5",
+ "tap": "^18.7.2",
+ "tshy": "^1.13.1",
+ "typedoc": "^0.25.13"
+ },
+ "license": "ISC",
+ "engines": {
+ "node": ">=18"
+ },
+ "files": [
+ "dist"
+ ],
+ "tap": {
+ "coverage-map": "map.js",
+ "timeout": 0,
+ "typecheck": true
+ },
+ "prettier": {
+ "experimentalTernaries": true,
+ "semi": false,
+ "printWidth": 70,
+ "tabWidth": 2,
+ "useTabs": false,
+ "singleQuote": true,
+ "jsxSingleQuote": false,
+ "bracketSameLine": true,
+ "arrowParens": "avoid",
+ "endOfLine": "lf"
+ },
+ "tshy": {
+ "exports": {
+ "./package.json": "./package.json",
+ ".": "./src/index.ts",
+ "./c": "./src/create.ts",
+ "./create": "./src/create.ts",
+ "./replace": "./src/create.ts",
+ "./r": "./src/create.ts",
+ "./list": "./src/list.ts",
+ "./t": "./src/list.ts",
+ "./update": "./src/update.ts",
+ "./u": "./src/update.ts",
+ "./extract": "./src/extract.ts",
+ "./x": "./src/extract.ts",
+ "./pack": "./src/pack.ts",
+ "./unpack": "./src/unpack.ts",
+ "./parse": "./src/parse.ts",
+ "./read-entry": "./src/read-entry.ts",
+ "./write-entry": "./src/write-entry.ts",
+ "./header": "./src/header.ts",
+ "./pax": "./src/pax.ts",
+ "./types": "./src/types.ts"
+ }
+ },
+ "exports": {
+ "./package.json": "./package.json",
+ ".": {
+ "import": {
+ "source": "./src/index.ts",
+ "types": "./dist/esm/index.d.ts",
+ "default": "./dist/esm/index.js"
+ },
+ "require": {
+ "source": "./src/index.ts",
+ "types": "./dist/commonjs/index.d.ts",
+ "default": "./dist/commonjs/index.js"
+ }
+ },
+ "./c": {
+ "import": {
+ "source": "./src/create.ts",
+ "types": "./dist/esm/create.d.ts",
+ "default": "./dist/esm/create.js"
+ },
+ "require": {
+ "source": "./src/create.ts",
+ "types": "./dist/commonjs/create.d.ts",
+ "default": "./dist/commonjs/create.js"
+ }
+ },
+ "./create": {
+ "import": {
+ "source": "./src/create.ts",
+ "types": "./dist/esm/create.d.ts",
+ "default": "./dist/esm/create.js"
+ },
+ "require": {
+ "source": "./src/create.ts",
+ "types": "./dist/commonjs/create.d.ts",
+ "default": "./dist/commonjs/create.js"
+ }
+ },
+ "./replace": {
+ "import": {
+ "source": "./src/create.ts",
+ "types": "./dist/esm/create.d.ts",
+ "default": "./dist/esm/create.js"
+ },
+ "require": {
+ "source": "./src/create.ts",
+ "types": "./dist/commonjs/create.d.ts",
+ "default": "./dist/commonjs/create.js"
+ }
+ },
+ "./r": {
+ "import": {
+ "source": "./src/create.ts",
+ "types": "./dist/esm/create.d.ts",
+ "default": "./dist/esm/create.js"
+ },
+ "require": {
+ "source": "./src/create.ts",
+ "types": "./dist/commonjs/create.d.ts",
+ "default": "./dist/commonjs/create.js"
+ }
+ },
+ "./list": {
+ "import": {
+ "source": "./src/list.ts",
+ "types": "./dist/esm/list.d.ts",
+ "default": "./dist/esm/list.js"
+ },
+ "require": {
+ "source": "./src/list.ts",
+ "types": "./dist/commonjs/list.d.ts",
+ "default": "./dist/commonjs/list.js"
+ }
+ },
+ "./t": {
+ "import": {
+ "source": "./src/list.ts",
+ "types": "./dist/esm/list.d.ts",
+ "default": "./dist/esm/list.js"
+ },
+ "require": {
+ "source": "./src/list.ts",
+ "types": "./dist/commonjs/list.d.ts",
+ "default": "./dist/commonjs/list.js"
+ }
+ },
+ "./update": {
+ "import": {
+ "source": "./src/update.ts",
+ "types": "./dist/esm/update.d.ts",
+ "default": "./dist/esm/update.js"
+ },
+ "require": {
+ "source": "./src/update.ts",
+ "types": "./dist/commonjs/update.d.ts",
+ "default": "./dist/commonjs/update.js"
+ }
+ },
+ "./u": {
+ "import": {
+ "source": "./src/update.ts",
+ "types": "./dist/esm/update.d.ts",
+ "default": "./dist/esm/update.js"
+ },
+ "require": {
+ "source": "./src/update.ts",
+ "types": "./dist/commonjs/update.d.ts",
+ "default": "./dist/commonjs/update.js"
+ }
+ },
+ "./extract": {
+ "import": {
+ "source": "./src/extract.ts",
+ "types": "./dist/esm/extract.d.ts",
+ "default": "./dist/esm/extract.js"
+ },
+ "require": {
+ "source": "./src/extract.ts",
+ "types": "./dist/commonjs/extract.d.ts",
+ "default": "./dist/commonjs/extract.js"
+ }
+ },
+ "./x": {
+ "import": {
+ "source": "./src/extract.ts",
+ "types": "./dist/esm/extract.d.ts",
+ "default": "./dist/esm/extract.js"
+ },
+ "require": {
+ "source": "./src/extract.ts",
+ "types": "./dist/commonjs/extract.d.ts",
+ "default": "./dist/commonjs/extract.js"
+ }
+ },
+ "./pack": {
+ "import": {
+ "source": "./src/pack.ts",
+ "types": "./dist/esm/pack.d.ts",
+ "default": "./dist/esm/pack.js"
+ },
+ "require": {
+ "source": "./src/pack.ts",
+ "types": "./dist/commonjs/pack.d.ts",
+ "default": "./dist/commonjs/pack.js"
+ }
+ },
+ "./unpack": {
+ "import": {
+ "source": "./src/unpack.ts",
+ "types": "./dist/esm/unpack.d.ts",
+ "default": "./dist/esm/unpack.js"
+ },
+ "require": {
+ "source": "./src/unpack.ts",
+ "types": "./dist/commonjs/unpack.d.ts",
+ "default": "./dist/commonjs/unpack.js"
+ }
+ },
+ "./parse": {
+ "import": {
+ "source": "./src/parse.ts",
+ "types": "./dist/esm/parse.d.ts",
+ "default": "./dist/esm/parse.js"
+ },
+ "require": {
+ "source": "./src/parse.ts",
+ "types": "./dist/commonjs/parse.d.ts",
+ "default": "./dist/commonjs/parse.js"
+ }
+ },
+ "./read-entry": {
+ "import": {
+ "source": "./src/read-entry.ts",
+ "types": "./dist/esm/read-entry.d.ts",
+ "default": "./dist/esm/read-entry.js"
+ },
+ "require": {
+ "source": "./src/read-entry.ts",
+ "types": "./dist/commonjs/read-entry.d.ts",
+ "default": "./dist/commonjs/read-entry.js"
+ }
+ },
+ "./write-entry": {
+ "import": {
+ "source": "./src/write-entry.ts",
+ "types": "./dist/esm/write-entry.d.ts",
+ "default": "./dist/esm/write-entry.js"
+ },
+ "require": {
+ "source": "./src/write-entry.ts",
+ "types": "./dist/commonjs/write-entry.d.ts",
+ "default": "./dist/commonjs/write-entry.js"
+ }
+ },
+ "./header": {
+ "import": {
+ "source": "./src/header.ts",
+ "types": "./dist/esm/header.d.ts",
+ "default": "./dist/esm/header.js"
+ },
+ "require": {
+ "source": "./src/header.ts",
+ "types": "./dist/commonjs/header.d.ts",
+ "default": "./dist/commonjs/header.js"
+ }
+ },
+ "./pax": {
+ "import": {
+ "source": "./src/pax.ts",
+ "types": "./dist/esm/pax.d.ts",
+ "default": "./dist/esm/pax.js"
+ },
+ "require": {
+ "source": "./src/pax.ts",
+ "types": "./dist/commonjs/pax.d.ts",
+ "default": "./dist/commonjs/pax.js"
+ }
+ },
+ "./types": {
+ "import": {
+ "source": "./src/types.ts",
+ "types": "./dist/esm/types.d.ts",
+ "default": "./dist/esm/types.js"
+ },
+ "require": {
+ "source": "./src/types.ts",
+ "types": "./dist/commonjs/types.d.ts",
+ "default": "./dist/commonjs/types.js"
+ }
+ }
+ },
+ "type": "module",
+ "main": "./dist/commonjs/index.js",
+ "types": "./dist/commonjs/index.d.ts"
+}
diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/LICENSE.md b/deps/npm/node_modules/cacache/node_modules/yallist/LICENSE.md
new file mode 100644
index 00000000000000..881248b6d7f0ca
--- /dev/null
+++ b/deps/npm/node_modules/cacache/node_modules/yallist/LICENSE.md
@@ -0,0 +1,63 @@
+All packages under `src/` are licensed according to the terms in
+their respective `LICENSE` or `LICENSE.md` files.
+
+The remainder of this project is licensed under the Blue Oak
+Model License, as follows:
+
+-----
+
+# Blue Oak Model License
+
+Version 1.0.0
+
+## Purpose
+
+This license gives everyone as much permission to work with
+this software as possible, while protecting contributors
+from liability.
+
+## Acceptance
+
+In order to receive this license, you must agree to its
+rules. The rules of this license are both obligations
+under that agreement and conditions to your license.
+You must not do anything with this software that triggers
+a rule that you cannot or will not follow.
+
+## Copyright
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe that contributor's
+copyright in it.
+
+## Notices
+
+You must ensure that everyone who gets a copy of
+any part of this software from you, with or without
+changes, also gets the text of this license or a link to
+<https://blueoakcouncil.org/license/1.0.0>.
+
+## Excuse
+
+If anyone notifies you in writing that you have not
+complied with [Notices](#notices), you can keep your
+license by taking all practical steps to comply within 30
+days after the notice. If you do not do so, your license
+ends immediately.
+
+## Patent
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe any patent claims
+they can license or become able to license.
+
+## Reliability
+
+No contributor can revoke this license.
+ +## No Liability + +***As far as the law allows, this software comes as is, +without any warranty or condition, and no contributor +will be liable to anyone for any damages related to this +software or this license, under any kind of legal claim.*** diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/dist/commonjs/index.js b/deps/npm/node_modules/cacache/node_modules/yallist/dist/commonjs/index.js new file mode 100644 index 00000000000000..c1e1e4741689d9 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/yallist/dist/commonjs/index.js @@ -0,0 +1,384 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Node = exports.Yallist = void 0; +class Yallist { + tail; + head; + length = 0; + static create(list = []) { + return new Yallist(list); + } + constructor(list = []) { + for (const item of list) { + this.push(item); + } + } + *[Symbol.iterator]() { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value; + } + } + removeNode(node) { + if (node.list !== this) { + throw new Error('removing node which does not belong to this list'); + } + const next = node.next; + const prev = node.prev; + if (next) { + next.prev = prev; + } + if (prev) { + prev.next = next; + } + if (node === this.head) { + this.head = next; + } + if (node === this.tail) { + this.tail = prev; + } + this.length--; + node.next = undefined; + node.prev = undefined; + node.list = undefined; + return next; + } + unshiftNode(node) { + if (node === this.head) { + return; + } + if (node.list) { + node.list.removeNode(node); + } + const head = this.head; + node.list = this; + node.next = head; + if (head) { + head.prev = node; + } + this.head = node; + if (!this.tail) { + this.tail = node; + } + this.length++; + } + pushNode(node) { + if (node === this.tail) { + return; + } + if (node.list) { + node.list.removeNode(node); + } + const tail = this.tail; + node.list = this; + node.prev = tail; + if (tail) { + tail.next = node; + } + this.tail = node; + if (!this.head) { + this.head = node; + } + this.length++; + } + push(...args) { + for (let i = 0, l = args.length; i < l; i++) { + push(this, args[i]); + } + return this.length; + } + unshift(...args) { + for (var i = 0, l = args.length; i < l; i++) { + unshift(this, args[i]); + } + return this.length; + } + pop() { + if (!this.tail) { + return undefined; + } + const res = this.tail.value; + const t = this.tail; + this.tail = this.tail.prev; + if (this.tail) { + this.tail.next = undefined; + } + else { + this.head = undefined; + } + t.list = undefined; + this.length--; + return res; + } + shift() { + if (!this.head) { + return undefined; + } + const res = this.head.value; + const h = this.head; + this.head = this.head.next; + if (this.head) { + this.head.prev = undefined; + } + else { + this.tail = undefined; + } + h.list = undefined; + this.length--; + return res; + } + forEach(fn, thisp) { + thisp = thisp || this; + for (let walker = this.head, i = 0; !!walker; i++) { + fn.call(thisp, walker.value, i, this); + walker = walker.next; + } + } + forEachReverse(fn, thisp) { + thisp = thisp || this; + for (let walker = this.tail, i = this.length - 1; !!walker; i--) { + fn.call(thisp, walker.value, i, this); + walker = walker.prev; + } + } + get(n) { + let i = 0; + let walker = this.head; + for (; !!walker && i < n; i++) { + walker = walker.next; + } + if (i === n && !!walker) { + return walker.value; + } + } + getReverse(n) { + let i = 0; + let walker = this.tail; + for (; !!walker && i < n; i++) { + // 
abort out of the list early if we hit a cycle + walker = walker.prev; + } + if (i === n && !!walker) { + return walker.value; + } + } + map(fn, thisp) { + thisp = thisp || this; + const res = new Yallist(); + for (let walker = this.head; !!walker;) { + res.push(fn.call(thisp, walker.value, this)); + walker = walker.next; + } + return res; + } + mapReverse(fn, thisp) { + thisp = thisp || this; + var res = new Yallist(); + for (let walker = this.tail; !!walker;) { + res.push(fn.call(thisp, walker.value, this)); + walker = walker.prev; + } + return res; + } + reduce(fn, initial) { + let acc; + let walker = this.head; + if (arguments.length > 1) { + acc = initial; + } + else if (this.head) { + walker = this.head.next; + acc = this.head.value; + } + else { + throw new TypeError('Reduce of empty list with no initial value'); + } + for (var i = 0; !!walker; i++) { + acc = fn(acc, walker.value, i); + walker = walker.next; + } + return acc; + } + reduceReverse(fn, initial) { + let acc; + let walker = this.tail; + if (arguments.length > 1) { + acc = initial; + } + else if (this.tail) { + walker = this.tail.prev; + acc = this.tail.value; + } + else { + throw new TypeError('Reduce of empty list with no initial value'); + } + for (let i = this.length - 1; !!walker; i--) { + acc = fn(acc, walker.value, i); + walker = walker.prev; + } + return acc; + } + toArray() { + const arr = new Array(this.length); + for (let i = 0, walker = this.head; !!walker; i++) { + arr[i] = walker.value; + walker = walker.next; + } + return arr; + } + toArrayReverse() { + const arr = new Array(this.length); + for (let i = 0, walker = this.tail; !!walker; i++) { + arr[i] = walker.value; + walker = walker.prev; + } + return arr; + } + slice(from = 0, to = this.length) { + if (to < 0) { + to += this.length; + } + if (from < 0) { + from += this.length; + } + const ret = new Yallist(); + if (to < from || to < 0) { + return ret; + } + if (from < 0) { + from = 0; + } + if (to > this.length) { + to = this.length; + } + let walker = this.head; + let i = 0; + for (i = 0; !!walker && i < from; i++) { + walker = walker.next; + } + for (; !!walker && i < to; i++, walker = walker.next) { + ret.push(walker.value); + } + return ret; + } + sliceReverse(from = 0, to = this.length) { + if (to < 0) { + to += this.length; + } + if (from < 0) { + from += this.length; + } + const ret = new Yallist(); + if (to < from || to < 0) { + return ret; + } + if (from < 0) { + from = 0; + } + if (to > this.length) { + to = this.length; + } + let i = this.length; + let walker = this.tail; + for (; !!walker && i > to; i--) { + walker = walker.prev; + } + for (; !!walker && i > from; i--, walker = walker.prev) { + ret.push(walker.value); + } + return ret; + } + splice(start, deleteCount = 0, ...nodes) { + if (start > this.length) { + start = this.length - 1; + } + if (start < 0) { + start = this.length + start; + } + let walker = this.head; + for (let i = 0; !!walker && i < start; i++) { + walker = walker.next; + } + const ret = []; + for (let i = 0; !!walker && i < deleteCount; i++) { + ret.push(walker.value); + walker = this.removeNode(walker); + } + if (!walker) { + walker = this.tail; + } + else if (walker !== this.tail) { + walker = walker.prev; + } + for (const v of nodes) { + walker = insertAfter(this, walker, v); + } + return ret; + } + reverse() { + const head = this.head; + const tail = this.tail; + for (let walker = head; !!walker; walker = walker.prev) { + const p = walker.prev; + walker.prev = walker.next; + walker.next = p; + } + this.head = tail; + 
this.tail = head; + return this; + } +} +exports.Yallist = Yallist; +// insertAfter undefined means "make the node the new head of list" +function insertAfter(self, node, value) { + const prev = node; + const next = node ? node.next : self.head; + const inserted = new Node(value, prev, next, self); + if (inserted.next === undefined) { + self.tail = inserted; + } + if (inserted.prev === undefined) { + self.head = inserted; + } + self.length++; + return inserted; +} +function push(self, item) { + self.tail = new Node(item, self.tail, undefined, self); + if (!self.head) { + self.head = self.tail; + } + self.length++; +} +function unshift(self, item) { + self.head = new Node(item, undefined, self.head, self); + if (!self.tail) { + self.tail = self.head; + } + self.length++; +} +class Node { + list; + next; + prev; + value; + constructor(value, prev, next, list) { + this.list = list; + this.value = value; + if (prev) { + prev.next = this; + this.prev = prev; + } + else { + this.prev = undefined; + } + if (next) { + next.prev = this; + this.next = next; + } + else { + this.next = undefined; + } + } +} +exports.Node = Node; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/dist/commonjs/package.json b/deps/npm/node_modules/cacache/node_modules/yallist/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/yallist/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/dist/esm/index.js b/deps/npm/node_modules/cacache/node_modules/yallist/dist/esm/index.js new file mode 100644 index 00000000000000..3d81c5113b93a8 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/yallist/dist/esm/index.js @@ -0,0 +1,379 @@ +export class Yallist { + tail; + head; + length = 0; + static create(list = []) { + return new Yallist(list); + } + constructor(list = []) { + for (const item of list) { + this.push(item); + } + } + *[Symbol.iterator]() { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value; + } + } + removeNode(node) { + if (node.list !== this) { + throw new Error('removing node which does not belong to this list'); + } + const next = node.next; + const prev = node.prev; + if (next) { + next.prev = prev; + } + if (prev) { + prev.next = next; + } + if (node === this.head) { + this.head = next; + } + if (node === this.tail) { + this.tail = prev; + } + this.length--; + node.next = undefined; + node.prev = undefined; + node.list = undefined; + return next; + } + unshiftNode(node) { + if (node === this.head) { + return; + } + if (node.list) { + node.list.removeNode(node); + } + const head = this.head; + node.list = this; + node.next = head; + if (head) { + head.prev = node; + } + this.head = node; + if (!this.tail) { + this.tail = node; + } + this.length++; + } + pushNode(node) { + if (node === this.tail) { + return; + } + if (node.list) { + node.list.removeNode(node); + } + const tail = this.tail; + node.list = this; + node.prev = tail; + if (tail) { + tail.next = node; + } + this.tail = node; + if (!this.head) { + this.head = node; + } + this.length++; + } + push(...args) { + for (let i = 0, l = args.length; i < l; i++) { + push(this, args[i]); + } + return this.length; + } + unshift(...args) { + for (var i = 0, l = args.length; i < l; i++) { + unshift(this, args[i]); + } + return this.length; + } + pop() { + if (!this.tail) { 
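+ // empty list: head and tail are both unset, so there is nothing to pop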
+ return undefined; + } + const res = this.tail.value; + const t = this.tail; + this.tail = this.tail.prev; + if (this.tail) { + this.tail.next = undefined; + } + else { + this.head = undefined; + } + t.list = undefined; + this.length--; + return res; + } + shift() { + if (!this.head) { + return undefined; + } + const res = this.head.value; + const h = this.head; + this.head = this.head.next; + if (this.head) { + this.head.prev = undefined; + } + else { + this.tail = undefined; + } + h.list = undefined; + this.length--; + return res; + } + forEach(fn, thisp) { + thisp = thisp || this; + for (let walker = this.head, i = 0; !!walker; i++) { + fn.call(thisp, walker.value, i, this); + walker = walker.next; + } + } + forEachReverse(fn, thisp) { + thisp = thisp || this; + for (let walker = this.tail, i = this.length - 1; !!walker; i--) { + fn.call(thisp, walker.value, i, this); + walker = walker.prev; + } + } + get(n) { + let i = 0; + let walker = this.head; + for (; !!walker && i < n; i++) { + walker = walker.next; + } + if (i === n && !!walker) { + return walker.value; + } + } + getReverse(n) { + let i = 0; + let walker = this.tail; + for (; !!walker && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.prev; + } + if (i === n && !!walker) { + return walker.value; + } + } + map(fn, thisp) { + thisp = thisp || this; + const res = new Yallist(); + for (let walker = this.head; !!walker;) { + res.push(fn.call(thisp, walker.value, this)); + walker = walker.next; + } + return res; + } + mapReverse(fn, thisp) { + thisp = thisp || this; + var res = new Yallist(); + for (let walker = this.tail; !!walker;) { + res.push(fn.call(thisp, walker.value, this)); + walker = walker.prev; + } + return res; + } + reduce(fn, initial) { + let acc; + let walker = this.head; + if (arguments.length > 1) { + acc = initial; + } + else if (this.head) { + walker = this.head.next; + acc = this.head.value; + } + else { + throw new TypeError('Reduce of empty list with no initial value'); + } + for (var i = 0; !!walker; i++) { + acc = fn(acc, walker.value, i); + walker = walker.next; + } + return acc; + } + reduceReverse(fn, initial) { + let acc; + let walker = this.tail; + if (arguments.length > 1) { + acc = initial; + } + else if (this.tail) { + walker = this.tail.prev; + acc = this.tail.value; + } + else { + throw new TypeError('Reduce of empty list with no initial value'); + } + for (let i = this.length - 1; !!walker; i--) { + acc = fn(acc, walker.value, i); + walker = walker.prev; + } + return acc; + } + toArray() { + const arr = new Array(this.length); + for (let i = 0, walker = this.head; !!walker; i++) { + arr[i] = walker.value; + walker = walker.next; + } + return arr; + } + toArrayReverse() { + const arr = new Array(this.length); + for (let i = 0, walker = this.tail; !!walker; i++) { + arr[i] = walker.value; + walker = walker.prev; + } + return arr; + } + slice(from = 0, to = this.length) { + if (to < 0) { + to += this.length; + } + if (from < 0) { + from += this.length; + } + const ret = new Yallist(); + if (to < from || to < 0) { + return ret; + } + if (from < 0) { + from = 0; + } + if (to > this.length) { + to = this.length; + } + let walker = this.head; + let i = 0; + for (i = 0; !!walker && i < from; i++) { + walker = walker.next; + } + for (; !!walker && i < to; i++, walker = walker.next) { + ret.push(walker.value); + } + return ret; + } + sliceReverse(from = 0, to = this.length) { + if (to < 0) { + to += this.length; + } + if (from < 0) { + from += this.length; + } + const ret = 
new Yallist(); + if (to < from || to < 0) { + return ret; + } + if (from < 0) { + from = 0; + } + if (to > this.length) { + to = this.length; + } + let i = this.length; + let walker = this.tail; + for (; !!walker && i > to; i--) { + walker = walker.prev; + } + for (; !!walker && i > from; i--, walker = walker.prev) { + ret.push(walker.value); + } + return ret; + } + splice(start, deleteCount = 0, ...nodes) { + if (start > this.length) { + start = this.length - 1; + } + if (start < 0) { + start = this.length + start; + } + let walker = this.head; + for (let i = 0; !!walker && i < start; i++) { + walker = walker.next; + } + const ret = []; + for (let i = 0; !!walker && i < deleteCount; i++) { + ret.push(walker.value); + walker = this.removeNode(walker); + } + if (!walker) { + walker = this.tail; + } + else if (walker !== this.tail) { + walker = walker.prev; + } + for (const v of nodes) { + walker = insertAfter(this, walker, v); + } + return ret; + } + reverse() { + const head = this.head; + const tail = this.tail; + for (let walker = head; !!walker; walker = walker.prev) { + const p = walker.prev; + walker.prev = walker.next; + walker.next = p; + } + this.head = tail; + this.tail = head; + return this; + } +} +// insertAfter undefined means "make the node the new head of list" +function insertAfter(self, node, value) { + const prev = node; + const next = node ? node.next : self.head; + const inserted = new Node(value, prev, next, self); + if (inserted.next === undefined) { + self.tail = inserted; + } + if (inserted.prev === undefined) { + self.head = inserted; + } + self.length++; + return inserted; +} +function push(self, item) { + self.tail = new Node(item, self.tail, undefined, self); + if (!self.head) { + self.head = self.tail; + } + self.length++; +} +function unshift(self, item) { + self.head = new Node(item, undefined, self.head, self); + if (!self.tail) { + self.tail = self.head; + } + self.length++; +} +export class Node { + list; + next; + prev; + value; + constructor(value, prev, next, list) { + this.list = list; + this.value = value; + if (prev) { + prev.next = this; + this.prev = prev; + } + else { + this.prev = undefined; + } + if (next) { + next.prev = this; + this.next = next; + } + else { + this.next = undefined; + } + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/dist/esm/package.json b/deps/npm/node_modules/cacache/node_modules/yallist/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/yallist/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/package.json b/deps/npm/node_modules/cacache/node_modules/yallist/package.json new file mode 100644 index 00000000000000..2f5247808bbea8 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/yallist/package.json @@ -0,0 +1,68 @@ +{ + "name": "yallist", + "version": "5.0.0", + "description": "Yet Another Linked List", + "files": [ + "dist" + ], + "devDependencies": { + "prettier": "^3.2.5", + "tap": "^18.7.2", + "tshy": "^1.13.1", + "typedoc": "^0.25.13" + }, + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tshy", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "format": "prettier --write . 
--loglevel warn --ignore-path ../../.prettierignore --cache", + "typedoc": "typedoc" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/yallist.git" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "BlueOak-1.0.0", + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "type": "module", + "prettier": { + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "engines": { + "node": ">=18" + } +} diff --git a/deps/npm/node_modules/cacache/package.json b/deps/npm/node_modules/cacache/package.json index 6e6219158ed759..ebb0f3f8ed4108 100644 --- a/deps/npm/node_modules/cacache/package.json +++ b/deps/npm/node_modules/cacache/package.json @@ -1,6 +1,6 @@ { "name": "cacache", - "version": "18.0.4", + "version": "19.0.1", "cache-version": { "content": "2", "index": "5" @@ -16,13 +16,14 @@ "snap": "tap", "coverage": "tap", "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "npmclilint": "npmcli-lint", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "postsnap": "npm run lintfix --", "postlint": "template-oss-check", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -45,7 +46,7 @@ ], "license": "ISC", "dependencies": { - "@npmcli/fs": "^3.1.0", + "@npmcli/fs": "^4.0.0", "fs-minipass": "^3.0.0", "glob": "^10.2.2", "lru-cache": "^10.0.1", @@ -53,23 +54,23 @@ "minipass-collect": "^2.0.1", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" + "p-map": "^7.0.2", + "ssri": "^12.0.0", + "tar": "^7.4.3", + "unique-filename": "^4.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "windowsCI": false, - "version": "4.22.0", + "version": "4.23.3", "publish": "true" }, "author": "GitHub Inc.", diff --git a/deps/npm/node_modules/cmd-shim/package.json b/deps/npm/node_modules/cmd-shim/package.json index 094ca2df619d21..5f2e85d1c73db0 100644 --- a/deps/npm/node_modules/cmd-shim/package.json +++ b/deps/npm/node_modules/cmd-shim/package.json @@ -1,15 +1,16 @@ { "name": "cmd-shim", - "version": "6.0.3", + "version": "7.0.0", "description": "Used in npm for command line application support", "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -17,8 +18,8 @@ }, "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.1", "tap": "^16.0.1" }, "files": [ @@ -36,12 +37,12 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.1", "publish": true } } diff --git a/deps/npm/node_modules/hosted-git-info/package.json b/deps/npm/node_modules/hosted-git-info/package.json index d7eebd474f6258..3bb8bcd1f49825 100644 --- a/deps/npm/node_modules/hosted-git-info/package.json +++ b/deps/npm/node_modules/hosted-git-info/package.json @@ -1,6 +1,6 @@ { "name": "hosted-git-info", - "version": "7.0.2", + "version": "8.0.0", "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab", "main": "./lib/index.js", "repository": { @@ -24,17 +24,18 @@ "snap": "tap", "test": "tap", "test:coverage": "tap --coverage-report=html", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "template-oss-apply": "template-oss-apply --force" + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "dependencies": { "lru-cache": "^10.0.1" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "files": [ @@ -42,7 +43,7 @@ "lib/" ], "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "tap": { "color": 1, @@ -54,7 +55,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true" } } diff --git a/deps/npm/node_modules/ignore-walk/package.json b/deps/npm/node_modules/ignore-walk/package.json index f44a7a587a10bb..125fc071939dbf 100644 --- a/deps/npm/node_modules/ignore-walk/package.json +++ b/deps/npm/node_modules/ignore-walk/package.json @@ -1,23 +1,24 @@ { "name": "ignore-walk", - "version": "6.0.5", + "version": "7.0.0", "description": "Nested/recursive `.gitignore`/`.npmignore` parsing and filtering.", "main": "lib/index.js", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "mutate-fs": "^2.1.1", "tap": "^16.0.1" }, "scripts": { "test": "tap", "posttest": "npm run lint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", "test:windows-coverage": "npm pkg set tap.statements=99 --json && npm pkg set tap.branches=98 --json && npm pkg set tap.lines=99 --json", - "snap": "tap" + "snap": "tap", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "keywords": [ "ignorefile", @@ -52,11 +53,11 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "content": "scripts/template-oss", "publish": "true" } diff --git a/deps/npm/node_modules/ini/package.json b/deps/npm/node_modules/ini/package.json index 67aa9278259473..6a3995f158cc5b 100644 --- a/deps/npm/node_modules/ini/package.json +++ b/deps/npm/node_modules/ini/package.json @@ -2,16 +2,16 @@ "author": "GitHub Inc.", "name": "ini", "description": "An ini encoder/decoder for node", - "version": "4.1.3", + "version": "5.0.0", "repository": { "type": "git", "url": "git+https://github.com/npm/ini.git" }, "main": "lib/ini.js", "scripts": { - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "test": "tap", "snap": "tap", "posttest": "npm run lint", @@ -19,8 +19,8 @@ "template-oss-apply": "template-oss-apply --force" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "license": "ISC", @@ -29,11 +29,11 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true" }, "tap": { diff --git a/deps/npm/node_modules/init-package-json/package.json b/deps/npm/node_modules/init-package-json/package.json index 969e124378966e..21021ab701244b 100644 --- a/deps/npm/node_modules/init-package-json/package.json +++ b/deps/npm/node_modules/init-package-json/package.json @@ -1,15 +1,16 @@ { "name": "init-package-json", - "version": "6.0.3", + "version": "7.0.1", "main": "lib/init-package-json.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -19,22 +20,22 @@ "license": "ISC", "description": "A node module to get your node module started", "dependencies": { - "@npmcli/package-json": "^5.0.0", - "npm-package-arg": "^11.0.0", - "promzard": "^1.0.0", - "read": "^3.0.1", + "@npmcli/package-json": "^6.0.0", + "npm-package-arg": "^12.0.0", + "promzard": "^2.0.0", + "read": "^4.0.0", "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4", - "validate-npm-package-name": "^5.0.0" + "validate-npm-package-name": "^6.0.0" }, "devDependencies": { "@npmcli/config": "^8.2.0", - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "tap": { "test-ignore": "fixtures/", @@ -60,7 +61,7 @@ ], "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/json-parse-even-better-errors/package.json b/deps/npm/node_modules/json-parse-even-better-errors/package.json index c7156df325fa2b..193f0d9d459a53 100644 --- a/deps/npm/node_modules/json-parse-even-better-errors/package.json +++ b/deps/npm/node_modules/json-parse-even-better-errors/package.json @@ -1,6 +1,6 @@ { "name": "json-parse-even-better-errors", - "version": "3.0.2", + "version": "4.0.0", "description": "JSON.parse with context information on error", "main": "lib/index.js", "files": [ @@ -10,11 +10,12 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -27,8 +28,8 @@ "author": "GitHub Inc.", "license": "MIT", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "tap": { @@ -39,11 +40,11 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/libnpmaccess/package.json b/deps/npm/node_modules/libnpmaccess/package.json index b9d92e4eda6674..0022437adadc60 100644 --- a/deps/npm/node_modules/libnpmaccess/package.json +++ b/deps/npm/node_modules/libnpmaccess/package.json @@ -1,6 +1,6 @@ { "name": "libnpmaccess", - "version": "8.0.6", + "version": "9.0.0", "description": "programmatic library for `npm access` commands", "author": "GitHub Inc.", "license": "ISC", @@ -16,7 +16,7 @@ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.23.3", "nock": "^13.3.3", @@ -30,11 +30,11 @@ "bugs": "https://github.com/npm/libnpmaccess/issues", "homepage": "https://npmjs.com/package/libnpmaccess", "dependencies": { - "npm-package-arg": "^11.0.2", - "npm-registry-fetch": "^17.0.1" + "npm-package-arg": "^12.0.0", + "npm-registry-fetch": "^18.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "files": [ "bin/", diff --git a/deps/npm/node_modules/libnpmdiff/package.json b/deps/npm/node_modules/libnpmdiff/package.json index 0e6d23d54d1c7a..ccb499e78ff66d 100644 --- a/deps/npm/node_modules/libnpmdiff/package.json +++ b/deps/npm/node_modules/libnpmdiff/package.json @@ -1,6 +1,6 @@ { "name": "libnpmdiff", - "version": "6.1.4", + "version": "7.0.0", "description": "The registry diff", "repository": { "type": "git", @@ -13,7 +13,7 @@ "lib/" ], "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "keywords": [ "npm", @@ -42,18 +42,18 @@ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "tap": "^16.3.8" }, "dependencies": { - "@npmcli/arborist": "^7.5.4", - "@npmcli/installed-package-contents": "^2.1.0", + "@npmcli/arborist": "^8.0.0", + "@npmcli/installed-package-contents": "^3.0.0", "binary-extensions": "^2.3.0", "diff": "^5.1.0", "minimatch": "^9.0.4", - "npm-package-arg": "^11.0.2", - "pacote": "^18.0.6", + "npm-package-arg": "^12.0.0", + "pacote": "^19.0.0", "tar": "^6.2.1" }, "templateOSS": { diff --git a/deps/npm/node_modules/libnpmexec/package.json b/deps/npm/node_modules/libnpmexec/package.json index 627af537b4240b..497b971a0841bc 100644 --- a/deps/npm/node_modules/libnpmexec/package.json +++ b/deps/npm/node_modules/libnpmexec/package.json @@ -1,13 +1,13 @@ { "name": "libnpmexec", - "version": "8.1.4", + "version": "9.0.0", "files": [ "bin/", "lib/" ], "main": "lib/index.js", "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "description": "npm exec (npx) programmatic API", "repository": { @@ -50,24 +50,24 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.23.3", - "bin-links": "^4.0.4", + "bin-links": "^5.0.0", "chalk": "^5.2.0", "just-extend": "^6.2.0", "just-safe-set": "^4.2.1", "tap": "^16.3.8" }, "dependencies": { - "@npmcli/arborist": "^7.5.4", - "@npmcli/run-script": "^8.1.0", + "@npmcli/arborist": "^8.0.0", + "@npmcli/run-script": "^9.0.1", "ci-info": "^4.0.0", - "npm-package-arg": "^11.0.2", - "pacote": "^18.0.6", - "proc-log": "^4.2.0", - "read": "^3.0.1", - "read-package-json-fast": "^3.0.2", + "npm-package-arg": 
"^12.0.0", + "pacote": "^19.0.0", + "proc-log": "^5.0.0", + "read": "^4.0.0", + "read-package-json-fast": "^4.0.0", "semver": "^7.3.7", "walk-up-path": "^3.0.1" }, diff --git a/deps/npm/node_modules/libnpmfund/package.json b/deps/npm/node_modules/libnpmfund/package.json index 03fe5f9b4e5a5f..07c1e33f2a7c38 100644 --- a/deps/npm/node_modules/libnpmfund/package.json +++ b/deps/npm/node_modules/libnpmfund/package.json @@ -1,6 +1,6 @@ { "name": "libnpmfund", - "version": "5.0.12", + "version": "6.0.0", "main": "lib/index.js", "files": [ "bin/", @@ -41,15 +41,15 @@ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "tap": "^16.3.8" }, "dependencies": { - "@npmcli/arborist": "^7.5.4" + "@npmcli/arborist": "^8.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", diff --git a/deps/npm/node_modules/libnpmhook/package.json b/deps/npm/node_modules/libnpmhook/package.json index 741cf8a975b0c2..09157ab08cb209 100644 --- a/deps/npm/node_modules/libnpmhook/package.json +++ b/deps/npm/node_modules/libnpmhook/package.json @@ -1,6 +1,6 @@ { "name": "libnpmhook", - "version": "10.0.5", + "version": "11.0.0", "description": "programmatic API for managing npm registry hooks", "main": "lib/index.js", "files": [ @@ -32,16 +32,16 @@ "license": "ISC", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^17.0.1" + "npm-registry-fetch": "^18.0.1" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "nock": "^13.3.3", "tap": "^16.3.8" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", diff --git a/deps/npm/node_modules/libnpmorg/package.json b/deps/npm/node_modules/libnpmorg/package.json index 1076474eb97b1f..38800308a31a4e 100644 --- a/deps/npm/node_modules/libnpmorg/package.json +++ b/deps/npm/node_modules/libnpmorg/package.json @@ -1,6 +1,6 @@ { "name": "libnpmorg", - "version": "6.0.6", + "version": "7.0.0", "description": "Programmatic api for `npm org` commands", "author": "GitHub Inc.", "main": "lib/index.js", @@ -28,7 +28,7 @@ "lib/" ], "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "minipass": "^7.1.1", "nock": "^13.3.3", @@ -43,10 +43,10 @@ "homepage": "https://npmjs.com/package/libnpmorg", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^17.0.1" + "npm-registry-fetch": "^18.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", diff --git a/deps/npm/node_modules/libnpmpack/package.json b/deps/npm/node_modules/libnpmpack/package.json index 588921bbfa9655..35d12425b02ff7 100644 --- a/deps/npm/node_modules/libnpmpack/package.json +++ b/deps/npm/node_modules/libnpmpack/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpack", - "version": "7.0.4", + "version": "8.0.0", "description": "Programmatic API for the bits behind npm pack", "author": "GitHub Inc.", "main": "lib/index.js", @@ -23,7 +23,7 @@ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "nock": "^13.3.3", "spawk": "^1.7.1", @@ -37,13 +37,13 @@ "bugs": "https://github.com/npm/libnpmpack/issues", "homepage": "https://npmjs.com/package/libnpmpack", "dependencies": { - "@npmcli/arborist": "^7.5.4", - "@npmcli/run-script": "^8.1.0", - "npm-package-arg": "^11.0.2", - "pacote": "^18.0.6" + "@npmcli/arborist": "^8.0.0", + "@npmcli/run-script": "^9.0.1", + "npm-package-arg": "^12.0.0", + "pacote": "^19.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", diff --git a/deps/npm/node_modules/libnpmpublish/package.json b/deps/npm/node_modules/libnpmpublish/package.json index e19d82dd349edb..f63d50f4e7b9c1 100644 --- a/deps/npm/node_modules/libnpmpublish/package.json +++ b/deps/npm/node_modules/libnpmpublish/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpublish", - "version": "9.0.9", + "version": "10.0.0", "description": "Programmatic API for the bits behind npm publish and unpublish", "author": "GitHub Inc.", "main": "lib/index.js", @@ -24,7 +24,7 @@ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/mock-globals": "^1.0.0", "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.23.3", @@ -40,16 +40,16 @@ "homepage": "https://npmjs.com/package/libnpmpublish", "dependencies": { "ci-info": "^4.0.0", - "normalize-package-data": "^6.0.1", - "npm-package-arg": "^11.0.2", - "npm-registry-fetch": "^17.0.1", - "proc-log": "^4.2.0", + "normalize-package-data": "^7.0.0", + "npm-package-arg": "^12.0.0", + "npm-registry-fetch": "^18.0.1", + "proc-log": "^5.0.0", "semver": "^7.3.7", "sigstore": "^2.2.0", - "ssri": "^10.0.6" + "ssri": "^12.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", diff --git a/deps/npm/node_modules/libnpmsearch/package.json b/deps/npm/node_modules/libnpmsearch/package.json index d43aaa4a45459c..a5d2ae424913ef 100644 --- a/deps/npm/node_modules/libnpmsearch/package.json +++ b/deps/npm/node_modules/libnpmsearch/package.json @@ -1,6 +1,6 @@ { "name": "libnpmsearch", - "version": "7.0.6", + "version": "8.0.0", "description": "Programmatic API for searching in npm and compatible registries.", "author": "GitHub Inc.", "main": "lib/index.js", @@ -26,7 +26,7 @@ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "nock": "^13.3.3", "tap": "^16.3.8" @@ -39,10 +39,10 @@ "bugs": "https://github.com/npm/libnpmsearch/issues", "homepage": "https://npmjs.com/package/libnpmsearch", "dependencies": { - "npm-registry-fetch": "^17.0.1" + "npm-registry-fetch": "^18.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", diff --git a/deps/npm/node_modules/libnpmteam/package.json b/deps/npm/node_modules/libnpmteam/package.json index 848769961182a5..fd8f69669f15cf 100644 --- a/deps/npm/node_modules/libnpmteam/package.json +++ b/deps/npm/node_modules/libnpmteam/package.json @@ -1,7 +1,7 @@ { "name": "libnpmteam", "description": "npm Team management APIs", - "version": "6.0.5", + "version": "7.0.0", "author": "GitHub Inc.", "license": "ISC", "main": "lib/index.js", @@ -16,7 +16,7 @@ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "nock": "^13.3.3", "tap": "^16.3.8" @@ -33,10 +33,10 @@ "homepage": "https://npmjs.com/package/libnpmteam", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^17.0.1" + "npm-registry-fetch": "^18.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", diff --git a/deps/npm/node_modules/libnpmversion/package.json b/deps/npm/node_modules/libnpmversion/package.json index ccfe5a05cb30a2..cdc9e7bbdf718a 100644 --- a/deps/npm/node_modules/libnpmversion/package.json +++ b/deps/npm/node_modules/libnpmversion/package.json @@ -1,6 +1,6 @@ { "name": "libnpmversion", - "version": "6.0.3", + "version": "7.0.0", "main": "lib/index.js", "files": [ "bin/", @@ -32,20 +32,20 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "require-inject": "^1.4.4", "tap": "^16.3.8" }, "dependencies": { - "@npmcli/git": "^5.0.7", - "@npmcli/run-script": "^8.1.0", - "json-parse-even-better-errors": "^3.0.2", - "proc-log": "^4.2.0", + "@npmcli/git": "^6.0.1", + "@npmcli/run-script": "^9.0.1", + "json-parse-even-better-errors": "^4.0.0", + "proc-log": "^5.0.0", "semver": "^7.3.7" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", diff --git a/deps/npm/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/make-fetch-happen/package.json index 7adb4d1e7f9719..0868ff6d7efa54 100644 --- a/deps/npm/node_modules/make-fetch-happen/package.json +++ b/deps/npm/node_modules/make-fetch-happen/package.json @@ -1,6 +1,6 @@ { "name": "make-fetch-happen", - "version": "13.0.1", + "version": "14.0.1", "description": "Opinionated, caching, retrying fetch client", "main": "lib/index.js", "files": [ @@ -10,16 +10,16 @@ "scripts": { "test": "tap", "posttest": "npm run lint", - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "postlint": "template-oss-check", "snap": "tap", "template-oss-apply": "template-oss-apply --force" }, "repository": { "type": "git", - "url": "https://github.com/npm/make-fetch-happen.git" + "url": "git+https://github.com/npm/make-fetch-happen.git" }, "keywords": [ "http", @@ -33,29 +33,28 @@ "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "@npmcli/agent": "^2.0.0", - "cacache": "^18.0.0", + "@npmcli/agent": "^3.0.0", + "cacache": "^19.0.1", "http-cache-semantics": "^4.1.1", - "is-lambda": "^1.0.1", "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", + "minipass-fetch": "^4.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", - "proc-log": "^4.2.0", + "proc-log": "^5.0.0", "promise-retry": "^2.0.1", - "ssri": "^10.0.0" + "ssri": "^12.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.4", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "nock": "^13.2.4", "safe-buffer": "^5.2.1", "standard-version": "^9.3.2", "tap": "^16.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "tap": { "color": 1, @@ -69,7 +68,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.4", + "version": "4.23.3", "publish": "true" } } diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/LICENSE b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/LICENSE new file mode 100644 index 00000000000000..49f7efe431c9ea --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/LICENSE @@ -0,0 +1,26 @@ +Minizlib was created by Isaac Z. Schlueter. +It is a derivative work of the Node.js project. + +""" +Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors +Copyright (c) 2017-2023 Node.js contributors. All rights reserved. +Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +""" diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/constants.js b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/constants.js new file mode 100644 index 00000000000000..dfc2c1957bfc99 --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/constants.js @@ -0,0 +1,123 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.constants = void 0; +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. +const zlib_1 = __importDefault(require("zlib")); +/* c8 ignore start */ +const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 }; +/* c8 ignore stop */ +exports.constants = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + 
BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)); +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/index.js b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/index.js new file mode 100644 index 00000000000000..ad65eef0495076 --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/index.js @@ -0,0 +1,352 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0; +const assert_1 = __importDefault(require("assert")); +const buffer_1 = require("buffer"); +const minipass_1 = require("minipass"); +const zlib_1 = __importDefault(require("zlib")); +const constants_js_1 = require("./constants.js"); +var constants_js_2 = require("./constants.js"); +Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } }); +const OriginalBufferConcat = buffer_1.Buffer.concat; +const _superWrite = Symbol('_superWrite'); +class ZlibError extends Error { + code; + errno; + constructor(err) { + super('zlib: ' + err.message); + this.code = err.code; + this.errno = err.errno; + /* c8 ignore next */ + if (!this.code) + this.code = 'ZLIB_ERROR'; + this.message = 'zlib: ' + err.message; + Error.captureStackTrace(this, this.constructor); + } + get name() { + return 'ZlibError'; + } +} +exports.ZlibError = ZlibError; +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. 
+const _flushFlag = Symbol('flushFlag'); +class ZlibBase extends minipass_1.Minipass { + #sawError = false; + #ended = false; + #flushFlag; + #finishFlushFlag; + #fullFlushFlag; + #handle; + #onError; + get sawError() { + return this.#sawError; + } + get handle() { + return this.#handle; + } + /* c8 ignore start */ + get flushFlag() { + return this.#flushFlag; + } + /* c8 ignore stop */ + constructor(opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor'); + //@ts-ignore + super(opts); + /* c8 ignore start */ + this.#flushFlag = opts.flush ?? 0; + this.#finishFlushFlag = opts.finishFlush ?? 0; + this.#fullFlushFlag = opts.fullFlushFlag ?? 0; + /* c8 ignore stop */ + // this will throw if any options are invalid for the class selected + try { + // @types/node doesn't know that it exports the classes, but they're there + //@ts-ignore + this.#handle = new zlib_1.default[mode](opts); + } + catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er); + } + this.#onError = err => { + // no sense raising multiple errors, since we abort on the first one. + if (this.#sawError) + return; + this.#sawError = true; + // there is no way to cleanly recover. + // continuing only obscures problems. + this.close(); + this.emit('error', err); + }; + this.#handle?.on('error', er => this.#onError(new ZlibError(er))); + this.once('end', () => this.close); + } + close() { + if (this.#handle) { + this.#handle.close(); + this.#handle = undefined; + this.emit('close'); + } + } + reset() { + if (!this.#sawError) { + (0, assert_1.default)(this.#handle, 'zlib binding closed'); + //@ts-ignore + return this.#handle.reset?.(); + } + } + flush(flushFlag) { + if (this.ended) + return; + if (typeof flushFlag !== 'number') + flushFlag = this.#fullFlushFlag; + this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag })); + } + end(chunk, encoding, cb) { + /* c8 ignore start */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (chunk) { + if (encoding) + this.write(chunk, encoding); + else + this.write(chunk); + } + this.flush(this.#finishFlushFlag); + this.#ended = true; + return super.end(cb); + } + get ended() { + return this.#ended; + } + // overridden in the gzip classes to do portable writes + [_superWrite](data) { + return super.write(data); + } + write(chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + (cb = encoding), (encoding = 'utf8'); + if (typeof chunk === 'string') + chunk = buffer_1.Buffer.from(chunk, encoding); + if (this.#sawError) + return; + (0, assert_1.default)(this.#handle, 'zlib binding closed'); + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + // diving into the node:zlib internals a bit here + const nativeHandle = this.#handle + ._handle; + const originalNativeClose = nativeHandle.close; + nativeHandle.close = () => { }; + const originalClose = this.#handle.close; + this.#handle.close = () => { }; + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. 
+ buffer_1.Buffer.concat = args => args; + let result = undefined; + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] + : this.#flushFlag; + result = this.#handle._processChunk(chunk, flushFlag); + // if we don't throw, reset it back how it was + buffer_1.Buffer.concat = OriginalBufferConcat; + } + catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + buffer_1.Buffer.concat = OriginalBufferConcat; + this.#onError(new ZlibError(err)); + } + finally { + if (this.#handle) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + ; + this.#handle._handle = + nativeHandle; + nativeHandle.close = originalNativeClose; + this.#handle.close = originalClose; + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. + this.#handle.removeAllListeners('error'); + // make sure OUR error listener is still attached tho + } + } + if (this.#handle) + this.#handle.on('error', er => this.#onError(new ZlibError(er))); + let writeReturn; + if (result) { + if (Array.isArray(result) && result.length > 0) { + const r = result[0]; + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](buffer_1.Buffer.from(r)); + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]); + } + } + else { + // either a single Buffer or an empty array + writeReturn = this[_superWrite](buffer_1.Buffer.from(result)); + } + } + if (cb) + cb(); + return writeReturn; + } +} +class Zlib extends ZlibBase { + #level; + #strategy; + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH; + opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH; + opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH; + super(opts, mode); + this.#level = opts.level; + this.#strategy = opts.strategy; + } + params(level, strategy) { + if (this.sawError) + return; + if (!this.handle) + throw new Error('cannot switch params when binding is closed'); + // no way to test this without also not supporting params at all + /* c8 ignore start */ + if (!this.handle.params) + throw new Error('not supported in this implementation'); + /* c8 ignore stop */ + if (this.#level !== level || this.#strategy !== strategy) { + this.flush(constants_js_1.constants.Z_SYNC_FLUSH); + (0, assert_1.default)(this.handle, 'zlib binding closed'); + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. 
+ const origFlush = this.handle.flush; + this.handle.flush = (flushFlag, cb) => { + /* c8 ignore start */ + if (typeof flushFlag === 'function') { + cb = flushFlag; + flushFlag = this.flushFlag; + } + /* c8 ignore stop */ + this.flush(flushFlag); + cb?.(); + }; + try { + ; + this.handle.params(level, strategy); + } + finally { + this.handle.flush = origFlush; + } + /* c8 ignore start */ + if (this.handle) { + this.#level = level; + this.#strategy = strategy; + } + /* c8 ignore stop */ + } + } +} +exports.Zlib = Zlib; +// minimal 2-byte header +class Deflate extends Zlib { + constructor(opts) { + super(opts, 'Deflate'); + } +} +exports.Deflate = Deflate; +class Inflate extends Zlib { + constructor(opts) { + super(opts, 'Inflate'); + } +} +exports.Inflate = Inflate; +class Gzip extends Zlib { + #portable; + constructor(opts) { + super(opts, 'Gzip'); + this.#portable = opts && !!opts.portable; + } + [_superWrite](data) { + if (!this.#portable) + return super[_superWrite](data); + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this.#portable = false; + data[9] = 255; + return super[_superWrite](data); + } +} +exports.Gzip = Gzip; +class Gunzip extends Zlib { + constructor(opts) { + super(opts, 'Gunzip'); + } +} +exports.Gunzip = Gunzip; +// raw - no header +class DeflateRaw extends Zlib { + constructor(opts) { + super(opts, 'DeflateRaw'); + } +} +exports.DeflateRaw = DeflateRaw; +class InflateRaw extends Zlib { + constructor(opts) { + super(opts, 'InflateRaw'); + } +} +exports.InflateRaw = InflateRaw; +// auto-detect header. +class Unzip extends Zlib { + constructor(opts) { + super(opts, 'Unzip'); + } +} +exports.Unzip = Unzip; +class Brotli extends ZlibBase { + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS; + opts.finishFlush = + opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH; + opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH; + super(opts, mode); + } +} +exports.Brotli = Brotli; +class BrotliCompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliCompress'); + } +} +exports.BrotliCompress = BrotliCompress; +class BrotliDecompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliDecompress'); + } +} +exports.BrotliDecompress = BrotliDecompress; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/package.json b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/constants.js b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/constants.js new file mode 100644 index 00000000000000..7faf40be5068d0 --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/constants.js @@ -0,0 +1,117 @@ +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. 
+import realZlib from 'zlib'; +/* c8 ignore start */ +const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 }; +/* c8 ignore stop */ +export const constants = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)); +//# sourceMappingURL=constants.js.map \ No 
newline at end of file diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/index.js b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/index.js new file mode 100644 index 00000000000000..a6269b505f47cc --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/index.js @@ -0,0 +1,333 @@ +import assert from 'assert'; +import { Buffer } from 'buffer'; +import { Minipass } from 'minipass'; +import realZlib from 'zlib'; +import { constants } from './constants.js'; +export { constants } from './constants.js'; +const OriginalBufferConcat = Buffer.concat; +const _superWrite = Symbol('_superWrite'); +export class ZlibError extends Error { + code; + errno; + constructor(err) { + super('zlib: ' + err.message); + this.code = err.code; + this.errno = err.errno; + /* c8 ignore next */ + if (!this.code) + this.code = 'ZLIB_ERROR'; + this.message = 'zlib: ' + err.message; + Error.captureStackTrace(this, this.constructor); + } + get name() { + return 'ZlibError'; + } +} +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. +const _flushFlag = Symbol('flushFlag'); +class ZlibBase extends Minipass { + #sawError = false; + #ended = false; + #flushFlag; + #finishFlushFlag; + #fullFlushFlag; + #handle; + #onError; + get sawError() { + return this.#sawError; + } + get handle() { + return this.#handle; + } + /* c8 ignore start */ + get flushFlag() { + return this.#flushFlag; + } + /* c8 ignore stop */ + constructor(opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor'); + //@ts-ignore + super(opts); + /* c8 ignore start */ + this.#flushFlag = opts.flush ?? 0; + this.#finishFlushFlag = opts.finishFlush ?? 0; + this.#fullFlushFlag = opts.fullFlushFlag ?? 0; + /* c8 ignore stop */ + // this will throw if any options are invalid for the class selected + try { + // @types/node doesn't know that it exports the classes, but they're there + //@ts-ignore + this.#handle = new realZlib[mode](opts); + } + catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er); + } + this.#onError = err => { + // no sense raising multiple errors, since we abort on the first one. + if (this.#sawError) + return; + this.#sawError = true; + // there is no way to cleanly recover. + // continuing only obscures problems. 
+ this.close(); + this.emit('error', err); + }; + this.#handle?.on('error', er => this.#onError(new ZlibError(er))); + this.once('end', () => this.close); + } + close() { + if (this.#handle) { + this.#handle.close(); + this.#handle = undefined; + this.emit('close'); + } + } + reset() { + if (!this.#sawError) { + assert(this.#handle, 'zlib binding closed'); + //@ts-ignore + return this.#handle.reset?.(); + } + } + flush(flushFlag) { + if (this.ended) + return; + if (typeof flushFlag !== 'number') + flushFlag = this.#fullFlushFlag; + this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag })); + } + end(chunk, encoding, cb) { + /* c8 ignore start */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (chunk) { + if (encoding) + this.write(chunk, encoding); + else + this.write(chunk); + } + this.flush(this.#finishFlushFlag); + this.#ended = true; + return super.end(cb); + } + get ended() { + return this.#ended; + } + // overridden in the gzip classes to do portable writes + [_superWrite](data) { + return super.write(data); + } + write(chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + (cb = encoding), (encoding = 'utf8'); + if (typeof chunk === 'string') + chunk = Buffer.from(chunk, encoding); + if (this.#sawError) + return; + assert(this.#handle, 'zlib binding closed'); + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + // diving into the node:zlib internals a bit here + const nativeHandle = this.#handle + ._handle; + const originalNativeClose = nativeHandle.close; + nativeHandle.close = () => { }; + const originalClose = this.#handle.close; + this.#handle.close = () => { }; + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + Buffer.concat = args => args; + let result = undefined; + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] + : this.#flushFlag; + result = this.#handle._processChunk(chunk, flushFlag); + // if we don't throw, reset it back how it was + Buffer.concat = OriginalBufferConcat; + } + catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + Buffer.concat = OriginalBufferConcat; + this.#onError(new ZlibError(err)); + } + finally { + if (this.#handle) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + ; + this.#handle._handle = + nativeHandle; + nativeHandle.close = originalNativeClose; + this.#handle.close = originalClose; + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. 
+ this.#handle.removeAllListeners('error'); + // make sure OUR error listener is still attached tho + } + } + if (this.#handle) + this.#handle.on('error', er => this.#onError(new ZlibError(er))); + let writeReturn; + if (result) { + if (Array.isArray(result) && result.length > 0) { + const r = result[0]; + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](Buffer.from(r)); + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]); + } + } + else { + // either a single Buffer or an empty array + writeReturn = this[_superWrite](Buffer.from(result)); + } + } + if (cb) + cb(); + return writeReturn; + } +} +export class Zlib extends ZlibBase { + #level; + #strategy; + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants.Z_NO_FLUSH; + opts.finishFlush = opts.finishFlush || constants.Z_FINISH; + opts.fullFlushFlag = constants.Z_FULL_FLUSH; + super(opts, mode); + this.#level = opts.level; + this.#strategy = opts.strategy; + } + params(level, strategy) { + if (this.sawError) + return; + if (!this.handle) + throw new Error('cannot switch params when binding is closed'); + // no way to test this without also not supporting params at all + /* c8 ignore start */ + if (!this.handle.params) + throw new Error('not supported in this implementation'); + /* c8 ignore stop */ + if (this.#level !== level || this.#strategy !== strategy) { + this.flush(constants.Z_SYNC_FLUSH); + assert(this.handle, 'zlib binding closed'); + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. + const origFlush = this.handle.flush; + this.handle.flush = (flushFlag, cb) => { + /* c8 ignore start */ + if (typeof flushFlag === 'function') { + cb = flushFlag; + flushFlag = this.flushFlag; + } + /* c8 ignore stop */ + this.flush(flushFlag); + cb?.(); + }; + try { + ; + this.handle.params(level, strategy); + } + finally { + this.handle.flush = origFlush; + } + /* c8 ignore start */ + if (this.handle) { + this.#level = level; + this.#strategy = strategy; + } + /* c8 ignore stop */ + } + } +} +// minimal 2-byte header +export class Deflate extends Zlib { + constructor(opts) { + super(opts, 'Deflate'); + } +} +export class Inflate extends Zlib { + constructor(opts) { + super(opts, 'Inflate'); + } +} +export class Gzip extends Zlib { + #portable; + constructor(opts) { + super(opts, 'Gzip'); + this.#portable = opts && !!opts.portable; + } + [_superWrite](data) { + if (!this.#portable) + return super[_superWrite](data); + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this.#portable = false; + data[9] = 255; + return super[_superWrite](data); + } +} +export class Gunzip extends Zlib { + constructor(opts) { + super(opts, 'Gunzip'); + } +} +// raw - no header +export class DeflateRaw extends Zlib { + constructor(opts) { + super(opts, 'DeflateRaw'); + } +} +export class InflateRaw extends Zlib { + constructor(opts) { + super(opts, 'InflateRaw'); + } +} +// auto-detect header. 
+export class Unzip extends Zlib { + constructor(opts) { + super(opts, 'Unzip'); + } +} +export class Brotli extends ZlibBase { + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS; + opts.finishFlush = + opts.finishFlush || constants.BROTLI_OPERATION_FINISH; + opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH; + super(opts, mode); + } +} +export class BrotliCompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliCompress'); + } +} +export class BrotliDecompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliDecompress'); + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/package.json b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/package.json b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/package.json new file mode 100644 index 00000000000000..e94623ff43d353 --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/package.json @@ -0,0 +1,81 @@ +{ + "name": "minizlib", + "version": "3.0.1", + "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.", + "main": "./dist/commonjs/index.js", + "dependencies": { + "minipass": "^7.0.4", + "rimraf": "^5.0.5" + }, + "scripts": { + "prepare": "tshy", + "pretest": "npm run prepare", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minizlib.git" + }, + "keywords": [ + "zlib", + "gzip", + "gunzip", + "deflate", + "inflate", + "compression", + "zip", + "unzip" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "MIT", + "devDependencies": { + "@types/node": "^20.11.29", + "mkdirp": "^3.0.1", + "tap": "^18.7.1", + "tshy": "^1.12.0", + "typedoc": "^0.25.12" + }, + "files": [ + "dist" + ], + "engines": { + "node": ">= 18" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "types": "./dist/commonjs/index.d.ts", + "type": "module", + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/deps/npm/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/minipass-fetch/package.json index d491a7fba126d0..6e248345980387 100644 --- a/deps/npm/node_modules/minipass-fetch/package.json +++ b/deps/npm/node_modules/minipass-fetch/package.json @@ -1,6 +1,6 @@ { "name": "minipass-fetch", - "version": "3.0.5", + "version": "4.0.0", "description": "An implementation of window.fetch in Node.js using Minipass streams", "license": "MIT", "main": "lib/index.js", @@ -8,11 +8,12 @@ "test:tls-fixtures": "./test/fixtures/tls/setup.sh", "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "coverage-map": "map.js", @@ -23,8 +24,8 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "@ungap/url-search-params": "^0.2.2", "abort-controller": "^3.0.0", "abortcontroller-polyfill": "~1.7.3", @@ -38,7 +39,7 @@ "dependencies": { "minipass": "^7.0.3", "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" + "minizlib": "^3.0.1" }, "optionalDependencies": { "encoding": "^0.1.13" @@ -58,12 +59,12 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true" } } diff --git a/deps/npm/node_modules/mute-stream/package.json b/deps/npm/node_modules/mute-stream/package.json index 37b2f5070ed69f..3725daf0810feb 100644 --- a/deps/npm/node_modules/mute-stream/package.json +++ b/deps/npm/node_modules/mute-stream/package.json @@ -1,24 +1,25 @@ { "name": "mute-stream", - "version": "1.0.0", + "version": "2.0.0", "main": "lib/index.js", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", - "url": "https://github.com/npm/mute-stream.git" + "url": "git+https://github.com/npm/mute-stream.git" }, "keywords": [ "mute", @@ -43,10 +44,11 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.0" + "version": "4.23.3", + "publish": true } } diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/agents.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/agents.js new file mode 100644 index 00000000000000..c541b93001517e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/agents.js @@ -0,0 +1,206 @@ +'use strict' + +const net = require('net') +const tls = require('tls') +const { once } = require('events') +const timers = require('timers/promises') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js') +const Errors = require('./errors.js') +const { Agent: AgentBase } = require('agent-base') + +module.exports = class Agent extends AgentBase { + #options + #timeouts + #proxy + #noProxy + #ProxyAgent + + constructor (options = {}) { + const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options) + + super(normalizedOptions) + + this.#options = normalizedOptions + this.#timeouts = timeouts + + if (proxy) { + this.#proxy = new URL(proxy) + this.#noProxy = noProxy + this.#ProxyAgent = getProxyAgent(proxy) + } + } + + get proxy () { + return this.#proxy ? { url: this.#proxy } : {} + } + + #getProxy (options) { + if (!this.#proxy) { + return + } + + const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, { + proxy: this.#proxy, + noProxy: this.#noProxy, + }) + + if (!proxy) { + return + } + + const cacheKey = cacheOptions({ + ...options, + ...this.#options, + timeouts: this.#timeouts, + proxy, + }) + + if (proxyCache.has(cacheKey)) { + return proxyCache.get(cacheKey) + } + + let ProxyAgent = this.#ProxyAgent + if (Array.isArray(ProxyAgent)) { + ProxyAgent = this.isSecureEndpoint(options) ? 
ProxyAgent[1] : ProxyAgent[0] + } + + const proxyAgent = new ProxyAgent(proxy, { + ...this.#options, + socketOptions: { family: this.#options.family }, + }) + proxyCache.set(cacheKey, proxyAgent) + + return proxyAgent + } + + // takes an array of promises and races them against the connection timeout + // which will throw the necessary error if it is hit. This will return the + // result of the promise race. + async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) { + if (timeout) { + const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal }) + .then(() => { + throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`) + }).catch((err) => { + if (err.name === 'AbortError') { + return + } + throw err + }) + promises.push(connectionTimeout) + } + + let result + try { + result = await Promise.race(promises) + ac.abort() + } catch (err) { + ac.abort() + throw err + } + return result + } + + async connect (request, options) { + // if the connection does not have its own lookup function + // set, then use the one from our options + options.lookup ??= this.#options.lookup + + let socket + let timeout = this.#timeouts.connection + const isSecureEndpoint = this.isSecureEndpoint(options) + + const proxy = this.#getProxy(options) + if (proxy) { + // some of the proxies will wait for the socket to fully connect before + // returning so we have to await this while also racing it against the + // connection timeout. + const start = Date.now() + socket = await this.#timeoutConnection({ + options, + timeout, + promises: [proxy.connect(request, options)], + }) + // see how much time proxy.connect took and subtract it from + // the timeout + if (timeout) { + timeout = timeout - (Date.now() - start) + } + } else { + socket = (isSecureEndpoint ? tls : net).connect(options) + } + + socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs) + socket.setNoDelay(this.keepAlive) + + const abortController = new AbortController() + const { signal } = abortController + + const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting'] + ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal }) + : Promise.resolve() + + await this.#timeoutConnection({ + options, + timeout, + promises: [ + connectPromise, + once(socket, 'error', { signal }).then((err) => { + throw err[0] + }), + ], + }, abortController) + + if (this.#timeouts.idle) { + socket.setTimeout(this.#timeouts.idle, () => { + socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`)) + }) + } + + return socket + } + + addRequest (request, options) { + const proxy = this.#getProxy(options) + // it would be better to call proxy.addRequest here but this causes the + // http-proxy-agent to call its super.addRequest which causes the request + // to be added to the agent twice. since we only support 3 agents + // currently (see the required agents in proxy.js) we have manually + // checked that the only public methods we need to call are called in the + // next block. this could change in the future and presumably we would get + // failing tests until we have properly called the necessary methods on + // each of our proxy agents + if (proxy?.setRequestProps) { + proxy.setRequestProps(request, options) + } + + request.setHeader('connection', this.keepAlive ? 
'keep-alive' : 'close') + + if (this.#timeouts.response) { + let responseTimeout + request.once('finish', () => { + setTimeout(() => { + request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy)) + }, this.#timeouts.response) + }) + request.once('response', () => { + clearTimeout(responseTimeout) + }) + } + + if (this.#timeouts.transfer) { + let transferTimeout + request.once('response', (res) => { + setTimeout(() => { + res.destroy(new Errors.TransferTimeoutError(request, this.#proxy)) + }, this.#timeouts.transfer) + res.once('close', () => { + clearTimeout(transferTimeout) + }) + }) + } + + return super.addRequest(request, options) + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/dns.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/dns.js new file mode 100644 index 00000000000000..3c6946c566d736 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/dns.js @@ -0,0 +1,53 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const dns = require('dns') + +// this is a factory so that each request can have its own opts (i.e. ttl) +// while still sharing the cache across all requests +const cache = new LRUCache({ max: 50 }) + +const getOptions = ({ + family = 0, + hints = dns.ADDRCONFIG, + all = false, + verbatim = undefined, + ttl = 5 * 60 * 1000, + lookup = dns.lookup, +}) => ({ + // hints and lookup are returned since both are top level properties to (net|tls).connect + hints, + lookup: (hostname, ...args) => { + const callback = args.pop() // callback is always last arg + const lookupOptions = args[0] ?? {} + + const options = { + family, + hints, + all, + verbatim, + ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions), + } + + const key = JSON.stringify({ hostname, ...options }) + + if (cache.has(key)) { + const cached = cache.get(key) + return process.nextTick(callback, null, ...cached) + } + + lookup(hostname, options, (err, ...result) => { + if (err) { + return callback(err) + } + + cache.set(key, result, { ttl }) + return callback(null, ...result) + }) + }, +}) + +module.exports = { + cache, + getOptions, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/errors.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/errors.js new file mode 100644 index 00000000000000..70475aec8eb357 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/errors.js @@ -0,0 +1,61 @@ +'use strict' + +class InvalidProxyProtocolError extends Error { + constructor (url) { + super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``) + this.code = 'EINVALIDPROXY' + this.proxy = url + } +} + +class ConnectionTimeoutError extends Error { + constructor (host) { + super(`Timeout connecting to host \`${host}\``) + this.code = 'ECONNECTIONTIMEOUT' + this.host = host + } +} + +class IdleTimeoutError extends Error { + constructor (host) { + super(`Idle timeout reached for host \`${host}\``) + this.code = 'EIDLETIMEOUT' + this.host = host + } +} + +class ResponseTimeoutError extends Error { + constructor (request, proxy) { + let msg = 'Response timeout ' + if (proxy) { + msg += `from proxy \`${proxy.host}\` ` + } + msg += `connecting to host \`${request.host}\`` + super(msg) + this.code = 'ERESPONSETIMEOUT' + this.proxy = proxy + this.request = request + } +} + +class TransferTimeoutError extends Error { + constructor (request, proxy) { + let msg = 'Transfer timeout ' + if (proxy) { + msg += `from proxy 
\`${proxy.host}\` ` + } + msg += `for \`${request.host}\`` + super(msg) + this.code = 'ETRANSFERTIMEOUT' + this.proxy = proxy + this.request = request + } +} + +module.exports = { + InvalidProxyProtocolError, + ConnectionTimeoutError, + IdleTimeoutError, + ResponseTimeoutError, + TransferTimeoutError, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/index.js new file mode 100644 index 00000000000000..b33d6eaef07a21 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/index.js @@ -0,0 +1,56 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, proxyCache } = require('./proxy.js') +const dns = require('./dns.js') +const Agent = require('./agents.js') + +const agentCache = new LRUCache({ max: 20 }) + +const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => { + // false has meaning so this can't be a simple truthiness check + if (agent != null) { + return agent + } + + url = new URL(url) + + const proxyForUrl = getProxy(url, { proxy, noProxy }) + const normalizedOptions = { + ...normalizeOptions(options), + proxy: proxyForUrl, + } + + const cacheKey = cacheOptions({ + ...normalizedOptions, + secureEndpoint: url.protocol === 'https:', + }) + + if (agentCache.has(cacheKey)) { + return agentCache.get(cacheKey) + } + + const newAgent = new Agent(normalizedOptions) + agentCache.set(cacheKey, newAgent) + + return newAgent +} + +module.exports = { + getAgent, + Agent, + // these are exported for backwards compatability + HttpAgent: Agent, + HttpsAgent: Agent, + cache: { + proxy: proxyCache, + agent: agentCache, + dns: dns.cache, + clear: () => { + proxyCache.clear() + agentCache.clear() + dns.cache.clear() + }, + }, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/options.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/options.js new file mode 100644 index 00000000000000..0bf53f725f0846 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/options.js @@ -0,0 +1,86 @@ +'use strict' + +const dns = require('./dns') + +const normalizeOptions = (opts) => { + const family = parseInt(opts.family ?? '0', 10) + const keepAlive = opts.keepAlive ?? true + + const normalized = { + // nodejs http agent options. these are all the defaults + // but kept here to increase the likelihood of cache hits + // https://nodejs.org/api/http.html#new-agentoptions + keepAliveMsecs: keepAlive ? 1000 : undefined, + maxSockets: opts.maxSockets ?? 15, + maxTotalSockets: Infinity, + maxFreeSockets: keepAlive ? 256 : undefined, + scheduling: 'fifo', + // then spread the rest of the options + ...opts, + // we already set these to their defaults that we want + family, + keepAlive, + // our custom timeout options + timeouts: { + // the standard timeout option is mapped to our idle timeout + // and then deleted below + idle: opts.timeout ?? 
0, + connection: 0, + response: 0, + transfer: 0, + ...opts.timeouts, + }, + // get the dns options that go at the top level of socket connection + ...dns.getOptions({ family, ...opts.dns }), + } + + // remove timeout since we already used it to set our own idle timeout + delete normalized.timeout + + return normalized +} + +const createKey = (obj) => { + let key = '' + const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0]) + for (let [k, v] of sorted) { + if (v == null) { + v = 'null' + } else if (v instanceof URL) { + v = v.toString() + } else if (typeof v === 'object') { + v = createKey(v) + } + key += `${k}:${v}:` + } + return key +} + +const cacheOptions = ({ secureEndpoint, ...options }) => createKey({ + secureEndpoint: !!secureEndpoint, + // socket connect options + family: options.family, + hints: options.hints, + localAddress: options.localAddress, + // tls specific connect options + strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false, + ca: secureEndpoint ? options.ca : null, + cert: secureEndpoint ? options.cert : null, + key: secureEndpoint ? options.key : null, + // http agent options + keepAlive: options.keepAlive, + keepAliveMsecs: options.keepAliveMsecs, + maxSockets: options.maxSockets, + maxTotalSockets: options.maxTotalSockets, + maxFreeSockets: options.maxFreeSockets, + scheduling: options.scheduling, + // timeout options + timeouts: options.timeouts, + // proxy + proxy: options.proxy, +}) + +module.exports = { + normalizeOptions, + cacheOptions, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/proxy.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/proxy.js new file mode 100644 index 00000000000000..6272e929e57bcf --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/proxy.js @@ -0,0 +1,88 @@ +'use strict' + +const { HttpProxyAgent } = require('http-proxy-agent') +const { HttpsProxyAgent } = require('https-proxy-agent') +const { SocksProxyAgent } = require('socks-proxy-agent') +const { LRUCache } = require('lru-cache') +const { InvalidProxyProtocolError } = require('./errors.js') + +const PROXY_CACHE = new LRUCache({ max: 20 }) + +const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols) + +const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy']) + +const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => { + key = key.toLowerCase() + if (PROXY_ENV_KEYS.has(key)) { + acc[key] = value + } + return acc +}, {}) + +const getProxyAgent = (url) => { + url = new URL(url) + + const protocol = url.protocol.slice(0, -1) + if (SOCKS_PROTOCOLS.has(protocol)) { + return SocksProxyAgent + } + if (protocol === 'https' || protocol === 'http') { + return [HttpProxyAgent, HttpsProxyAgent] + } + + throw new InvalidProxyProtocolError(url) +} + +const isNoProxy = (url, noProxy) => { + if (typeof noProxy === 'string') { + noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean) + } + + if (!noProxy || !noProxy.length) { + return false + } + + const hostSegments = url.hostname.split('.').reverse() + + return noProxy.some((no) => { + const noSegments = no.split('.').filter(Boolean).reverse() + if (!noSegments.length) { + return false + } + + for (let i = 0; i < noSegments.length; i++) { + if (hostSegments[i] !== noSegments[i]) { + return false + } + } + + return true + }) +} + +const getProxy = (url, { proxy, noProxy }) => { + url = new URL(url) + + if (!proxy) { + proxy = url.protocol === 'https:' + ? 
PROXY_ENV.https_proxy + : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy + } + + if (!noProxy) { + noProxy = PROXY_ENV.no_proxy + } + + if (!proxy || isNoProxy(url, noProxy)) { + return null + } + + return new URL(proxy) +} + +module.exports = { + getProxyAgent, + getProxy, + proxyCache: PROXY_CACHE, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/package.json b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/package.json new file mode 100644 index 00000000000000..ef5b4e3228cc46 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/package.json @@ -0,0 +1,60 @@ +{ + "name": "@npmcli/agent", + "version": "2.2.2", + "description": "the http/https agent used by the npm cli", + "main": "lib/index.js", + "scripts": { + "gencerts": "bash scripts/create-cert.sh", + "test": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/agent/issues" + }, + "homepage": "https://github.com/npm/agent#readme", + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.3", + "publish": "true" + }, + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.3", + "minipass-fetch": "^3.0.3", + "nock": "^13.2.7", + "semver": "^7.5.4", + "simple-socks": "^3.1.0", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/agent.git" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md new file mode 100644 index 00000000000000..5fc208ff122e08 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
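The vendored @npmcli/agent module added above centralizes agent construction for the npm CLI: getAgent() resolves a proxy from explicit options or the proxy environment variables, normalizes the agent options into a stable cache key, and memoizes one Agent per key in a small LRU cache. A minimal usage sketch follows; the registry URL and timeout values here are illustrative only, not taken from this patch:

const { getAgent } = require('@npmcli/agent')

// repeated calls with the same normalized options return the cached agent
const agent = getAgent('https://registry.npmjs.org', {
  timeouts: { connection: 5000, idle: 10000 },
})

// a pre-built agent (or false) is passed through untouched, which is why
// the implementation checks `agent != null` rather than plain truthiness
const none = getAgent('https://registry.npmjs.org', { agent: false }) // => false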
diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js new file mode 100644 index 00000000000000..cb5982f79077ac --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js @@ -0,0 +1,20 @@ +// given an input that may or may not be an object, return an object that has +// a copy of every defined property listed in 'copy'. if the input is not an +// object, assign it to the property named by 'wrap' +const getOptions = (input, { copy, wrap }) => { + const result = {} + + if (input && typeof input === 'object') { + for (const prop of copy) { + if (input[prop] !== undefined) { + result[prop] = input[prop] + } + } + } else { + result[wrap] = input + } + + return result +} + +module.exports = getOptions diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js new file mode 100644 index 00000000000000..4d13bc037359d7 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js @@ -0,0 +1,9 @@ +const semver = require('semver') + +const satisfies = (range) => { + return semver.satisfies(process.version, range, { includePrerelease: true }) +} + +module.exports = { + satisfies, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE new file mode 100644 index 00000000000000..93546dfb7655bf --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE @@ -0,0 +1,15 @@ +(The MIT License) + +Copyright (c) 2011-2017 JP Richardson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files +(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, + merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS +OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/errors.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/errors.js new file mode 100644 index 00000000000000..1cd1e05d0c533d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/errors.js @@ -0,0 +1,129 @@ +'use strict' +const { inspect } = require('util') + +// adapted from node's internal/errors +// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js + +// close copy of node's internal SystemError class. 
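+// name and message are defined non-enumerable below, mirroring a real Error,
+// while errno, syscall, path, and dest are get/set accessors over the shared
+// `context` object, so mutating the error keeps its `info` property in sync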
+class SystemError { + constructor (code, prefix, context) { + // XXX context.code is undefined in all constructors used in cp/polyfill + // that may be a bug copied from node, maybe the constructor should use + // `code` not `errno`? nodejs/node#41104 + let message = `${prefix}: ${context.syscall} returned ` + + `${context.code} (${context.message})` + + if (context.path !== undefined) { + message += ` ${context.path}` + } + if (context.dest !== undefined) { + message += ` => ${context.dest}` + } + + this.code = code + Object.defineProperties(this, { + name: { + value: 'SystemError', + enumerable: false, + writable: true, + configurable: true, + }, + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + info: { + value: context, + enumerable: true, + configurable: true, + writable: false, + }, + errno: { + get () { + return context.errno + }, + set (value) { + context.errno = value + }, + enumerable: true, + configurable: true, + }, + syscall: { + get () { + return context.syscall + }, + set (value) { + context.syscall = value + }, + enumerable: true, + configurable: true, + }, + }) + + if (context.path !== undefined) { + Object.defineProperty(this, 'path', { + get () { + return context.path + }, + set (value) { + context.path = value + }, + enumerable: true, + configurable: true, + }) + } + + if (context.dest !== undefined) { + Object.defineProperty(this, 'dest', { + get () { + return context.dest + }, + set (value) { + context.dest = value + }, + enumerable: true, + configurable: true, + }) + } + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } + + [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { + return inspect(this, { + ...ctx, + getters: true, + customInspect: false, + }) + } +} + +function E (code, message) { + module.exports[code] = class NodeError extends SystemError { + constructor (ctx) { + super(code, message, ctx) + } + } +} + +E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') +E('ERR_FS_CP_EEXIST', 'Target already exists') +E('ERR_FS_CP_EINVAL', 'Invalid src or dest') +E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') +E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') +E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') +E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') +E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') +E('ERR_FS_EISDIR', 'Path is a directory') + +module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { + constructor (name, expected, actual) { + super() + this.code = 'ERR_INVALID_ARG_TYPE' + this.message = `The ${name} argument must be ${expected}. 
Received ${typeof actual}` + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js new file mode 100644 index 00000000000000..972ce7aa12abef --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js @@ -0,0 +1,22 @@ +const fs = require('fs/promises') +const getOptions = require('../common/get-options.js') +const node = require('../common/node.js') +const polyfill = require('./polyfill.js') + +// node 16.7.0 added fs.cp +const useNative = node.satisfies('>=16.7.0') + +const cp = async (src, dest, opts) => { + const options = getOptions(opts, { + copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], + }) + + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + return useNative + ? fs.cp(src, dest, options) + : polyfill(src, dest, options) +} + +module.exports = cp diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js new file mode 100644 index 00000000000000..80eb10de971918 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js @@ -0,0 +1,428 @@ +// this file is a modified version of the code in node 17.2.0 +// which is, in turn, a modified version of the fs-extra module on npm +// node core changes: +// - Use of the assert module has been replaced with core's error system. +// - All code related to the glob dependency has been removed. +// - Bring your own custom fs module is not currently supported. +// - Some basic code cleanup. +// changes here: +// - remove all callback related code +// - drop sync support +// - change assertions back to non-internal methods (see options.js) +// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows +'use strict' + +const { + ERR_FS_CP_DIR_TO_NON_DIR, + ERR_FS_CP_EEXIST, + ERR_FS_CP_EINVAL, + ERR_FS_CP_FIFO_PIPE, + ERR_FS_CP_NON_DIR_TO_DIR, + ERR_FS_CP_SOCKET, + ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, + ERR_FS_CP_UNKNOWN, + ERR_FS_EISDIR, + ERR_INVALID_ARG_TYPE, +} = require('./errors.js') +const { + constants: { + errno: { + EEXIST, + EISDIR, + EINVAL, + ENOTDIR, + }, + }, +} = require('os') +const { + chmod, + copyFile, + lstat, + mkdir, + readdir, + readlink, + stat, + symlink, + unlink, + utimes, +} = require('fs/promises') +const { + dirname, + isAbsolute, + join, + parse, + resolve, + sep, + toNamespacedPath, +} = require('path') +const { fileURLToPath } = require('url') + +const defaultOptions = { + dereference: false, + errorOnExist: false, + filter: undefined, + force: true, + preserveTimestamps: false, + recursive: false, +} + +async function cp (src, dest, opts) { + if (opts != null && typeof opts !== 'object') { + throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) + } + return cpFn( + toNamespacedPath(getValidatedPath(src)), + toNamespacedPath(getValidatedPath(dest)), + { ...defaultOptions, ...opts }) +} + +function getValidatedPath (fileURLOrPath) { + const path = fileURLOrPath != null && fileURLOrPath.href + && fileURLOrPath.origin + ? 
fileURLToPath(fileURLOrPath) + : fileURLOrPath + return path +} + +async function cpFn (src, dest, opts) { + // Warn about using preserveTimestamps on 32-bit node + // istanbul ignore next + if (opts.preserveTimestamps && process.arch === 'ia32') { + const warning = 'Using the preserveTimestamps option in 32-bit ' + + 'node is not recommended' + process.emitWarning(warning, 'TimestampPrecisionWarning') + } + const stats = await checkPaths(src, dest, opts) + const { srcStat, destStat } = stats + await checkParentPaths(src, srcStat, dest) + if (opts.filter) { + return handleFilter(checkParentDir, destStat, src, dest, opts) + } + return checkParentDir(destStat, src, dest, opts) +} + +async function checkPaths (src, dest, opts) { + const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) + if (destStat) { + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: 'src and dest cannot be the same', + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + if (srcStat.isDirectory() && !destStat.isDirectory()) { + throw new ERR_FS_CP_DIR_TO_NON_DIR({ + message: `cannot overwrite directory ${src} ` + + `with non-directory ${dest}`, + path: dest, + syscall: 'cp', + errno: EISDIR, + }) + } + if (!srcStat.isDirectory() && destStat.isDirectory()) { + throw new ERR_FS_CP_NON_DIR_TO_DIR({ + message: `cannot overwrite non-directory ${src} ` + + `with directory ${dest}`, + path: dest, + syscall: 'cp', + errno: ENOTDIR, + }) + } + } + + if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return { srcStat, destStat } +} + +function areIdentical (srcStat, destStat) { + return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && + destStat.dev === srcStat.dev +} + +function getStats (src, dest, opts) { + const statFunc = opts.dereference ? + (file) => stat(file, { bigint: true }) : + (file) => lstat(file, { bigint: true }) + return Promise.all([ + statFunc(src), + statFunc(dest).catch((err) => { + // istanbul ignore next: unsure how to cover. + if (err.code === 'ENOENT') { + return null + } + // istanbul ignore next: unsure how to cover. + throw err + }), + ]) +} + +async function checkParentDir (destStat, src, dest, opts) { + const destParent = dirname(dest) + const dirExists = await pathExists(destParent) + if (dirExists) { + return getStatsForCopy(destStat, src, dest, opts) + } + await mkdir(destParent, { recursive: true }) + return getStatsForCopy(destStat, src, dest, opts) +} + +function pathExists (dest) { + return stat(dest).then( + () => true, + // istanbul ignore next: not sure when this would occur + (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) +} + +// Recursively check if dest parent is a subdirectory of src. +// It works for all file types including symlinks since it +// checks the src and dest inodes. It starts from the deepest +// parent and stops once it reaches the src parent or the root path. 
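+// e.g. if /b is a symlink to /a, cp('/a', '/b/x') stats the dest parent '/b',
+// which resolves to /a's dev/ino; areIdentical() then matches srcStat and the
+// copy is rejected with EINVAL, a self-copy that the purely string-based
+// isSrcSubdir() check cannot detect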
+async function checkParentPaths (src, srcStat, dest) { + const srcParent = resolve(dirname(src)) + const destParent = resolve(dirname(dest)) + if (destParent === srcParent || destParent === parse(destParent).root) { + return + } + let destStat + try { + destStat = await stat(destParent, { bigint: true }) + } catch (err) { + // istanbul ignore else: not sure when this would occur + if (err.code === 'ENOENT') { + return + } + // istanbul ignore next: not sure when this would occur + throw err + } + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return checkParentPaths(src, srcStat, destParent) +} + +const normalizePathToArray = (path) => + resolve(path).split(sep).filter(Boolean) + +// Return true if dest is a subdir of src, otherwise false. +// It only checks the path strings. +function isSrcSubdir (src, dest) { + const srcArr = normalizePathToArray(src) + const destArr = normalizePathToArray(dest) + return srcArr.every((cur, i) => destArr[i] === cur) +} + +async function handleFilter (onInclude, destStat, src, dest, opts, cb) { + const include = await opts.filter(src, dest) + if (include) { + return onInclude(destStat, src, dest, opts, cb) + } +} + +function startCopy (destStat, src, dest, opts) { + if (opts.filter) { + return handleFilter(getStatsForCopy, destStat, src, dest, opts) + } + return getStatsForCopy(destStat, src, dest, opts) +} + +async function getStatsForCopy (destStat, src, dest, opts) { + const statFn = opts.dereference ? stat : lstat + const srcStat = await statFn(src) + // istanbul ignore else: can't portably test FIFO + if (srcStat.isDirectory() && opts.recursive) { + return onDir(srcStat, destStat, src, dest, opts) + } else if (srcStat.isDirectory()) { + throw new ERR_FS_EISDIR({ + message: `${src} is a directory (not copied)`, + path: src, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) { + return onFile(srcStat, destStat, src, dest, opts) + } else if (srcStat.isSymbolicLink()) { + return onLink(destStat, src, dest) + } else if (srcStat.isSocket()) { + throw new ERR_FS_CP_SOCKET({ + message: `cannot copy a socket file: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFIFO()) { + throw new ERR_FS_CP_FIFO_PIPE({ + message: `cannot copy a FIFO pipe: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // istanbul ignore next: should be unreachable + throw new ERR_FS_CP_UNKNOWN({ + message: `cannot copy an unknown file type: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) +} + +function onFile (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return _copyFile(srcStat, src, dest, opts) + } + return mayCopyFile(srcStat, src, dest, opts) +} + +async function mayCopyFile (srcStat, src, dest, opts) { + if (opts.force) { + await unlink(dest) + return _copyFile(srcStat, src, dest, opts) + } else if (opts.errorOnExist) { + throw new ERR_FS_CP_EEXIST({ + message: `${dest} already exists`, + path: dest, + syscall: 'cp', + errno: EEXIST, + }) + } +} + +async function _copyFile (srcStat, src, dest, opts) { + await copyFile(src, dest) + if (opts.preserveTimestamps) { + return handleTimestampsAndMode(srcStat.mode, src, dest) + } + return setDestMode(dest, srcStat.mode) +} + +async function handleTimestampsAndMode (srcMode, src, dest) { + // Make sure the file is writable before 
setting the timestamp + // otherwise open fails with EPERM when invoked with 'r+' + // (through utimes call) + if (fileIsNotWritable(srcMode)) { + await makeFileWritable(dest, srcMode) + return setDestTimestampsAndMode(srcMode, src, dest) + } + return setDestTimestampsAndMode(srcMode, src, dest) +} + +function fileIsNotWritable (srcMode) { + return (srcMode & 0o200) === 0 +} + +function makeFileWritable (dest, srcMode) { + return setDestMode(dest, srcMode | 0o200) +} + +async function setDestTimestampsAndMode (srcMode, src, dest) { + await setDestTimestamps(src, dest) + return setDestMode(dest, srcMode) +} + +function setDestMode (dest, srcMode) { + return chmod(dest, srcMode) +} + +async function setDestTimestamps (src, dest) { + // The initial srcStat.atime cannot be trusted + // because it is modified by the read(2) system call + // (See https://nodejs.org/api/fs.html#fs_stat_time_values) + const updatedSrcStat = await stat(src) + return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) +} + +function onDir (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return mkDirAndCopy(srcStat.mode, src, dest, opts) + } + return copyDir(src, dest, opts) +} + +async function mkDirAndCopy (srcMode, src, dest, opts) { + await mkdir(dest) + await copyDir(src, dest, opts) + return setDestMode(dest, srcMode) +} + +async function copyDir (src, dest, opts) { + const dir = await readdir(src) + for (let i = 0; i < dir.length; i++) { + const item = dir[i] + const srcItem = join(src, item) + const destItem = join(dest, item) + const { destStat } = await checkPaths(srcItem, destItem, opts) + await startCopy(destStat, srcItem, destItem, opts) + } +} + +async function onLink (destStat, src, dest) { + let resolvedSrc = await readlink(src) + if (!isAbsolute(resolvedSrc)) { + resolvedSrc = resolve(dirname(src), resolvedSrc) + } + if (!destStat) { + return symlink(resolvedSrc, dest) + } + let resolvedDest + try { + resolvedDest = await readlink(dest) + } catch (err) { + // Dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. If dest already exists, + // fs throws error anyway, so no need to guard against it here. + // istanbul ignore next: can only test on windows + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { + return symlink(resolvedSrc, dest) + } + // istanbul ignore next: should not be possible + throw err + } + if (!isAbsolute(resolvedDest)) { + resolvedDest = resolve(dirname(dest), resolvedDest) + } + if (isSrcSubdir(resolvedSrc, resolvedDest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + + `${resolvedDest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // Do not copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. 
+ const srcStat = await stat(src) + if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { + throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ + message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return copyLink(resolvedSrc, dest) +} + +async function copyLink (resolvedSrc, dest) { + await unlink(dest) + return symlink(resolvedSrc, dest) +} + +module.exports = cp diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js new file mode 100644 index 00000000000000..81c746304cc428 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js @@ -0,0 +1,13 @@ +'use strict' + +const cp = require('./cp/index.js') +const withTempDir = require('./with-temp-dir.js') +const readdirScoped = require('./readdir-scoped.js') +const moveFile = require('./move-file.js') + +module.exports = { + cp, + withTempDir, + readdirScoped, + moveFile, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/move-file.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/move-file.js new file mode 100644 index 00000000000000..d56e06d384659a --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/move-file.js @@ -0,0 +1,78 @@ +const { dirname, join, resolve, relative, isAbsolute } = require('path') +const fs = require('fs/promises') + +const pathExists = async path => { + try { + await fs.access(path) + return true + } catch (er) { + return er.code !== 'ENOENT' + } +} + +const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { + if (!source || !destination) { + throw new TypeError('`source` and `destination` file required') + } + + options = { + overwrite: true, + ...options, + } + + if (!options.overwrite && await pathExists(destination)) { + throw new Error(`The destination file exists: ${destination}`) + } + + await fs.mkdir(dirname(destination), { recursive: true }) + + try { + await fs.rename(source, destination) + } catch (error) { + if (error.code === 'EXDEV' || error.code === 'EPERM') { + const sourceStat = await fs.lstat(source) + if (sourceStat.isDirectory()) { + const files = await fs.readdir(source) + await Promise.all(files.map((file) => + moveFile(join(source, file), join(destination, file), options, false, symlinks) + )) + } else if (sourceStat.isSymbolicLink()) { + symlinks.push({ source, destination }) + } else { + await fs.copyFile(source, destination) + } + } else { + throw error + } + } + + if (root) { + await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { + let target = await fs.readlink(symSource) + // junction symlinks in windows will be absolute paths, so we need to + // make sure they point to the symlink destination + if (isAbsolute(target)) { + target = resolve(symDestination, relative(symSource, target)) + } + // try to determine what the actual file is so we can create the correct + // type of symlink in windows + let targetStat = 'file' + try { + targetStat = await fs.stat(resolve(dirname(symSource), target)) + if (targetStat.isDirectory()) { + targetStat = 'junction' + } + } catch { + // targetStat remains 'file' + } + await fs.symlink( + target, + symDestination, + targetStat + ) + })) + await fs.rm(source, { recursive: true, force: true }) + } +} + +module.exports = moveFile diff --git 
a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/readdir-scoped.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/readdir-scoped.js new file mode 100644 index 00000000000000..cd601dfbe7486b --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/readdir-scoped.js @@ -0,0 +1,20 @@ +const { readdir } = require('fs/promises') +const { join } = require('path') + +const readdirScoped = async (dir) => { + const results = [] + + for (const item of await readdir(dir)) { + if (item.startsWith('@')) { + for (const scopedItem of await readdir(join(dir, item))) { + results.push(join(item, scopedItem)) + } + } else { + results.push(item) + } + } + + return results +} + +module.exports = readdirScoped diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js new file mode 100644 index 00000000000000..0738ac4f29e1be --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js @@ -0,0 +1,39 @@ +const { join, sep } = require('path') + +const getOptions = require('./common/get-options.js') +const { mkdir, mkdtemp, rm } = require('fs/promises') + +// create a temp directory, ensure its permissions match its parent, then call +// the supplied function passing it the path to the directory. clean up after +// the function finishes, whether it throws or not +const withTempDir = async (root, fn, opts) => { + const options = getOptions(opts, { + copy: ['tmpPrefix'], + }) + // create the directory + await mkdir(root, { recursive: true }) + + const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || '')) + let err + let result + + try { + result = await fn(target) + } catch (_err) { + err = _err + } + + try { + await rm(target, { force: true, recursive: true }) + } catch { + // ignore errors + } + + if (err) { + throw err + } + + return result +} + +module.exports = withTempDir diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/package.json b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/package.json new file mode 100644 index 00000000000000..5261a11b78000e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/package.json @@ -0,0 +1,52 @@ +{ + "name": "@npmcli/fs", + "version": "3.1.1", + "description": "filesystem utilities for the npm cli", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "snap": "tap", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/fs.git" + }, + "keywords": [ + "npm", + "oss" + ], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.1" + }, + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.22.0" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/abbrev/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/abbrev/LICENSE new file mode 100644 index 00000000000000..9bcfa9d7d8d26e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/abbrev/LICENSE @@ -0,0 +1,46 @@ +This software is dual-licensed under the ISC and MIT licenses. +You may use this software under EITHER of the following licenses. + +---------- + +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------- + +Copyright Isaac Z. Schlueter and Contributors +All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/abbrev/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/abbrev/lib/index.js new file mode 100644 index 00000000000000..9f48801f049c9e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/abbrev/lib/index.js @@ -0,0 +1,50 @@ +module.exports = abbrev + +function abbrev (...args) { + let list = args.length === 1 || Array.isArray(args[0]) ? args[0] : args + + for (let i = 0, l = list.length; i < l; i++) { + list[i] = typeof list[i] === 'string' ? 
list[i] : String(list[i]) + } + + // sort them lexicographically, so that they're next to their nearest kin + list = list.sort(lexSort) + + // walk through each, seeing how much it has in common with the next and previous + const abbrevs = {} + let prev = '' + for (let ii = 0, ll = list.length; ii < ll; ii++) { + const current = list[ii] + const next = list[ii + 1] || '' + let nextMatches = true + let prevMatches = true + if (current === next) { + continue + } + let j = 0 + const cl = current.length + for (; j < cl; j++) { + const curChar = current.charAt(j) + nextMatches = nextMatches && curChar === next.charAt(j) + prevMatches = prevMatches && curChar === prev.charAt(j) + if (!nextMatches && !prevMatches) { + j++ + break + } + } + prev = current + if (j === cl) { + abbrevs[current] = current + continue + } + for (let a = current.slice(0, j); j <= cl; j++) { + abbrevs[a] = current + a += current.charAt(j) + } + } + return abbrevs +} + +function lexSort (a, b) { + return a === b ? 0 : a > b ? 1 : -1 +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/abbrev/package.json b/deps/npm/node_modules/node-gyp/node_modules/abbrev/package.json new file mode 100644 index 00000000000000..e26400445631ad --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/abbrev/package.json @@ -0,0 +1,43 @@ +{ + "name": "abbrev", + "version": "2.0.0", + "description": "Like ruby's abbrev module, but in js", + "author": "GitHub Inc.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/abbrev-js.git" + }, + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.8.0", + "tap": "^16.3.0" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.8.0" + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md b/deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md new file mode 100644 index 00000000000000..8d28acf866d932 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
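The abbrev module above is what lets npm accept shortened command names: it maps every unambiguous prefix of each input word to the full word. A short behavioral sketch, using the package's traditional example:

const abbrev = require('abbrev')

abbrev('foo', 'fool', 'folding', 'flop')
// => {
//   fl: 'flop', flo: 'flop', flop: 'flop',
//   fol: 'folding', fold: 'folding', foldi: 'folding',
//   foldin: 'folding', folding: 'folding',
//   foo: 'foo', fool: 'fool'
// }

Note that an exact word always maps to itself, so 'foo' stays valid even though it is also a prefix of 'fool'.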
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js new file mode 100644 index 00000000000000..ad5a76a4f73f26 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js new file mode 100644 index 00000000000000..5f6192c3cec566 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js @@ -0,0 +1,165 @@ +'use strict' + +const fs = require('fs/promises') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +async function read (cache, integrity, opts = {}) { + const { size } = opts + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = size ? { size } : await fs.stat(cpath) + return { stat, cpath, sri } + }) + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + const data = await fs.readFile(cpath, { encoding: null }) + + if (stat.size !== data.length) { + throw sizeError(stat.size, data.length) + } + + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = size ? 
{ size } : await fs.stat(cpath) + return { stat, cpath, sri } + }) + + return readPipeline(cpath, stat.size, sri, stream) + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.copy = copy + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath) => { + return fs.copyFile(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +async function hasContent (cache, integrity) { + if (!integrity) { + return false + } + + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.stat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } +} + +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. + const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js new file mode 100644 index 00000000000000..ce58d679e4cb25 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,18 @@ +'use strict' + +const fs = require('fs/promises') +const contentPath = require('./path') +const { hasContent } = require('./read') + +module.exports = rm + +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) + return true + } else { + return false + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js new file mode 100644 index 
00000000000000..e7187abca8788a --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js @@ -0,0 +1,206 @@ +'use strict' + +const events = require('events') + +const contentPath = require('./path') +const fs = require('fs/promises') +const { moveFile } = require('@npmcli/fs') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const fsm = require('fs-minipass') + +module.exports = write + +// Cache of move operations in process so we don't duplicate +const moveOperations = new Map() + +async function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + + if (typeof size === 'number' && data.length !== size) { + throw sizeError(size, data.length) + } + + const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + throw checksumError(integrity, sri) + } + + for (const algo in sri) { + const tmp = await makeTmp(cache, opts) + const hash = sri[algo].toString() + try { + await fs.writeFile(tmp.target, data, { flag: 'wx' }) + await moveToDestination(tmp, cache, hash, opts) + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } + } + return { integrity: sri, size: data.length } +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + this.handleContentP.catch(error => this.emit('error', error)) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. 
+ return Promise.reject(e).catch(cb) + } + // eslint-disable-next-line promise/catch-or-return + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + // eslint-disable-next-line promise/always-return + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +async function handleContent (inputStream, cache, opts) { + const tmp = await makeTmp(cache, opts) + try { + const res = await pipeToTmp(inputStream, cache, tmp.target, opts) + await moveToDestination( + tmp, + cache, + res.integrity, + opts + ) + return res + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } +} + +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } +} + +async function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) + return { + target: tmpTarget, + moved: false, + } +} + +async function moveToDestination (tmp, cache, sri) { + const destination = contentPath(cache, sri) + const destDir = path.dirname(destination) + if (moveOperations.has(destination)) { + return moveOperations.get(destination) + } + moveOperations.set( + destination, + fs.mkdir(destDir, { recursive: true }) + .then(async () => { + await moveFile(tmp.target, destination, { overwrite: false }) + tmp.moved = true + return tmp.moved + }) + .catch(err => { + if (!err.message.startsWith('The destination file exists')) { + throw Object.assign(err, { code: 'EEXIST' }) + } + }).finally(() => { + moveOperations.delete(destination) + }) + + ) + return moveOperations.get(destination) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js new file mode 100644 index 00000000000000..89c28f2f257d48 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,336 @@ +'use strict' + +const crypto = require('crypto') +const { + appendFile, + mkdir, 
+ readFile, + readdir, + rm, + writeFile, +} = require('fs/promises') +const { Minipass } = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const contentPath = require('./content/path') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const { moveFile } = require('@npmcli/fs') + +const pMap = require('p-map') +const lsStreamConcurrency = 5 + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. + // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. + if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await mkdir(path.dirname(target), { recursive: true }) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rm(tmp.target, { recursive: true, force: true }) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await mkdir(path.dirname(bucket), { recursive: true }) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + } + + // write the file atomically + const tmp = await setup() + try { + await write(tmp) + } finally { + await teardown(tmp) + } + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +async function insert (cache, key, integrity, opts = {}) { + const { metadata, size, time } = opts + const 
bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: time || Date.now(), + size, + metadata, + } + try { + await mkdir(path.dirname(bucket), { recursive: true }) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +async function find (cache, key) { + const bucket = bucketPath(cache, key) + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rm(bucket, { recursive: true, force: true }) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await pMap(buckets, async (bucket) => { + const bucketPath = path.join(indexDir, bucket) + const subbuckets = await readdirOrEmpty(bucketPath) + await pMap(subbuckets, async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await pMap(subbucketEntries, async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess? 
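+ // (it does: bucket entries are appended oldest-first, so the last set()
+ // for a key wins and only the newest entry per key survives)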
+ const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + }, + { concurrency: lsStreamConcurrency }) + }, + { concurrency: lsStreamConcurrency }) + }, + { concurrency: lsStreamConcurrency }) + stream.end() + return stream + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.ls = ls + +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) +} + +module.exports.bucketEntries = bucketEntries + +async function bucketEntries (bucket, filter) { + const data = await readFile(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! + // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (_) { + // eslint-ignore-next-line no-empty-block + } + // coverage disabled here, no need to test with an entry that parses to something falsey + // istanbul ignore else + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? 
contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js new file mode 100644 index 00000000000000..80ec206c7ecaaa --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js @@ -0,0 +1,170 @@ +'use strict' + +const Collect = require('minipass-collect') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +async function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} +module.exports = getData + +async function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return memoized + } + + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res +} +module.exports.byDigest = getDataByDigest + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream (cache, key, opts = {}) { + const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? 
entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + return stream + }).catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} + +module.exports.copy = copy + +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key +} + +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js new file mode 100644 index 00000000000000..c9b0da5f3a271b --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js @@ -0,0 +1,42 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.stream = get.stream +module.exports.get.stream.byDigest = get.stream.byDigest +module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js new file mode 100644 index 
00000000000000..2ecc60912e4563 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js @@ -0,0 +1,72 @@ +'use strict' + +const { LRUCache } = require('lru-cache') + +const MEMOIZED = new LRUCache({ + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.clear() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js new file mode 100644 index 00000000000000..9fc932d5f6dec5 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js @@ -0,0 +1,80 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +async function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return res.integrity +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + let error + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. 
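+ // it emits 'integrity' and 'size' when the content write completes,
+ // and those captured values are what the flush handler below uses
+ // to write the index entry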
+ const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + .on('error', (err) => { + error = err + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + async flush () { + if (!error) { + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) + } + }, + })) + + return pipeline +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js new file mode 100644 index 00000000000000..a94760c7cf2430 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const { rm } = require('fs/promises') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +async function all (cache) { + memo.clearMemoized() + const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) + return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js new file mode 100644 index 00000000000000..8500c1c16a429f --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js @@ -0,0 +1,7 @@ +'use strict' + +const { glob } = require('glob') +const path = require('path') + +const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) +module.exports = (path, options) => glob(globify(path), options) diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 00000000000000..445599b5038088 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 00000000000000..0bf5302136ebeb --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,26 @@ +'use strict' + +const { withTempDir } = require('@npmcli/fs') +const fs = require('fs/promises') +const path = require('path') + +module.exports.mkdir = mktmpdir + +async function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target 
= `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return withTempDir(path.join(cache, 'tmp'), cb, opts) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js new file mode 100644 index 00000000000000..d7423da1295b68 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js @@ -0,0 +1,257 @@ +'use strict' + +const { + mkdir, + readFile, + rm, + stat, + truncate, + writeFile, +} = require('fs/promises') +const pMap = require('p-map') +const contentPath = require('./content/path') +const fsm = require('fs-minipass') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const path = require('path') +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +async function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats +} + +async function markStartTime () { + return { startTime: new Date() } +} + +async function markEndTime () { + return { endTime: new Date() } +} + +async function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + await mkdir(cache, { recursive: true }) + return null +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rm it. 
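+// 6. Return a stats object summarizing the verified, kept, and
+//    reclaimed content.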
+// +async function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + // integrity is stringified, re-parse it so we can get each hash + const integrity = ssri.parse(entry.integrity) + for (const algo in integrity) { + liveContent.add(integrity[algo].toString()) + } + }) + await new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }) + const contentDir = contentPath.contentDir(cache) + const files = await glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await stat(f) + await rm(f, { recursive: true, force: true }) + stats.reclaimedSize += s.size + } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { + throw err + } + + await rm(filepath, { recursive: true, force: true }) + contentInfo.valid = false + } + return contentInfo +} + +async function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function rebuildBucket (cache, bucket, stats) { + await truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
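+ // inserting one entry at a time, awaited in order, keeps writes to
+ // the freshly truncated bucket file from interleaving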
+ for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + time: entry.time, + }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) +} + +async function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + return writeFile(verifile, `${Date.now()}`) +} + +module.exports.lastRun = lastRun + +async function lastRun (cache) { + const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/package.json new file mode 100644 index 00000000000000..6e6219158ed759 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/package.json @@ -0,0 +1,82 @@ +{ + "name": "cacache", + "version": "18.0.4", + "cache-version": { + "content": "2", + "index": "5" + }, + "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "coverage": "tap", + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/cacache.git" + }, + "keywords": [ + "cache", + "caching", + "content-addressable", + "sri", + "sri hash", + "subresource integrity", + "cache", + "storage", + "store", + "file store", + "filesystem", + "disk cache", + "disk storage" + ], + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", + "minipass-collect": "^2.0.1", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "windowsCI": false, + "version": "4.22.0", + "publish": "true" + }, + "author": "GitHub Inc.", + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/isexe/LICENSE new file mode 100644 index 00000000000000..c925dbe826b670 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2016-2022 Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/index.js new file mode 100644 index 00000000000000..cefcb66b5c5434 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/index.js @@ -0,0 +1,46 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sync = exports.isexe = exports.posix = exports.win32 = void 0; +const posix = __importStar(require("./posix.js")); +exports.posix = posix; +const win32 = __importStar(require("./win32.js")); +exports.win32 = win32; +__exportStar(require("./options.js"), exports); +const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform; +const impl = platform === 'win32' ? win32 : posix; +/** + * Determine whether a path is executable on the current platform. + */ +exports.isexe = impl.isexe; +/** + * Synchronously determine whether a path is executable on the + * current platform. 
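+ * Like `isexe`, this delegates to the win32 or posix implementation
+ * selected from the current (or test-overridden) platform above.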
+ */ +exports.sync = impl.sync; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/options.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/options.js new file mode 100644 index 00000000000000..0dfad0762cc32c --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/options.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/posix.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/posix.js new file mode 100644 index 00000000000000..3bc5e79d7007e9 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/posix.js @@ -0,0 +1,67 @@ +"use strict"; +/** + * This is the Posix implementation of isexe, which uses the file + * mode and uid/gid values. + * + * @module + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sync = exports.isexe = void 0; +const fs_1 = require("fs"); +const promises_1 = require("fs/promises"); +/** + * Determine whether a path is executable according to the mode and + * current (or specified) user and group IDs. + */ +const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await (0, promises_1.stat)(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.isexe = isexe; +/** + * Synchronously determine whether a path is executable according to + * the mode and current (or specified) user and group IDs. + */ +const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat((0, fs_1.statSync)(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.sync = sync; +const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options); +const checkMode = (stat, options) => { + const myUid = options.uid ?? process.getuid?.(); + const myGroups = options.groups ?? process.getgroups?.() ?? []; + const myGid = options.gid ?? process.getgid?.() ?? 
myGroups[0]; + if (myUid === undefined || myGid === undefined) { + throw new Error('cannot get uid or gid'); + } + const groups = new Set([myGid, ...myGroups]); + const mod = stat.mode; + const uid = stat.uid; + const gid = stat.gid; + const u = parseInt('100', 8); + const g = parseInt('010', 8); + const o = parseInt('001', 8); + const ug = u | g; + return !!(mod & o || + (mod & g && groups.has(gid)) || + (mod & u && uid === myUid) || + (mod & ug && myUid === 0)); +}; +//# sourceMappingURL=posix.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/win32.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/win32.js new file mode 100644 index 00000000000000..fa7a4d2f7d240d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/win32.js @@ -0,0 +1,62 @@ +"use strict"; +/** + * This is the Windows implementation of isexe, which uses the file + * extension and PATHEXT setting. + * + * @module + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sync = exports.isexe = void 0; +const fs_1 = require("fs"); +const promises_1 = require("fs/promises"); +/** + * Determine whether a path is executable based on the file extension + * and PATHEXT environment variable (or specified pathExt option) + */ +const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await (0, promises_1.stat)(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.isexe = isexe; +/** + * Synchronously determine whether a path is executable based on the file + * extension and PATHEXT environment variable (or specified pathExt option) + */ +const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat((0, fs_1.statSync)(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.sync = sync; +const checkPathExt = (path, options) => { + const { pathExt = process.env.PATHEXT || '' } = options; + const peSplit = pathExt.split(';'); + if (peSplit.indexOf('') !== -1) { + return true; + } + for (let i = 0; i < peSplit.length; i++) { + const p = peSplit[i].toLowerCase(); + const ext = path.substring(path.length - p.length).toLowerCase(); + if (p && ext === p) { + return true; + } + } + return false; +}; +const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options); +//# sourceMappingURL=win32.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/index.js new file mode 100644 index 00000000000000..1e309acd7355ec --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/index.js @@ -0,0 +1,16 @@ +import * as posix from './posix.js'; +import * as win32 from './win32.js'; +export * from './options.js'; +export { win32, posix }; +const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform; +const impl = platform === 'win32' ? win32 : posix; +/** + * Determine whether a path is executable on the current platform. + */ +export const isexe = impl.isexe; +/** + * Synchronously determine whether a path is executable on the + * current platform. 
+ */ +export const sync = impl.sync; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/options.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/options.js new file mode 100644 index 00000000000000..e9ded40bd5b2cd --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/options.js @@ -0,0 +1,2 @@ +export {}; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/posix.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/posix.js new file mode 100644 index 00000000000000..c453776c0452f7 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/posix.js @@ -0,0 +1,62 @@ +/** + * This is the Posix implementation of isexe, which uses the file + * mode and uid/gid values. + * + * @module + */ +import { statSync } from 'fs'; +import { stat } from 'fs/promises'; +/** + * Determine whether a path is executable according to the mode and + * current (or specified) user and group IDs. + */ +export const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await stat(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +/** + * Synchronously determine whether a path is executable according to + * the mode and current (or specified) user and group IDs. + */ +export const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(statSync(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options); +const checkMode = (stat, options) => { + const myUid = options.uid ?? process.getuid?.(); + const myGroups = options.groups ?? process.getgroups?.() ?? []; + const myGid = options.gid ?? process.getgid?.() ?? myGroups[0]; + if (myUid === undefined || myGid === undefined) { + throw new Error('cannot get uid or gid'); + } + const groups = new Set([myGid, ...myGroups]); + const mod = stat.mode; + const uid = stat.uid; + const gid = stat.gid; + const u = parseInt('100', 8); + const g = parseInt('010', 8); + const o = parseInt('001', 8); + const ug = u | g; + return !!(mod & o || + (mod & g && groups.has(gid)) || + (mod & u && uid === myUid) || + (mod & ug && myUid === 0)); +}; +//# sourceMappingURL=posix.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/win32.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/win32.js new file mode 100644 index 00000000000000..a354ee2a5115c7 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/win32.js @@ -0,0 +1,57 @@ +/** + * This is the Windows implementation of isexe, which uses the file + * extension and PATHEXT setting. 
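+ * (The file mode is not consulted here; a path is considered
+ * executable when its extension matches an entry in PATHEXT, or when
+ * PATHEXT contains an empty entry.)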
+ * + * @module + */ +import { statSync } from 'fs'; +import { stat } from 'fs/promises'; +/** + * Determine whether a path is executable based on the file extension + * and PATHEXT environment variable (or specified pathExt option) + */ +export const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await stat(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +/** + * Synchronously determine whether a path is executable based on the file + * extension and PATHEXT environment variable (or specified pathExt option) + */ +export const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(statSync(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +const checkPathExt = (path, options) => { + const { pathExt = process.env.PATHEXT || '' } = options; + const peSplit = pathExt.split(';'); + if (peSplit.indexOf('') !== -1) { + return true; + } + for (let i = 0; i < peSplit.length; i++) { + const p = peSplit[i].toLowerCase(); + const ext = path.substring(path.length - p.length).toLowerCase(); + if (p && ext === p) { + return true; + } + } + return false; +}; +const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options); +//# sourceMappingURL=win32.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/package.json b/deps/npm/node_modules/node-gyp/node_modules/isexe/package.json new file mode 100644 index 00000000000000..a0e2cd04bfdbfe --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/package.json @@ -0,0 +1,96 @@ +{ + "name": "isexe", + "version": "3.1.1", + "description": "Minimal module to check if a file is executable.", + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/cjs/index.js", + "files": [ + "dist" + ], + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./posix": { + "import": { + "types": "./dist/mjs/posix.d.ts", + "default": "./dist/mjs/posix.js" + }, + "require": { + "types": "./dist/cjs/posix.d.ts", + "default": "./dist/cjs/posix.js" + } + }, + "./win32": { + "import": { + "types": "./dist/mjs/win32.d.ts", + "default": "./dist/mjs/win32.js" + }, + "require": { + "types": "./dist/cjs/win32.d.ts", + "default": "./dist/cjs/win32.js" + } + }, + "./package.json": "./package.json" + }, + "devDependencies": { + "@types/node": "^20.4.5", + "@types/tap": "^15.0.8", + "c8": "^8.0.1", + "mkdirp": "^0.5.1", + "prettier": "^2.8.8", + "rimraf": "^2.5.0", + "sync-content": "^1.0.2", + "tap": "^16.3.8", + "ts-node": "^10.9.1", + "typedoc": "^0.24.8", + "typescript": "^5.1.6" + }, + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tsc -p tsconfig/cjs.json && tsc -p tsconfig/esm.json && bash ./scripts/fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache", + "typedoc": "typedoc --tsconfig tsconfig/esm.json ./src/*.ts" + }, + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "tap": { + "coverage": false, + "node-arg": [ + "--enable-source-maps", + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "repository": "https://github.com/isaacs/isexe", + "engines": { + "node": ">=16" + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE new file mode 100644 index 00000000000000..1808eb2844231c --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE @@ -0,0 +1,16 @@ +ISC License + +Copyright 2017-2022 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js new file mode 100644 index 00000000000000..bfcfacbcc95e18 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js @@ -0,0 +1,471 @@ +const { Request, Response } = require('minipass-fetch') +const { Minipass } = require('minipass') +const MinipassFlush = require('minipass-flush') +const cacache = require('cacache') +const url = require('url') + +const CachingMinipassPipeline = require('../pipeline.js') +const CachePolicy = require('./policy.js') +const cacheKey = require('./key.js') +const remote = require('../remote.js') + +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) + +// allow list for request headers that will be written to the cache index +// note: we will also store any request headers +// that are named in a response's vary header +const KEEP_REQUEST_HEADERS = [ + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept', + 'cache-control', +] + +// allow list for response headers that will be written to the cache index +// note: we must not store the real response's age header, or when we load +// a cache policy based on the metadata it will think the cached response +// is always stale +const KEEP_RESPONSE_HEADERS = [ + 'cache-control', + 'content-encoding', + 'content-language', + 'content-type', + 'date', + 'etag', + 'expires', + 'last-modified', + 'link', + 'location', + 'pragma', + 'vary', +] + +// return an object containing all metadata to be written to the index +const getMetadata = (request, response, options) => { + const metadata = { + time: Date.now(), + url: request.url, + reqHeaders: {}, + resHeaders: {}, + + // options on which we must match the request and vary the response + options: { + compress: options.compress 
!= null ? options.compress : request.compress, + }, + } + + // only save the status if it's not a 200 or 304 + if (response.status !== 200 && response.status !== 304) { + metadata.status = response.status + } + + for (const name of KEEP_REQUEST_HEADERS) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + + // if the request's host header differs from the host in the url + // we need to keep it, otherwise it's just noise and we ignore it + const host = request.headers.get('host') + const parsedUrl = new url.URL(request.url) + if (host && parsedUrl.host !== host) { + metadata.reqHeaders.host = host + } + + // if the response has a vary header, make sure + // we store the relevant request headers too + if (response.headers.has('vary')) { + const vary = response.headers.get('vary') + // a vary of "*" means every header causes a different response. + // in that scenario, we do not include any additional headers + // as the freshness check will always fail anyway and we don't + // want to bloat the cache indexes + if (vary !== '*') { + // copy any other request headers that will vary the response + const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) + for (const name of varyHeaders) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + } + } + + for (const name of KEEP_RESPONSE_HEADERS) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + for (const name of options.cacheAdditionalHeaders) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + return metadata +} + +// symbols used to hide objects that may be lazily evaluated in a getter +const _request = Symbol('request') +const _response = Symbol('response') +const _policy = Symbol('policy') + +class CacheEntry { + constructor ({ entry, request, response, options }) { + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. 
we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else { + this.key = cacheKey(request) + } + + this.options = options + + // these properties are behind getters that lazily evaluate + this[_request] = request + this[_response] = response + this[_policy] = null + } + + // returns a CacheEntry instance that satisfies the given request + // or undefined if no existing entry satisfies + static async find (request, options) { + try { + // compacts the index and returns an array of unique entries + var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { + const entryA = new CacheEntry({ entry: A, options }) + const entryB = new CacheEntry({ entry: B, options }) + return entryA.policy.satisfies(entryB.request) + }, { + validateEntry: (entry) => { + // clean out entries with a buggy content-encoding value + if (entry.metadata && + entry.metadata.resHeaders && + entry.metadata.resHeaders['content-encoding'] === null) { + return false + } + + // if an integrity is null, it needs to have a status specified + if (entry.integrity === null) { + return !!(entry.metadata && entry.metadata.status) + } + + return true + }, + }) + } catch (err) { + // if the compact request fails, ignore the error and return + return + } + + // a cache mode of 'reload' means to behave as though we have no cache + // on the way to the network. return undefined to allow cacheFetch to + // create a brand new request no matter what. + if (options.cache === 'reload') { + return + } + + // find the specific entry that satisfies the request + let match + for (const entry of matches) { + const _entry = new CacheEntry({ + entry, + options, + }) + + if (_entry.policy.satisfies(request)) { + match = _entry + break + } + } + + return match + } + + // if the user made a PUT/POST/PATCH then we invalidate our + // cache for the same url by deleting the index entirely + static async invalidate (request, options) { + const key = cacheKey(request) + try { + await cacache.rm.entry(options.cachePath, key, { removeFully: true }) + } catch (err) { + // ignore errors + } + } + + get request () { + if (!this[_request]) { + this[_request] = new Request(this.entry.metadata.url, { + method: 'GET', + headers: this.entry.metadata.reqHeaders, + ...this.entry.metadata.options, + }) + } + + return this[_request] + } + + get response () { + if (!this[_response]) { + this[_response] = new Response(null, { + url: this.entry.metadata.url, + counter: this.options.counter, + status: this.entry.metadata.status || 200, + headers: { + ...this.entry.metadata.resHeaders, + 'content-length': this.entry.size, + }, + }) + } + + return this[_response] + } + + get policy () { + if (!this[_policy]) { + this[_policy] = new CachePolicy({ + entry: this.entry, + request: this.request, + response: this.response, + options: this.options, + }) + } + + return this[_policy] + } + + // wraps the response in a pipeline that stores the data + // in the cache while the user consumes it + async store (status) { + // if we got a status other than 200, 301, or 308, + // or the CachePolicy forbid storage, append the + // cache status header and return it untouched + if ( + this.request.method !== 'GET' || + ![200, 301, 308].includes(this.response.status) || + !this.policy.storable() + ) { + this.response.headers.set('x-local-cache-status', 'skip') + return this.response + } + + const size = 
this.response.headers.get('content-length') + const cacheOpts = { + algorithms: this.options.algorithms, + metadata: getMetadata(this.request, this.response, this.options), + size, + integrity: this.options.integrity, + integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, + } + + let body = null + // we only set a body if the status is a 200, redirects are + // stored as metadata only + if (this.response.status === 200) { + let cacheWriteResolve, cacheWriteReject + const cacheWritePromise = new Promise((resolve, reject) => { + cacheWriteResolve = resolve + cacheWriteReject = reject + }).catch((err) => { + body.emit('error', err) + }) + + body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ + flush () { + return cacheWritePromise + }, + })) + // this is always true since if we aren't reusing the one from the remote fetch, we + // are using the one from cacache + body.hasIntegrityEmitter = true + + const onResume = () => { + const tee = new Minipass() + const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) + // re-emit the integrity and size events on our new response body so they can be reused + cacheStream.on('integrity', i => body.emit('integrity', i)) + cacheStream.on('size', s => body.emit('size', s)) + // stick a flag on here so downstream users will know if they can expect integrity events + tee.pipe(cacheStream) + // TODO if the cache write fails, log a warning but return the response anyway + // eslint-disable-next-line promise/catch-or-return + cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) + body.unshift(tee) + body.unshift(this.response.body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + } else { + await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + } + + // note: we do not set the x-local-cache-hash header because we do not know + // the hash value until after the write to the cache completes, which doesn't + // happen until after the response has been sent and it's too late to write + // the header anyway + this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + this.response.headers.set('x-local-cache-mode', 'stream') + this.response.headers.set('x-local-cache-status', status) + this.response.headers.set('x-local-cache-time', new Date().toISOString()) + const newResponse = new Response(body, { + url: this.response.url, + status: this.response.status, + headers: this.response.headers, + counter: this.options.counter, + }) + return newResponse + } + + // use the cached data to create a response and return it + async respond (method, options, status) { + let response + if (method === 'HEAD' || [301, 308].includes(this.response.status)) { + // if the request is a HEAD, or the response is a redirect, + // then the metadata in the entry already includes everything + // we need to build a response + response = this.response + } else { + // we're responding with a full cached response, so create a body + // that reads from cacache and attach it to a new Response + const body = new Minipass() + const headers = { ...this.policy.responseHeaders() } + + const onResume = () => { + const cacheStream = cacache.get.stream.byDigest( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if 
(err.code === 'EINTEGRITY') { + await cacache.rm.content( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + } + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { + await CacheEntry.invalidate(this.request, this.options) + } + body.emit('error', err) + cacheStream.resume() + }) + // emit the integrity and size events based on our metadata so we're consistent + body.emit('integrity', this.entry.integrity) + body.emit('size', Number(headers['content-length'])) + cacheStream.pipe(body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + response = new Response(body, { + url: this.entry.metadata.url, + counter: options.counter, + status: 200, + headers, + }) + } + + response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) + response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + response.headers.set('x-local-cache-mode', 'stream') + response.headers.set('x-local-cache-status', status) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) + return response + } + + // use the provided request along with this cache entry to + // revalidate the stored response. returns a response, either + // from the cache or from the update + async revalidate (request, options) { + const revalidateRequest = new Request(request, { + headers: this.policy.revalidationHeaders(request), + }) + + try { + // NOTE: be sure to remove the headers property from the + // user supplied options, since we have already defined + // them on the new request object. if they're still in the + // options then those will overwrite the ones from the policy + var response = await remote(revalidateRequest, { + ...options, + headers: undefined, + }) + } catch (err) { + // if the network fetch fails, return the stale + // cached response unless it has a cache-control + // of 'must-revalidate' + if (!this.policy.mustRevalidate) { + return this.respond(request.method, options, 'stale') + } + + throw err + } + + if (this.policy.revalidated(revalidateRequest, response)) { + // we got a 304, write a new index to the cache and respond from cache + const metadata = getMetadata(request, response, options) + // 304 responses do not include headers that are specific to the response data + // since they do not include a body, so we copy values for headers that were + // in the old cache entry to the new one, if the new metadata does not already + // include that header + for (const name of KEEP_RESPONSE_HEADERS) { + if ( + !hasOwnProperty(metadata.resHeaders, name) && + hasOwnProperty(this.entry.metadata.resHeaders, name) + ) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + } + + for (const name of options.cacheAdditionalHeaders) { + const inMeta = hasOwnProperty(metadata.resHeaders, name) + const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) + const inPolicy = hasOwnProperty(this.policy.response.headers, name) + + // if the header is in the existing entry, but it is not in the metadata + // then we need to write it to the metadata as this will refresh the on-disk cache + if (!inMeta && inEntry) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + // if the header is in the metadata, but not in the policy, then we need to set + // it in the policy so that it's included in the immediate response. 
future + // responses will load a new cache entry, so we don't need to change that + if (!inPolicy && inMeta) { + this.policy.response.headers[name] = metadata.resHeaders[name] + } + } + + try { + await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { + size: this.entry.size, + metadata, + }) + } catch (err) { + // if updating the cache index fails, we ignore it and + // respond anyway + } + return this.respond(request.method, options, 'revalidated') + } + + // if we got a modified response, create a new entry based on it + const newEntry = new CacheEntry({ + request, + response, + options, + }) + + // respond with the new entry while writing it to the cache + return newEntry.store('updated') + } +} + +module.exports = CacheEntry diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js new file mode 100644 index 00000000000000..67a66573bebe66 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js @@ -0,0 +1,11 @@ +class NotCachedError extends Error { + constructor (url) { + /* eslint-disable-next-line max-len */ + super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) + this.code = 'ENOTCACHED' + } +} + +module.exports = { + NotCachedError, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js new file mode 100644 index 00000000000000..0de49d23fb9336 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js @@ -0,0 +1,49 @@ +const { NotCachedError } = require('./errors.js') +const CacheEntry = require('./entry.js') +const remote = require('../remote.js') + +// do whatever is necessary to get a Response and return it +const cacheFetch = async (request, options) => { + // try to find a cached entry that satisfies this request + const entry = await CacheEntry.find(request, options) + if (!entry) { + // no cached result, if the cache mode is 'only-if-cached' that's a failure + if (options.cache === 'only-if-cached') { + throw new NotCachedError(request.url) + } + + // otherwise, we make a request, store it and return it + const response = await remote(request, options) + const newEntry = new CacheEntry({ request, response, options }) + return newEntry.store('miss') + } + + // we have a cached response that satisfies this request, however if the cache + // mode is 'no-cache' then we send the revalidation request no matter what + if (options.cache === 'no-cache') { + return entry.revalidate(request, options) + } + + // if the cached entry is not stale, or if the cache mode is 'force-cache' or + // 'only-if-cached' we can respond with the cached entry. set the status + // based on the result of needsRevalidation and respond + const _needsRevalidation = entry.policy.needsRevalidation(request) + if (options.cache === 'force-cache' || + options.cache === 'only-if-cached' || + !_needsRevalidation) { + return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') + } + + // if we got here, the cache entry is stale so revalidate it + return entry.revalidate(request, options) +} + +cacheFetch.invalidate = async (request, options) => { + if (!options.cachePath) { + return + } + + return CacheEntry.invalidate(request, options) +} + +module.exports = cacheFetch diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js new file mode 100644 index 00000000000000..f7684d562b7fae --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js @@ -0,0 +1,17 @@ +const { URL, format } = require('url') + +// options passed to url.format() when generating a key +const formatOptions = { + auth: false, + fragment: false, + search: true, + unicode: false, +} + +// returns a string to be used as the cache key for the Request +const cacheKey = (request) => { + const parsed = new URL(request.url) + return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` +} + +module.exports = cacheKey diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js new file mode 100644 index 00000000000000..ada3c8600dae92 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js @@ -0,0 +1,161 @@ +const CacheSemantics = require('http-cache-semantics') +const Negotiator = require('negotiator') +const ssri = require('ssri') + +// options passed to http-cache-semantics constructor +const policyOptions = { + shared: false, + ignoreCargoCult: true, +} + +// a fake empty response, used when only testing the +// request for storability +const emptyResponse = { status: 200, headers: {} } + +// returns a plain object representation of the Request +const requestObject = (request) => { + const _obj = { + method: request.method, + url: request.url, + headers: {}, + compress: request.compress, + } + + request.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +// returns a plain object representation of the Response +const responseObject = (response) => { + const _obj = { + status: response.status, + headers: {}, + } + + response.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +class CachePolicy { + constructor ({ entry, request, response, options }) { + this.entry = entry + this.request = requestObject(request) + this.response = responseObject(response) + this.options = options + this.policy = new CacheSemantics(this.request, this.response, policyOptions) + + if (this.entry) { + // if we have an entry, copy the timestamp to the _responseTime + // this is necessary because the CacheSemantics constructor forces + // the value to Date.now() which means a policy created from a + // cache entry is likely to always identify itself as stale + this.policy._responseTime = this.entry.metadata.time + } + } + + // static method to quickly determine if a request alone is storable + static storable (request, options) { + // no cachePath means no caching + if (!options.cachePath) { + return false + } + + // user explicitly asked not to cache + if (options.cache === 'no-store') { + return false + } + + // we only cache GET and HEAD requests + if (!['GET', 'HEAD'].includes(request.method)) { + return false + } + + // otherwise, let http-cache-semantics make the decision + // based on the request's 
headers + const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) + return policy.storable() + } + + // returns true if the policy satisfies the request + satisfies (request) { + const _req = requestObject(request) + if (this.request.headers.host !== _req.headers.host) { + return false + } + + if (this.request.compress !== _req.compress) { + return false + } + + const negotiatorA = new Negotiator(this.request) + const negotiatorB = new Negotiator(_req) + + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { + return false + } + + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { + return false + } + + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { + return false + } + + if (this.options.integrity) { + return ssri.parse(this.options.integrity).match(this.entry.integrity) + } + + return true + } + + // returns true if the request and response allow caching + storable () { + return this.policy.storable() + } + + // NOTE: this is a hack to avoid parsing the cache-control + // header ourselves, it returns true if the response's + // cache-control contains must-revalidate + get mustRevalidate () { + return !!this.policy._rescc['must-revalidate'] + } + + // returns true if the cached response requires revalidation + // for the given request + needsRevalidation (request) { + const _req = requestObject(request) + // force method to GET because we only cache GETs + // but can serve a HEAD from a cached GET + _req.method = 'GET' + return !this.policy.satisfiesWithoutRevalidation(_req) + } + + responseHeaders () { + return this.policy.responseHeaders() + } + + // returns a new object containing the appropriate headers + // to send a revalidation request + revalidationHeaders (request) { + const _req = requestObject(request) + return this.policy.revalidationHeaders(_req) + } + + // returns true if the request/response was revalidated + // successfully. returns false if a new response was received + revalidated (request, response) { + const _req = requestObject(request) + const _res = responseObject(response) + const policy = this.policy.revalidatedPolicy(_req, _res) + return !policy.modified + } +} + +module.exports = CachePolicy diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js new file mode 100644 index 00000000000000..233ba67e165502 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js @@ -0,0 +1,118 @@ +'use strict' + +const { FetchError, Request, isRedirect } = require('minipass-fetch') +const url = require('url') + +const CachePolicy = require('./cache/policy.js') +const cache = require('./cache/index.js') +const remote = require('./remote.js') + +// given a Request, a Response and user options +// return true if the response is a redirect that +// can be followed. 
we throw errors that will result +// in the fetch being rejected if the redirect is +// possible but invalid for some reason +const canFollowRedirect = (request, response, options) => { + if (!isRedirect(response.status)) { + return false + } + + if (options.redirect === 'manual') { + return false + } + + if (options.redirect === 'error') { + throw new FetchError(`redirect mode is set to error: ${request.url}`, + 'no-redirect', { code: 'ENOREDIRECT' }) + } + + if (!response.headers.has('location')) { + throw new FetchError(`redirect location header missing for: ${request.url}`, + 'no-location', { code: 'EINVALIDREDIRECT' }) + } + + if (request.counter >= request.follow) { + throw new FetchError(`maximum redirect reached at: ${request.url}`, + 'max-redirect', { code: 'EMAXREDIRECT' }) + } + + return true +} + +// given a Request, a Response, and the user's options return an object +// with a new Request and a new options object that will be used for +// following the redirect +const getRedirect = (request, response, options) => { + const _opts = { ...options } + const location = response.headers.get('location') + const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) + // Comment below is used under the following license: + /** + * @license + * Copyright (c) 2010-2012 Mikeal Rogers + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an "AS + * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (new url.URL(request.url).hostname !== redirectUrl.hostname) { + request.headers.delete('authorization') + request.headers.delete('cookie') + } + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if ( + response.status === 303 || + (request.method === 'POST' && [301, 302].includes(response.status)) + ) { + _opts.method = 'GET' + _opts.body = null + request.headers.delete('content-length') + } + + _opts.headers = {} + request.headers.forEach((value, key) => { + _opts.headers[key] = value + }) + + _opts.counter = ++request.counter + const redirectReq = new Request(url.format(redirectUrl), _opts) + return { + request: redirectReq, + options: _opts, + } +} + +const fetch = async (request, options) => { + const response = CachePolicy.storable(request, options) + ? 
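+  // storable requests go through the cache layer; everything else is sent
+  // straight to the network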
await cache(request, options) + : await remote(request, options) + + // if the request wasn't a GET or HEAD, and the response + // status is between 200 and 399 inclusive, invalidate the + // request url + if (!['GET', 'HEAD'].includes(request.method) && + response.status >= 200 && + response.status <= 399) { + await cache.invalidate(request, options) + } + + if (!canFollowRedirect(request, response, options)) { + return response + } + + const redirect = getRedirect(request, response, options) + return fetch(redirect.request, redirect.options) +} + +module.exports = fetch diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js new file mode 100644 index 00000000000000..2f12e8e1b61131 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js @@ -0,0 +1,41 @@ +const { FetchError, Headers, Request, Response } = require('minipass-fetch') + +const configureOptions = require('./options.js') +const fetch = require('./fetch.js') + +const makeFetchHappen = (url, opts) => { + const options = configureOptions(opts) + + const request = new Request(url, options) + return fetch(request, options) +} + +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { + if (typeof defaultUrl === 'object') { + defaultOptions = defaultUrl + defaultUrl = null + } + + const defaultedFetch = (url, options = {}) => { + const finalUrl = url || defaultUrl + const finalOptions = { + ...defaultOptions, + ...options, + headers: { + ...defaultOptions.headers, + ...options.headers, + }, + } + return wrappedFetch(finalUrl, finalOptions) + } + + defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => + makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) + return defaultedFetch +} + +module.exports = makeFetchHappen +module.exports.FetchError = FetchError +module.exports.Headers = Headers +module.exports.Request = Request +module.exports.Response = Response diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js new file mode 100644 index 00000000000000..f77511279f831d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js @@ -0,0 +1,54 @@ +const dns = require('dns') + +const conditionalHeaders = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 'if-match', + 'if-range', +] + +const configureOptions = (opts) => { + const { strictSSL, ...options } = { ...opts } + options.method = options.method ? 
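+  // normalize the method up front; the cache and retry logic downstream
+  // compare against uppercase strings like 'GET' and 'POST'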
options.method.toUpperCase() : 'GET' + options.rejectUnauthorized = strictSSL !== false + + if (!options.retry) { + options.retry = { retries: 0 } + } else if (typeof options.retry === 'string') { + const retries = parseInt(options.retry, 10) + if (isFinite(retries)) { + options.retry = { retries } + } else { + options.retry = { retries: 0 } + } + } else if (typeof options.retry === 'number') { + options.retry = { retries: options.retry } + } else { + options.retry = { retries: 0, ...options.retry } + } + + options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } + + options.cache = options.cache || 'default' + if (options.cache === 'default') { + const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { + return conditionalHeaders.includes(name.toLowerCase()) + }) + if (hasConditionalHeader) { + options.cache = 'no-store' + } + } + + options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] + + // cacheManager is deprecated, but if it's set and + // cachePath is not we should copy it to the new field + if (options.cacheManager && !options.cachePath) { + options.cachePath = options.cacheManager + } + + return options +} + +module.exports = configureOptions diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js new file mode 100644 index 00000000000000..b1d221b2d0ce31 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js @@ -0,0 +1,41 @@ +'use strict' + +const MinipassPipeline = require('minipass-pipeline') + +class CachingMinipassPipeline extends MinipassPipeline { + #events = [] + #data = new Map() + + constructor (opts, ...streams) { + // CRITICAL: do NOT pass the streams to the call to super(), this will start + // the flow of data and potentially cause the events we need to catch to emit + // before we've finished our own setup. 
instead we call super() with no args, + // finish our setup, and then push the streams into ourselves to start the + // data flow + super() + this.#events = opts.events + + /* istanbul ignore next - coverage disabled because this is pointless to test here */ + if (streams.length) { + this.push(...streams) + } + } + + on (event, handler) { + if (this.#events.includes(event) && this.#data.has(event)) { + return handler(...this.#data.get(event)) + } + + return super.on(event, handler) + } + + emit (event, ...data) { + if (this.#events.includes(event)) { + this.#data.set(event, data) + } + + return super.emit(event, ...data) + } +} + +module.exports = CachingMinipassPipeline diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js new file mode 100644 index 00000000000000..8554564074de6e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js @@ -0,0 +1,131 @@ +const { Minipass } = require('minipass') +const fetch = require('minipass-fetch') +const promiseRetry = require('promise-retry') +const ssri = require('ssri') +const { log } = require('proc-log') + +const CachingMinipassPipeline = require('./pipeline.js') +const { getAgent } = require('@npmcli/agent') +const pkg = require('../package.json') + +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) + 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + // from @npmcli/agent + 'ECONNECTIONTIMEOUT', + 'EIDLETIMEOUT', + 'ERESPONSETIMEOUT', + 'ETRANSFERTIMEOUT', + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) + // EINVALIDPROXY // invalid protocol from @npmcli/agent + // EINVALIDRESPONSE // invalid status code from @npmcli/agent +] + +const RETRY_TYPES = [ + 'request-timeout', +] + +// make a request directly to the remote source, +// retrying certain classes of errors as well as +// following redirects (through the cache if necessary) +// and verifying response integrity +const remoteFetch = (request, options) => { + const agent = getAgent(request.url, options) + if (!request.headers.has('connection')) { + request.headers.set('connection', agent ? 
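+    // a pooling agent can reuse sockets across requests, so advertise
+    // keep-alive; with no agent each socket may as well be closed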
'keep-alive' : 'close') + } + + if (!request.headers.has('user-agent')) { + request.headers.set('user-agent', USER_AGENT) + } + + // keep our own options since we're overriding the agent + // and the redirect mode + const _opts = { + ...options, + agent, + redirect: 'manual', + } + + return promiseRetry(async (retryHandler, attemptNum) => { + const req = new fetch.Request(request, _opts) + try { + let res = await fetch(req, _opts) + if (_opts.integrity && res.status === 200) { + // we got a 200 response and the user has specified an expected + // integrity value, so wrap the response in an ssri stream to verify it + const integrityStream = ssri.integrityStream({ + algorithms: _opts.algorithms, + integrity: _opts.integrity, + size: _opts.size, + }) + const pipeline = new CachingMinipassPipeline({ + events: ['integrity', 'size'], + }, res.body, integrityStream) + // we also propagate the integrity and size events out to the pipeline so we can use + // this new response body as an integrityEmitter for cacache + integrityStream.on('integrity', i => pipeline.emit('integrity', i)) + integrityStream.on('size', s => pipeline.emit('size', s)) + res = new fetch.Response(pipeline, res) + // set an explicit flag so we know if our response body will emit integrity and size + res.body.hasIntegrityEmitter = true + } + + res.headers.set('x-fetch-attempts', attemptNum) + + // do not retry POST requests, or requests with a streaming body + // do retry requests with a 408, 420, 429 or 500+ status in the response + const isStream = Minipass.isStream(req.body) + const isRetriable = req.method !== 'POST' && + !isStream && + ([408, 420, 429].includes(res.status) || res.status >= 500) + + if (isRetriable) { + if (typeof options.onRetry === 'function') { + options.onRetry(res) + } + + /* eslint-disable-next-line max-len */ + log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`) + return retryHandler(res) + } + + return res + } catch (err) { + const code = (err.code === 'EPROMISERETRY') + ? 
err.retried.code + : err.code + + // err.retried will be the thing that was thrown from above + // if it's a response, we just got a bad status code and we + // can re-throw to allow the retry + const isRetryError = err.retried instanceof fetch.Response || + (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) + + if (req.method === 'POST' || isRetryError) { + throw err + } + + if (typeof options.onRetry === 'function') { + options.onRetry(err) + } + + log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`) + return retryHandler(err) + } + }, options.retry).catch((err) => { + // don't reject for http errors, just return them + if (err.status >= 400 && err.type !== 'system') { + return err + } + + throw err + }) +} + +module.exports = remoteFetch diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json new file mode 100644 index 00000000000000..7adb4d1e7f9719 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json @@ -0,0 +1,75 @@ +{ + "name": "make-fetch-happen", + "version": "13.0.1", + "description": "Opinionated, caching, retrying fetch client", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "posttest": "npm run lint", + "eslint": "eslint", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/make-fetch-happen.git" + }, + "keywords": [ + "http", + "request", + "fetch", + "mean girls", + "caching", + "cache", + "subresource integrity" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.4", + "nock": "^13.2.4", + "safe-buffer": "^5.2.1", + "standard-version": "^9.3.2", + "tap": "^16.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "tap": { + "color": 1, + "files": "test/*.js", + "check-coverage": true, + "timeout": 60, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.4", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE new file mode 100644 index 00000000000000..3c3410cdc12ee3 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE @@ -0,0 +1,28 @@ +The MIT License (MIT) + +Copyright (c) Isaac Z. 
Schlueter and Contributors +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +--- + +Note: This is a derivative work based on "node-fetch" by David Frank, +modified and distributed under the terms of the MIT license above. +https://github.com/bitinn/node-fetch diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js new file mode 100644 index 00000000000000..b18f643269e375 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js @@ -0,0 +1,17 @@ +'use strict' +class AbortError extends Error { + constructor (message) { + super(message) + this.code = 'FETCH_ABORTED' + this.type = 'aborted' + Error.captureStackTrace(this, this.constructor) + } + + get name () { + return 'AbortError' + } + + // don't allow name to be overridden, but don't throw either + set name (s) {} +} +module.exports = AbortError diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js new file mode 100644 index 00000000000000..121b1730102e72 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js @@ -0,0 +1,97 @@ +'use strict' +const { Minipass } = require('minipass') +const TYPE = Symbol('type') +const BUFFER = Symbol('buffer') + +class Blob { + constructor (blobParts, options) { + this[TYPE] = '' + + const buffers = [] + let size = 0 + + if (blobParts) { + const a = blobParts + const length = Number(a.length) + for (let i = 0; i < length; i++) { + const element = a[i] + const buffer = element instanceof Buffer ? element + : ArrayBuffer.isView(element) + ? Buffer.from(element.buffer, element.byteOffset, element.byteLength) + : element instanceof ArrayBuffer ? Buffer.from(element) + : element instanceof Blob ? element[BUFFER] + : typeof element === 'string' ? 
Buffer.from(element) + : Buffer.from(String(element)) + size += buffer.length + buffers.push(buffer) + } + } + + this[BUFFER] = Buffer.concat(buffers, size) + + const type = options && options.type !== undefined + && String(options.type).toLowerCase() + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type + } + } + + get size () { + return this[BUFFER].length + } + + get type () { + return this[TYPE] + } + + text () { + return Promise.resolve(this[BUFFER].toString()) + } + + arrayBuffer () { + const buf = this[BUFFER] + const off = buf.byteOffset + const len = buf.byteLength + const ab = buf.buffer.slice(off, off + len) + return Promise.resolve(ab) + } + + stream () { + return new Minipass().end(this[BUFFER]) + } + + slice (start, end, type) { + const size = this.size + const relativeStart = start === undefined ? 0 + : start < 0 ? Math.max(size + start, 0) + : Math.min(start, size) + const relativeEnd = end === undefined ? size + : end < 0 ? Math.max(size + end, 0) + : Math.min(end, size) + const span = Math.max(relativeEnd - relativeStart, 0) + + const buffer = this[BUFFER] + const slicedBuffer = buffer.slice( + relativeStart, + relativeStart + span + ) + const blob = new Blob([], { type }) + blob[BUFFER] = slicedBuffer + return blob + } + + get [Symbol.toStringTag] () { + return 'Blob' + } + + static get BUFFER () { + return BUFFER + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, +}) + +module.exports = Blob diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js new file mode 100644 index 00000000000000..62286bd1de0d91 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js @@ -0,0 +1,350 @@ +'use strict' +const { Minipass } = require('minipass') +const MinipassSized = require('minipass-sized') + +const Blob = require('./blob.js') +const { BUFFER } = Blob +const FetchError = require('./fetch-error.js') + +// optional dependency on 'encoding' +let convert +try { + convert = require('encoding').convert +} catch (e) { + // defer error until textConverted is called +} + +const INTERNALS = Symbol('Body internals') +const CONSUME_BODY = Symbol('consumeBody') + +class Body { + constructor (bodyArg, options = {}) { + const { size = 0, timeout = 0 } = options + const body = bodyArg === undefined || bodyArg === null ? null + : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString()) + : isBlob(bodyArg) ? bodyArg + : Buffer.isBuffer(bodyArg) ? bodyArg + : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]' + ? Buffer.from(bodyArg) + : ArrayBuffer.isView(bodyArg) + ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength) + : Minipass.isStream(bodyArg) ? bodyArg + : Buffer.from(String(bodyArg)) + + this[INTERNALS] = { + body, + disturbed: false, + error: null, + } + + this.size = size + this.timeout = timeout + + if (Minipass.isStream(body)) { + body.on('error', er => { + const error = er.name === 'AbortError' ? 
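+        // abort errors are passed through as-is; anything else is wrapped
+        // as a system-level FetchError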
er
+          : new FetchError(`Invalid response while trying to fetch ${
+            this.url}: ${er.message}`, 'system', er)
+        this[INTERNALS].error = error
+      })
+    }
+  }
+
+  get body () {
+    return this[INTERNALS].body
+  }
+
+  get bodyUsed () {
+    return this[INTERNALS].disturbed
+  }
+
+  arrayBuffer () {
+    return this[CONSUME_BODY]().then(buf =>
+      buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength))
+  }
+
+  blob () {
+    const ct = this.headers && this.headers.get('content-type') || ''
+    return this[CONSUME_BODY]().then(buf => Object.assign(
+      new Blob([], { type: ct.toLowerCase() }),
+      { [BUFFER]: buf }
+    ))
+  }
+
+  async json () {
+    const buf = await this[CONSUME_BODY]()
+    try {
+      return JSON.parse(buf.toString())
+    } catch (er) {
+      throw new FetchError(
+        `invalid json response body at ${this.url} reason: ${er.message}`,
+        'invalid-json'
+      )
+    }
+  }
+
+  text () {
+    return this[CONSUME_BODY]().then(buf => buf.toString())
+  }
+
+  buffer () {
+    return this[CONSUME_BODY]()
+  }
+
+  textConverted () {
+    return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers))
+  }
+
+  [CONSUME_BODY] () {
+    if (this[INTERNALS].disturbed) {
+      return Promise.reject(new TypeError(`body used already for: ${
+        this.url}`))
+    }
+
+    this[INTERNALS].disturbed = true
+
+    if (this[INTERNALS].error) {
+      return Promise.reject(this[INTERNALS].error)
+    }
+
+    // body is null
+    if (this.body === null) {
+      return Promise.resolve(Buffer.alloc(0))
+    }
+
+    if (Buffer.isBuffer(this.body)) {
+      return Promise.resolve(this.body)
+    }
+
+    const upstream = isBlob(this.body) ? this.body.stream() : this.body
+
+    /* istanbul ignore if: should never happen */
+    if (!Minipass.isStream(upstream)) {
+      return Promise.resolve(Buffer.alloc(0))
+    }
+
+    const stream = this.size && upstream instanceof MinipassSized ? upstream
+      : !this.size && upstream instanceof Minipass &&
+        !(upstream instanceof MinipassSized) ? upstream
+      : this.size ? new MinipassSized({ size: this.size })
+      : new Minipass()
+
+    // allow timeout on slow response body, but only if the stream is still writable. this
+    // makes the timeout center on the socket stream from lib/index.js rather than the
+    // intermediary minipass stream we create to receive the data
+    const resTimeout = this.timeout && stream.writable ? setTimeout(() => {
+      stream.emit('error', new FetchError(
+        `Response timeout while trying to fetch ${
+          this.url} (over ${this.timeout}ms)`, 'body-timeout'))
+    }, this.timeout) : null
+
+    // do not keep the process open just for this timeout, even
+    // though we expect it'll get cleared eventually.
+    if (resTimeout && resTimeout.unref) {
+      resTimeout.unref()
+    }
+
+    // do the pipe in the promise, because the pipe() can send too much
+    // data through right away and upset the MP Sized object
+    return new Promise((resolve) => {
+      // if the stream is some other kind of stream, then pipe through a MP
+      // so we can collect it more easily.
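+      // (forwarding upstream errors here keeps a broken source stream from
+      // stalling the concat() below)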
+ if (stream !== upstream) { + upstream.on('error', er => stream.emit('error', er)) + upstream.pipe(stream) + } + resolve() + }).then(() => stream.concat()).then(buf => { + clearTimeout(resTimeout) + return buf + }).catch(er => { + clearTimeout(resTimeout) + // request was aborted, reject with this Error + if (er.name === 'AbortError' || er.name === 'FetchError') { + throw er + } else if (er.name === 'RangeError') { + throw new FetchError(`Could not create Buffer from response body for ${ + this.url}: ${er.message}`, 'system', er) + } else { + // other errors, such as incorrect content-encoding or content-length + throw new FetchError(`Invalid response body while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + } + }) + } + + static clone (instance) { + if (instance.bodyUsed) { + throw new Error('cannot clone body after it is used') + } + + const body = instance.body + + // check that body is a stream and not form-data object + // NB: can't clone the form-data object without having it as a dependency + if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') { + // create a dedicated tee stream so that we don't lose data + // potentially sitting in the body stream's buffer by writing it + // immediately to p1 and not having it for p2. + const tee = new Minipass() + const p1 = new Minipass() + const p2 = new Minipass() + tee.on('error', er => { + p1.emit('error', er) + p2.emit('error', er) + }) + body.on('error', er => tee.emit('error', er)) + tee.pipe(p1) + tee.pipe(p2) + body.pipe(tee) + // set instance body to one fork, return the other + instance[INTERNALS].body = p1 + return p2 + } else { + return instance.body + } + } + + static extractContentType (body) { + return body === null || body === undefined ? null + : typeof body === 'string' ? 'text/plain;charset=UTF-8' + : isURLSearchParams(body) + ? 'application/x-www-form-urlencoded;charset=UTF-8' + : isBlob(body) ? body.type || null + : Buffer.isBuffer(body) ? null + : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null + : ArrayBuffer.isView(body) ? null + : typeof body.getBoundary === 'function' + ? `multipart/form-data;boundary=${body.getBoundary()}` + : Minipass.isStream(body) ? null + : 'text/plain;charset=UTF-8' + } + + static getTotalBytes (instance) { + const { body } = instance + return (body === null || body === undefined) ? 0 + : isBlob(body) ? body.size + : Buffer.isBuffer(body) ? body.length + : body && typeof body.getLengthSync === 'function' && ( + // detect form data input from form-data module + body._lengthRetrievers && + /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x + body.hasKnownLength && body.hasKnownLength()) // 2.x + ? body.getLengthSync() + : null + } + + static writeToStream (dest, instance) { + const { body } = instance + + if (body === null || body === undefined) { + dest.end() + } else if (Buffer.isBuffer(body) || typeof body === 'string') { + dest.end(body) + } else { + // body is stream or blob + const stream = isBlob(body) ? body.stream() : body + stream.on('error', er => dest.emit('error', er)).pipe(dest) + } + + return dest + } +} + +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true }, +}) + +const isURLSearchParams = obj => + // Duck-typing as a necessary condition. 
(typeof obj !== 'object' ||
+    typeof obj.append !== 'function' ||
+    typeof obj.delete !== 'function' ||
+    typeof obj.get !== 'function' ||
+    typeof obj.getAll !== 'function' ||
+    typeof obj.has !== 'function' ||
+    typeof obj.set !== 'function') ? false
+    // Brand-checking and more duck-typing as optional condition.
+    : obj.constructor.name === 'URLSearchParams' ||
+      Object.prototype.toString.call(obj) === '[object URLSearchParams]' ||
+      typeof obj.sort === 'function'
+
+const isBlob = obj =>
+  typeof obj === 'object' &&
+  typeof obj.arrayBuffer === 'function' &&
+  typeof obj.type === 'string' &&
+  typeof obj.stream === 'function' &&
+  typeof obj.constructor === 'function' &&
+  typeof obj.constructor.name === 'string' &&
+  /^(Blob|File)$/.test(obj.constructor.name) &&
+  /^(Blob|File)$/.test(obj[Symbol.toStringTag])
+
+const convertBody = (buffer, headers) => {
+  /* istanbul ignore if */
+  if (typeof convert !== 'function') {
+    throw new Error('The package `encoding` must be installed to use the textConverted() function')
+  }
+
+  const ct = headers && headers.get('content-type')
+  let charset = 'utf-8'
+  let res
+
+  // header
+  if (ct) {
+    res = /charset=([^;]*)/i.exec(ct)
+  }
+
+  // no charset in content type, peek at response body for at most 1024 bytes
+  const str = buffer.slice(0, 1024).toString()
+
+  // html5
+  if (!res && str) {
+    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str)
+  }
+
+  // html4
+  if (!res && str) {
+    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str)
+
+    if (res) {
+      res = /charset=(.*)/i.exec(res.pop())
+    }
+  }
+
+  // xml
+  if (!res && str) {
+    res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str)
+  }
+
+  // found charset
+  if (res) {
+    charset = res.pop()
+
+    // prevent decode issues when sites use incorrect encoding
+    // (e.g. gbk encoding label should be replaced with gb18030)
+    if (charset === 'gb2312' || charset === 'gbk') {
+      charset = 'gb18030'
+    }
+  }
+
+  // turn raw buffers into a single utf-8 buffer
+  return convert(
+    buffer,
+    'UTF-8',
+    charset
+  ).toString()
+}
+
+module.exports = Body
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js
new file mode 100644
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js
@@ -0,0 +1,32 @@
+'use strict'
+class FetchError extends Error {
+  constructor (message, type, systemError) {
+    super(message)
+    this.code = 'FETCH_ERROR'
+
+    // pick up code, expected, path, ...
+    if (systemError) {
+      this.code = this.errno = systemError.code
+      this.expect = systemError.expect
+      this.found = systemError.found
+      this.path = systemError.path
+    }
+
+    this.type = this.code === 'EBADSIZE' && this.found > this.expect
+      ? 'max-size' : type
+    this.message = message
+    Error.captureStackTrace(this, this.constructor)
+  }
+
+  get name () {
+    return 'FetchError'
+  }
+
+  // don't allow name to be overwritten
+  set name (n) {}
+
+  get [Symbol.toStringTag] () {
+    return 'FetchError'
+  }
+}
+module.exports = FetchError
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js
new file mode 100644
index 00000000000000..dd6e854d5ba399
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js
@@ -0,0 +1,267 @@
+'use strict'
+const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/
+const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/
+
+const validateName = name => {
+  name = `${name}`
+  if (invalidTokenRegex.test(name) || name === '') {
+    throw new TypeError(`${name} is not a legal HTTP header name`)
+  }
+}
+
+const validateValue = value => {
+  value = `${value}`
+  if (invalidHeaderCharRegex.test(value)) {
+    throw new TypeError(`${value} is not a legal HTTP header value`)
+  }
+}
+
+const find = (map, name) => {
+  name = name.toLowerCase()
+  for (const key in map) {
+    if (key.toLowerCase() === name) {
+      return key
+    }
+  }
+  return undefined
+}
+
+const MAP = Symbol('map')
+class Headers {
+  constructor (init = undefined) {
+    this[MAP] = Object.create(null)
+    if (init instanceof Headers) {
+      const rawHeaders = init.raw()
+      const headerNames = Object.keys(rawHeaders)
+      for (const headerName of headerNames) {
+        for (const value of rawHeaders[headerName]) {
+          this.append(headerName, value)
+        }
+      }
+      return
+    }
+
+    // no-op
+    if (init === undefined || init === null) {
+      return
+    }
+
+    if (typeof init === 'object') {
+      const method = init[Symbol.iterator]
+      if (method !== null && method !== undefined) {
+        if (typeof method !== 'function') {
+          throw new TypeError('Header pairs must be iterable')
+        }
+
+        // sequence<sequence<ByteString>>
+        // Note: per spec we have to first exhaust the lists then process them
+        const pairs = []
+        for (const pair of init) {
+          if (typeof pair !== 'object' ||
+              typeof pair[Symbol.iterator] !== 'function') {
+            throw new TypeError('Each header pair must be iterable')
+          }
+          const arrPair = Array.from(pair)
+          if (arrPair.length !== 2) {
+            throw new TypeError('Each header pair must be a name/value tuple')
+          }
+          pairs.push(arrPair)
+        }
+
+        for (const pair of pairs) {
+          this.append(pair[0], pair[1])
+        }
+      } else {
+        // record<ByteString, ByteString>
+        for (const key of Object.keys(init)) {
+          this.append(key, init[key])
+        }
+      }
+    } else {
+      throw new TypeError('Provided initializer must be an object')
+    }
+  }
+
+  get (name) {
+    name = `${name}`
+    validateName(name)
+    const key = find(this[MAP], name)
+    if (key === undefined) {
+      return null
+    }
+
+    return this[MAP][key].join(', ')
+  }
+
+  forEach (callback, thisArg = undefined) {
+    let pairs = getHeaders(this)
+    for (let i = 0; i < pairs.length; i++) {
+      const [name, value] = pairs[i]
+      callback.call(thisArg, value, name, this)
+      // refresh in case the callback added more headers
+      pairs = getHeaders(this)
+    }
+  }
+
+  set (name, value) {
+    name = `${name}`
+    value = `${value}`
+    validateName(name)
+    validateValue(value)
+    const key = find(this[MAP], name)
+    this[MAP][key !== undefined ? key : name] = [value]
+  }
+
+  append (name, value) {
+    name = `${name}`
+    value = `${value}`
+    validateName(name)
+    validateValue(value)
+    const key = find(this[MAP], name)
+    if (key !== undefined) {
+      this[MAP][key].push(value)
+    } else {
+      this[MAP][name] = [value]
+    }
+  }
+
+  has (name) {
+    name = `${name}`
+    validateName(name)
+    return find(this[MAP], name) !== undefined
+  }
+
+  delete (name) {
+    name = `${name}`
+    validateName(name)
+    const key = find(this[MAP], name)
+    if (key !== undefined) {
+      delete this[MAP][key]
+    }
+  }
+
+  raw () {
+    return this[MAP]
+  }
+
+  keys () {
+    return new HeadersIterator(this, 'key')
+  }
+
+  values () {
+    return new HeadersIterator(this, 'value')
+  }
+
+  [Symbol.iterator] () {
+    return new HeadersIterator(this, 'key+value')
+  }
+
+  entries () {
+    return new HeadersIterator(this, 'key+value')
+  }
+
+  get [Symbol.toStringTag] () {
+    return 'Headers'
+  }
+
+  static exportNodeCompatibleHeaders (headers) {
+    const obj = Object.assign(Object.create(null), headers[MAP])
+
+    // http.request() only supports string as Host header. This hack makes
+    // specifying custom Host header possible.
+    const hostHeaderKey = find(headers[MAP], 'Host')
+    if (hostHeaderKey !== undefined) {
+      obj[hostHeaderKey] = obj[hostHeaderKey][0]
+    }
+
+    return obj
+  }
+
+  static createHeadersLenient (obj) {
+    const headers = new Headers()
+    for (const name of Object.keys(obj)) {
+      if (invalidTokenRegex.test(name)) {
+        continue
+      }
+
+      if (Array.isArray(obj[name])) {
+        for (const val of obj[name]) {
+          if (invalidHeaderCharRegex.test(val)) {
+            continue
+          }
+
+          if (headers[MAP][name] === undefined) {
+            headers[MAP][name] = [val]
+          } else {
+            headers[MAP][name].push(val)
+          }
+        }
+      } else if (!invalidHeaderCharRegex.test(obj[name])) {
+        headers[MAP][name] = [obj[name]]
+      }
+    }
+    return headers
+  }
+}
+
+Object.defineProperties(Headers.prototype, {
+  get: { enumerable: true },
+  forEach: { enumerable: true },
+  set: { enumerable: true },
+  append: { enumerable: true },
+  has: { enumerable: true },
+  delete: { enumerable: true },
+  keys: { enumerable: true },
+  values: { enumerable: true },
+  entries: { enumerable: true },
+})
+
+const getHeaders = (headers, kind = 'key+value') =>
+  Object.keys(headers[MAP]).sort().map(
+    kind === 'key' ? k => k.toLowerCase()
+    : kind === 'value' ?
k => headers[MAP][k].join(', ') + : k => [k.toLowerCase(), headers[MAP][k].join(', ')] + ) + +const INTERNAL = Symbol('internal') + +class HeadersIterator { + constructor (target, kind) { + this[INTERNAL] = { + target, + kind, + index: 0, + } + } + + get [Symbol.toStringTag] () { + return 'HeadersIterator' + } + + next () { + /* istanbul ignore if: should be impossible */ + if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) { + throw new TypeError('Value of `this` is not a HeadersIterator') + } + + const { target, kind, index } = this[INTERNAL] + const values = getHeaders(target, kind) + const len = values.length + if (index >= len) { + return { + value: undefined, + done: true, + } + } + + this[INTERNAL].index++ + + return { value: values[index], done: false } + } +} + +// manually extend because 'extends' requires a ctor +Object.setPrototypeOf(HeadersIterator.prototype, + Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))) + +module.exports = Headers diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js new file mode 100644 index 00000000000000..da402161670e65 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js @@ -0,0 +1,377 @@ +'use strict' +const { URL } = require('url') +const http = require('http') +const https = require('https') +const zlib = require('minizlib') +const { Minipass } = require('minipass') + +const Body = require('./body.js') +const { writeToStream, getTotalBytes } = Body +const Response = require('./response.js') +const Headers = require('./headers.js') +const { createHeadersLenient } = Headers +const Request = require('./request.js') +const { getNodeRequestOptions } = Request +const FetchError = require('./fetch-error.js') +const AbortError = require('./abort-error.js') + +// XXX this should really be split up and unit-ized for easier testing +// and better DRY implementation of data/http request aborting +const fetch = async (url, opts) => { + if (/^data:/.test(url)) { + const request = new Request(url, opts) + // delay 1 promise tick so that the consumer can abort right away + return Promise.resolve().then(() => new Promise((resolve, reject) => { + let type, data + try { + const { pathname, search } = new URL(url) + const split = pathname.split(',') + if (split.length < 2) { + throw new Error('invalid data: URI') + } + const mime = split.shift() + const base64 = /;base64$/.test(mime) + type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime + const rawData = decodeURIComponent(split.join(',') + search) + data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData) + } catch (er) { + return reject(new FetchError(`[${request.method}] ${ + request.url} invalid URL, ${er.message}`, 'system', er)) + } + + const { signal } = request + if (signal && signal.aborted) { + return reject(new AbortError('The user aborted a request.')) + } + + const headers = { 'Content-Length': data.length } + if (type) { + headers['Content-Type'] = type + } + return resolve(new Response(data, { headers })) + })) + } + + return new Promise((resolve, reject) => { + // build request object + const request = new Request(url, opts) + let options + try { + options = getNodeRequestOptions(request) + } catch (er) { + return reject(er) + } + + const send = (options.protocol === 'https:' ? 
https : http).request + const { signal } = request + let response = null + const abort = () => { + const error = new AbortError('The user aborted a request.') + reject(error) + if (Minipass.isStream(request.body) && + typeof request.body.destroy === 'function') { + request.body.destroy(error) + } + if (response && response.body) { + response.body.emit('error', error) + } + } + + if (signal && signal.aborted) { + return abort() + } + + const abortAndFinalize = () => { + abort() + finalize() + } + + const finalize = () => { + req.abort() + if (signal) { + signal.removeEventListener('abort', abortAndFinalize) + } + clearTimeout(reqTimeout) + } + + // send request + const req = send(options) + + if (signal) { + signal.addEventListener('abort', abortAndFinalize) + } + + let reqTimeout = null + if (request.timeout) { + req.once('socket', () => { + reqTimeout = setTimeout(() => { + reject(new FetchError(`network timeout at: ${ + request.url}`, 'request-timeout')) + finalize() + }, request.timeout) + }) + } + + req.on('error', er => { + // if a 'response' event is emitted before the 'error' event, then by the + // time this handler is run it's too late to reject the Promise for the + // response. instead, we forward the error event to the response stream + // so that the error will surface to the user when they try to consume + // the body. this is done as a side effect of aborting the request except + // for in windows, where we must forward the event manually, otherwise + // there is no longer a ref'd socket attached to the request and the + // stream never ends so the event loop runs out of work and the process + // exits without warning. + // coverage skipped here due to the difficulty in testing + // istanbul ignore next + if (req.res) { + req.res.emit('error', er) + } + reject(new FetchError(`request to ${request.url} failed, reason: ${ + er.message}`, 'system', er)) + finalize() + }) + + req.on('response', res => { + clearTimeout(reqTimeout) + + const headers = createHeadersLenient(res.headers) + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location') + + // HTTP fetch step 5.3 + let locationURL = null + try { + locationURL = location === null ? null : new URL(location, request.url).toString() + } catch { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + /* eslint-disable-next-line max-len */ + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')) + finalize() + return + } + } + + // HTTP fetch step 5.5 + if (request.redirect === 'error') { + reject(new FetchError('uri requested responds with a redirect, ' + + `redirect mode is set to error: ${request.url}`, 'no-redirect')) + finalize() + return + } else if (request.redirect === 'manual') { + // node-fetch-specific step: make manual redirect a bit easier to + // use by setting the Location header value to the resolved URL. 
+ if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL) + } catch (err) { + /* istanbul ignore next: nodejs server prevent invalid + response headers, we can't test this through normal + request */ + reject(err) + } + } + } else if (request.redirect === 'follow' && locationURL !== null) { + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${ + request.url}`, 'max-redirect')) + finalize() + return + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && + request.body && + getTotalBytes(request) === null) { + reject(new FetchError( + 'Cannot follow redirect with body being a readable stream', + 'unsupported-redirect' + )) + finalize() + return + } + + // Update host due to redirection + request.headers.set('host', (new URL(locationURL)).host) + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + } + + // if the redirect is to a new hostname, strip the authorization and cookie headers + const parsedOriginal = new URL(request.url) + const parsedRedirect = new URL(locationURL) + if (parsedOriginal.hostname !== parsedRedirect.hostname) { + requestOpts.headers.delete('authorization') + requestOpts.headers.delete('cookie') + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || ( + (res.statusCode === 301 || res.statusCode === 302) && + request.method === 'POST' + )) { + requestOpts.method = 'GET' + requestOpts.body = undefined + requestOpts.headers.delete('content-length') + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))) + finalize() + return + } + } // end if(isRedirect) + + // prepare response + res.once('end', () => + signal && signal.removeEventListener('abort', abortAndFinalize)) + + const body = new Minipass() + // if an error occurs, either on the response stream itself, on one of the + // decoder streams, or a response length timeout from the Body class, we + // forward the error through to our internal body stream. If we see an + // error event on that, we call finalize to abort the request and ensure + // we don't leave a socket believing a request is in flight. + // this is difficult to test, so lacks specific coverage. + body.on('error', finalize) + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + res.on('error', /* istanbul ignore next */ er => body.emit('error', er)) + res.on('data', (chunk) => body.write(chunk)) + res.on('end', () => body.end()) + + const responseOptions = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter, + trailer: new Promise(resolveTrailer => + res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))), + } + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding') + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. 
no content response (204) + // 5. content not modified response (304) + if (!request.compress || + request.method === 'HEAD' || + codings === null || + res.statusCode === 204 || + res.statusCode === 304) { + response = new Response(body, responseOptions) + resolve(response) + return + } + + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH, + } + + // for gzip + if (codings === 'gzip' || codings === 'x-gzip') { + const unzip = new zlib.Gunzip(zlibOptions) + response = new Response( + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip), + responseOptions + ) + resolve(response) + return + } + + // for deflate + if (codings === 'deflate' || codings === 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new Minipass()) + raw.once('data', chunk => { + // see http://stackoverflow.com/questions/37519828 + const decoder = (chunk[0] & 0x0F) === 0x08 + ? new zlib.Inflate() + : new zlib.InflateRaw() + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + }) + return + } + + // for br + if (codings === 'br') { + // ignoring coverage so tests don't have to fake support (or lack of) for brotli + // istanbul ignore next + try { + var decoder = new zlib.BrotliDecompress() + } catch (err) { + reject(err) + finalize() + return + } + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. 
+ body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + return + } + + // otherwise, use response as-is + response = new Response(body, responseOptions) + resolve(response) + }) + + writeToStream(req, request) + }) +} + +module.exports = fetch + +fetch.isRedirect = code => + code === 301 || + code === 302 || + code === 303 || + code === 307 || + code === 308 + +fetch.Headers = Headers +fetch.Request = Request +fetch.Response = Response +fetch.FetchError = FetchError +fetch.AbortError = AbortError diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js new file mode 100644 index 00000000000000..054439e6699107 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js @@ -0,0 +1,282 @@ +'use strict' +const { URL } = require('url') +const { Minipass } = require('minipass') +const Headers = require('./headers.js') +const { exportNodeCompatibleHeaders } = Headers +const Body = require('./body.js') +const { clone, extractContentType, getTotalBytes } = Body + +const version = require('../package.json').version +const defaultUserAgent = + `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)` + +const INTERNALS = Symbol('Request internals') + +const isRequest = input => + typeof input === 'object' && typeof input[INTERNALS] === 'object' + +const isAbortSignal = signal => { + const proto = ( + signal + && typeof signal === 'object' + && Object.getPrototypeOf(signal) + ) + return !!(proto && proto.constructor.name === 'AbortSignal') +} + +class Request extends Body { + constructor (input, init = {}) { + const parsedURL = isRequest(input) ? new URL(input.url) + : input && input.href ? new URL(input.href) + : new URL(`${input}`) + + if (isRequest(input)) { + init = { ...input[INTERNALS], ...init } + } else if (!input || typeof input === 'string') { + input = {} + } + + const method = (init.method || input.method || 'GET').toUpperCase() + const isGETHEAD = method === 'GET' || method === 'HEAD' + + if ((init.body !== null && init.body !== undefined || + isRequest(input) && input.body !== null) && isGETHEAD) { + throw new TypeError('Request with GET/HEAD method cannot have body') + } + + const inputBody = init.body !== null && init.body !== undefined ? init.body + : isRequest(input) && input.body !== null ? clone(input) + : null + + super(inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0, + }) + + const headers = new Headers(init.headers || input.headers || {}) + + if (inputBody !== null && inputBody !== undefined && + !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody) + if (contentType) { + headers.append('Content-Type', contentType) + } + } + + const signal = 'signal' in init ? 
init.signal + : null + + if (signal !== null && signal !== undefined && !isAbortSignal(signal)) { + throw new TypeError('Expected signal must be an instanceof AbortSignal') + } + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0', + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = init + + this[INTERNALS] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow + : input.follow !== undefined ? input.follow + : 20 + this.compress = init.compress !== undefined ? init.compress + : input.compress !== undefined ? input.compress + : true + this.counter = init.counter || input.counter || 0 + this.agent = init.agent || input.agent + } + + get method () { + return this[INTERNALS].method + } + + get url () { + return this[INTERNALS].parsedURL.toString() + } + + get headers () { + return this[INTERNALS].headers + } + + get redirect () { + return this[INTERNALS].redirect + } + + get signal () { + return this[INTERNALS].signal + } + + clone () { + return new Request(this) + } + + get [Symbol.toStringTag] () { + return 'Request' + } + + static getNodeRequestOptions (request) { + const parsedURL = request[INTERNALS].parsedURL + const headers = new Headers(request[INTERNALS].headers) + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*') + } + + // Basic fetch + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported') + } + + if (request.signal && + Minipass.isStream(request.body) && + typeof request.body.destroy !== 'function') { + throw new Error( + 'Cancellation of streamed requests with AbortSignal is not supported') + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + const contentLengthValue = + (request.body === null || request.body === undefined) && + /^(POST|PUT)$/i.test(request.method) ? '0' + : request.body !== null && request.body !== undefined + ? getTotalBytes(request) + : null + + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue + '') + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', defaultUserAgent) + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate') + } + + const agent = typeof request.agent === 'function' + ? 
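+      // the agent option may be a function; it receives the parsed URL so a
+      // caller can choose a different agent per destination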
request.agent(parsedURL) + : request.agent + + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close') + } + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = request[INTERNALS] + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + // we cannot spread parsedURL directly, so we have to read each property one-by-one + // and map them to the equivalent https?.request() method options + const urlProps = { + auth: parsedURL.username || parsedURL.password + ? `${parsedURL.username}:${parsedURL.password}` + : '', + host: parsedURL.host, + hostname: parsedURL.hostname, + path: `${parsedURL.pathname}${parsedURL.search}`, + port: parsedURL.port, + protocol: parsedURL.protocol, + } + + return { + ...urlProps, + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + timeout: request.timeout, + } + } +} + +module.exports = Request + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true }, +}) diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js new file mode 100644 index 00000000000000..54cb52db3594a7 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js @@ -0,0 +1,90 @@ +'use strict' +const http = require('http') +const { STATUS_CODES } = http + +const Headers = require('./headers.js') +const Body = require('./body.js') +const { clone, extractContentType } = Body + +const INTERNALS = Symbol('Response internals') + +class Response extends Body { + constructor (body = null, opts = {}) { + super(body, opts) + + const status = opts.status || 200 + const headers = new Headers(opts.headers) + + if (body !== null && body !== undefined && !headers.has('Content-Type')) { + const contentType = extractContentType(body) + if (contentType) { + headers.append('Content-Type', contentType) + } + } + + this[INTERNALS] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter, + trailer: Promise.resolve(opts.trailer || new Headers()), + } + } + + get trailer () { + return this[INTERNALS].trailer + } + + get url () { + return this[INTERNALS].url || '' + } + + get status () { + return this[INTERNALS].status + } + + get ok () { + return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300 + } + + get redirected () { + return this[INTERNALS].counter > 0 + } + + get statusText () { + return this[INTERNALS].statusText + } + + get headers () { + return this[INTERNALS].headers + } + + clone () { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected, + trailer: this.trailer, + }) + } + + get [Symbol.toStringTag] () { + return 'Response' + } +} + +module.exports = 
Response + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true }, +}) diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/package.json new file mode 100644 index 00000000000000..d491a7fba126d0 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/package.json @@ -0,0 +1,69 @@ +{ + "name": "minipass-fetch", + "version": "3.0.5", + "description": "An implementation of window.fetch in Node.js using Minipass streams", + "license": "MIT", + "main": "lib/index.js", + "scripts": { + "test:tls-fixtures": "./test/fixtures/tls/setup.sh", + "test": "tap", + "snap": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "tap": { + "coverage-map": "map.js", + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "@ungap/url-search-params": "^0.2.2", + "abort-controller": "^3.0.0", + "abortcontroller-polyfill": "~1.7.3", + "encoding": "^0.1.13", + "form-data": "^4.0.0", + "nock": "^13.2.4", + "parted": "^0.1.1", + "string-to-arraybuffer": "^1.0.2", + "tap": "^16.0.0" + }, + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/minipass-fetch.git" + }, + "keywords": [ + "fetch", + "minipass", + "node-fetch", + "window.fetch" + ], + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.22.0", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/nopt/LICENSE new file mode 100644 index 00000000000000..19129e315fe593 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
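Taken together, the `Request` and `Response` classes above give this vendored minipass-fetch a WHATWG-fetch-style surface over Minipass streams. A minimal usage sketch, assuming this copy keeps upstream minipass-fetch's exports (a default `fetch` function with `Request`, `Response`, and `Headers` attached; the URL is illustrative):

```javascript
// Sketch only: assumes upstream minipass-fetch exports.
const fetch = require('minipass-fetch')

async function main () {
  // Request normalizes the method, headers, signal, and TLS options seen above.
  const req = new fetch.Request('https://registry.npmjs.org/-/ping', {
    method: 'GET',
    headers: { accept: 'application/json' },
  })

  const res = await fetch(req)
  // Response getters mirror the spec: status, statusText, ok, redirected.
  console.log(res.status, res.ok)

  // Body helpers (json/text/buffer) come from the shared Body base class.
  console.log(await res.json())
}

main().catch(console.error)
```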
diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/README.md b/deps/npm/node_modules/node-gyp/node_modules/nopt/README.md
new file mode 100644
index 00000000000000..a99531c04655fe
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/README.md
@@ -0,0 +1,213 @@
+If you want to write an option parser, and have it be good, there are
+two ways to do it. The Right Way, and the Wrong Way.
+
+The Wrong Way is to sit down and write an option parser. We've all done
+that.
+
+The Right Way is to write some complex configurable program with so many
+options that you hit the limit of your frustration just trying to
+manage them all, and defer it with duct-tape solutions until you see
+exactly to the core of the problem, and finally snap and write an
+awesome option parser.
+
+If you want to write an option parser, don't write an option parser.
+Write a package manager, or a source control system, or a service
+restarter, or an operating system. You probably won't end up with a
+good one of those, but if you don't give up, and you are relentless and
+diligent enough in your procrastination, you may just end up with a very
+nice option parser.
+
+## USAGE
+
+```javascript
+// my-program.js
+var nopt = require("nopt")
+  , Stream = require("stream").Stream
+  , path = require("path")
+  , knownOpts = { "foo" : [String, null]
+                , "bar" : [Stream, Number]
+                , "baz" : path
+                , "bloo" : [ "big", "medium", "small" ]
+                , "flag" : Boolean
+                , "pick" : Boolean
+                , "many1" : [String, Array]
+                , "many2" : [path, Array]
+                }
+  , shortHands = { "foofoo" : ["--foo", "Mr. Foo"]
+                 , "b7" : ["--bar", "7"]
+                 , "m" : ["--bloo", "medium"]
+                 , "p" : ["--pick"]
+                 , "f" : ["--flag"]
+                 }
+  // everything is optional.
+  // knownOpts and shorthands default to {}
+  // arg list defaults to process.argv
+  // slice defaults to 2
+  , parsed = nopt(knownOpts, shortHands, process.argv, 2)
+console.log(parsed)
+```
+
+This would give you support for any of the following:
+
+```console
+$ node my-program.js --foo "blerp" --no-flag
+{ "foo" : "blerp", "flag" : false }
+
+$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag
+{ bar: 7, foo: "Mr. Hand", flag: true }
+
+$ node my-program.js --foo "blerp" -f -----p
+{ foo: "blerp", flag: true, pick: true }
+
+$ node my-program.js -fp --foofoo
+{ foo: "Mr. Foo", flag: true, pick: true }
+
+$ node my-program.js --foofoo -- -fp # -- stops the flag parsing.
+{ foo: "Mr. Foo", argv: { remain: ["-fp"] } }
+
+$ node my-program.js --blatzk -fp # unknown opts are ok.
+{ blatzk: true, flag: true, pick: true }
+
+$ node my-program.js --blatzk=1000 -fp # but you need to use = if they have a value
+{ blatzk: 1000, flag: true, pick: true }
+
+$ node my-program.js --no-blatzk -fp # unless they start with "no-"
+{ blatzk: false, flag: true, pick: true }
+
+$ node my-program.js --baz b/a/z # known paths are resolved.
+{ baz: "/Users/isaacs/b/a/z" }
+
+# if Array is one of the types, then it can take many
+# values, and will always be an array. The other types provided
+# specify what types are allowed in the list.
+
+$ node my-program.js --many1 5 --many1 null --many1 foo
+{ many1: ["5", "null", "foo"] }
+
+$ node my-program.js --many2 foo --many2 bar
+{ many2: ["/path/to/foo", "/path/to/bar"] }
+```
+
+Read the tests at the bottom of `lib/nopt.js` for more examples of
+what this puppy can do.
+
+## Types
+
+The following types are supported, and defined on `nopt.typeDefs`:
+
+* String: A normal string. No parsing is done.
+* path: A file system path. Gets resolved against cwd if not absolute.
+* url: A url. If it doesn't parse, it isn't accepted.
+* Number: Must be numeric.
+* Date: Must parse as a date. If it does, and `Date` is one of the options,
+  then it will return a Date object, not a string.
+* Boolean: Must be either `true` or `false`. If an option is a boolean,
+  then it does not need a value, and its presence will imply `true` as
+  the value. To negate boolean flags, do `--no-whatever` or `--whatever
+  false`.
+* NaN: Means that the option is strictly not allowed. Any value will
+  fail.
+* Stream: An object matching the "Stream" class in node. Valuable
+  for use when validating programmatically. (npm uses this to let you
+  supply any WriteStream on the `outfd` and `logfd` config options.)
+* Array: If `Array` is specified as one of the types, then the value
+  will be parsed as a list of options. This means that multiple values
+  can be specified, and that the value will always be an array.
+
+If a type is an array of values not on this list, then those are
+considered valid values. For instance, in the example above, the
+`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`,
+and any other value will be rejected.
+
+When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be
+interpreted as their JavaScript equivalents.
+
+You can also mix types and values, or multiple types, in a list. For
+instance `{ blah: [Number, null] }` would allow a value to be set to
+either a Number or null. When types are ordered, this implies a
+preference, and the first type that can be used to properly interpret
+the value will be used.
+
+To define a new type, add it to `nopt.typeDefs`. Each item in that
+hash is an object with a `type` member and a `validate` method. The
+`type` member is an object that matches what goes in the type list. The
+`validate` method is a function that gets called with `validate(data,
+key, val)`. Validate methods should assign `data[key]` to the valid
+value of `val` if it can be handled properly, or return boolean
+`false` if it cannot.
+
+You can also call `nopt.clean(data, types, typeDefs)` to clean up a
+config object and remove its invalid properties.
+
+## Error Handling
+
+By default, nopt outputs a warning to standard error when invalid values for
+known options are found. You can change this behavior by assigning a method
+to `nopt.invalidHandler`. This method will be called as
+`nopt.invalidHandler(key, val, types)` with the offending key and value.
+
+If no `nopt.invalidHandler` is assigned, then it will `console.error`
+its whining. If it is assigned to boolean `false` then the warning is
+suppressed.
+
+## Abbreviations
+
+Yes, they are supported. If you define options like this:
+
+```javascript
+{ "foolhardyelephants" : Boolean
+, "pileofmonkeys" : Boolean }
+```
+
+Then this will work:
+
+```bash
+node program.js --foolhar --pil
+node program.js --no-f --pileofmon
+# etc.
+```
+
+## Shorthands
+
+Shorthands are a hash of shorter option names to a snippet of args that
+they expand to.
+
+If multiple one-character shorthands are all combined, and the
+combination does not unambiguously match any other option or shorthand,
+then they will be broken up into their constituent parts.
For example: + +```json +{ "s" : ["--loglevel", "silent"] +, "g" : "--global" +, "f" : "--force" +, "p" : "--parseable" +, "l" : "--long" +} +``` + +```bash +npm ls -sgflp +# just like doing this: +npm ls --loglevel silent --global --force --long --parseable +``` + +## The Rest of the args + +The config object returned by nopt is given a special member called +`argv`, which is an object with the following fields: + +* `remain`: The remaining args after all the parsing has occurred. +* `original`: The args as they originally appeared. +* `cooked`: The args after flags and shorthands are expanded. + +## Slicing + +Node programs are called with more or less the exact argv as it appears +in C land, after the v8 and node-specific options have been plucked off. +As such, `argv[0]` is always `node` and `argv[1]` is always the +JavaScript program being run. + +That's usually not very useful to you. So they're sliced off by +default. If you want them, then you can pass in `0` as the last +argument, or any other number that you'd like to slice off the start of +the list. diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/bin/nopt.js b/deps/npm/node_modules/node-gyp/node_modules/nopt/bin/nopt.js new file mode 100755 index 00000000000000..6ed2082064b5ea --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/bin/nopt.js @@ -0,0 +1,29 @@ +#!/usr/bin/env node +const nopt = require('../lib/nopt') +const path = require('path') +console.log('parsed', nopt({ + num: Number, + bool: Boolean, + help: Boolean, + list: Array, + 'num-list': [Number, Array], + 'str-list': [String, Array], + 'bool-list': [Boolean, Array], + str: String, + clear: Boolean, + config: Boolean, + length: Number, + file: path, +}, { + s: ['--str', 'astring'], + b: ['--bool'], + nb: ['--no-bool'], + tft: ['--bool-list', '--no-bool-list', '--bool-list', 'true'], + '?': ['--help'], + h: ['--help'], + H: ['--help'], + n: ['--num', '125'], + c: ['--config'], + l: ['--length'], + f: ['--file'], +}, process.argv, 2)) diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/debug.js b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/debug.js new file mode 100644 index 00000000000000..544ab382ca85c0 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/debug.js @@ -0,0 +1,5 @@ +/* istanbul ignore next */ +module.exports = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG + // eslint-disable-next-line no-console + ? 
(...a) => console.error(...a) + : () => {} diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt-lib.js b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt-lib.js new file mode 100644 index 00000000000000..d3d1de0255ba9b --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt-lib.js @@ -0,0 +1,479 @@ +const abbrev = require('abbrev') +const debug = require('./debug') +const defaultTypeDefs = require('./type-defs') + +const hasOwn = (o, k) => Object.prototype.hasOwnProperty.call(o, k) + +const getType = (k, { types, dynamicTypes }) => { + let hasType = hasOwn(types, k) + let type = types[k] + if (!hasType && typeof dynamicTypes === 'function') { + const matchedType = dynamicTypes(k) + if (matchedType !== undefined) { + type = matchedType + hasType = true + } + } + return [hasType, type] +} + +const isTypeDef = (type, def) => def && type === def +const hasTypeDef = (type, def) => def && type.indexOf(def) !== -1 +const doesNotHaveTypeDef = (type, def) => def && !hasTypeDef(type, def) + +function nopt (args, { + types, + shorthands, + typeDefs, + invalidHandler, + typeDefault, + dynamicTypes, +} = {}) { + debug(types, shorthands, args, typeDefs) + + const data = {} + const argv = { + remain: [], + cooked: args, + original: args.slice(0), + } + + parse(args, data, argv.remain, { typeDefs, types, dynamicTypes, shorthands }) + + // now data is full + clean(data, { types, dynamicTypes, typeDefs, invalidHandler, typeDefault }) + data.argv = argv + + Object.defineProperty(data.argv, 'toString', { + value: function () { + return this.original.map(JSON.stringify).join(' ') + }, + enumerable: false, + }) + + return data +} + +function clean (data, { + types = {}, + typeDefs = {}, + dynamicTypes, + invalidHandler, + typeDefault, +} = {}) { + const StringType = typeDefs.String?.type + const NumberType = typeDefs.Number?.type + const ArrayType = typeDefs.Array?.type + const BooleanType = typeDefs.Boolean?.type + const DateType = typeDefs.Date?.type + + const hasTypeDefault = typeof typeDefault !== 'undefined' + if (!hasTypeDefault) { + typeDefault = [false, true, null] + if (StringType) { + typeDefault.push(StringType) + } + if (ArrayType) { + typeDefault.push(ArrayType) + } + } + + const remove = {} + + Object.keys(data).forEach((k) => { + if (k === 'argv') { + return + } + let val = data[k] + debug('val=%j', val) + const isArray = Array.isArray(val) + let [hasType, rawType] = getType(k, { types, dynamicTypes }) + let type = rawType + if (!isArray) { + val = [val] + } + if (!type) { + type = typeDefault + } + if (isTypeDef(type, ArrayType)) { + type = typeDefault.concat(ArrayType) + } + if (!Array.isArray(type)) { + type = [type] + } + + debug('val=%j', val) + debug('types=', type) + val = val.map((v) => { + // if it's an unknown value, then parse false/true/null/numbers/dates + if (typeof v === 'string') { + debug('string %j', v) + v = v.trim() + if ((v === 'null' && ~type.indexOf(null)) + || (v === 'true' && + (~type.indexOf(true) || hasTypeDef(type, BooleanType))) + || (v === 'false' && + (~type.indexOf(false) || hasTypeDef(type, BooleanType)))) { + v = JSON.parse(v) + debug('jsonable %j', v) + } else if (hasTypeDef(type, NumberType) && !isNaN(v)) { + debug('convert to number', v) + v = +v + } else if (hasTypeDef(type, DateType) && !isNaN(Date.parse(v))) { + debug('convert to date', v) + v = new Date(v) + } + } + + if (!hasType) { + if (!hasTypeDefault) { + return v + } + // if the default type has been passed in then we want to validate the + // unknown 
data key instead of bailing out earlier. we also set the raw + // type which is passed to the invalid handler so that it can be + // determined if during validation if it is unknown vs invalid + rawType = typeDefault + } + + // allow `--no-blah` to set 'blah' to null if null is allowed + if (v === false && ~type.indexOf(null) && + !(~type.indexOf(false) || hasTypeDef(type, BooleanType))) { + v = null + } + + const d = {} + d[k] = v + debug('prevalidated val', d, v, rawType) + if (!validate(d, k, v, rawType, { typeDefs })) { + if (invalidHandler) { + invalidHandler(k, v, rawType, data) + } else if (invalidHandler !== false) { + debug('invalid: ' + k + '=' + v, rawType) + } + return remove + } + debug('validated v', d, v, rawType) + return d[k] + }).filter((v) => v !== remove) + + // if we allow Array specifically, then an empty array is how we + // express 'no value here', not null. Allow it. + if (!val.length && doesNotHaveTypeDef(type, ArrayType)) { + debug('VAL HAS NO LENGTH, DELETE IT', val, k, type.indexOf(ArrayType)) + delete data[k] + } else if (isArray) { + debug(isArray, data[k], val) + data[k] = val + } else { + data[k] = val[0] + } + + debug('k=%s val=%j', k, val, data[k]) + }) +} + +function validate (data, k, val, type, { typeDefs } = {}) { + const ArrayType = typeDefs?.Array?.type + // arrays are lists of types. + if (Array.isArray(type)) { + for (let i = 0, l = type.length; i < l; i++) { + if (isTypeDef(type[i], ArrayType)) { + continue + } + if (validate(data, k, val, type[i], { typeDefs })) { + return true + } + } + delete data[k] + return false + } + + // an array of anything? + if (isTypeDef(type, ArrayType)) { + return true + } + + // Original comment: + // NaN is poisonous. Means that something is not allowed. + // New comment: Changing this to an isNaN check breaks a lot of tests. + // Something is being assumed here that is not actually what happens in + // practice. Fixing it is outside the scope of getting linting to pass in + // this repo. Leaving as-is for now. + /* eslint-disable-next-line no-self-compare */ + if (type !== type) { + debug('Poison NaN', k, val, type) + delete data[k] + return false + } + + // explicit list of values + if (val === type) { + debug('Explicitly allowed %j', val) + data[k] = val + return true + } + + // now go through the list of typeDefs, validate against each one. + let ok = false + const types = Object.keys(typeDefs) + for (let i = 0, l = types.length; i < l; i++) { + debug('test type %j %j %j', k, val, types[i]) + const t = typeDefs[types[i]] + if (t && ( + (type && type.name && t.type && t.type.name) ? + (type.name === t.type.name) : + (type === t.type) + )) { + const d = {} + ok = t.validate(d, k, val) !== false + val = d[k] + if (ok) { + data[k] = val + break + } + } + } + debug('OK? %j (%j %j %j)', ok, k, val, types[types.length - 1]) + + if (!ok) { + delete data[k] + } + return ok +} + +function parse (args, data, remain, { + types = {}, + typeDefs = {}, + shorthands = {}, + dynamicTypes, +} = {}) { + const StringType = typeDefs.String?.type + const NumberType = typeDefs.Number?.type + const ArrayType = typeDefs.Array?.type + const BooleanType = typeDefs.Boolean?.type + + debug('parse', args, data, remain) + + const abbrevs = abbrev(Object.keys(types)) + debug('abbrevs=%j', abbrevs) + const shortAbbr = abbrev(Object.keys(shorthands)) + + for (let i = 0; i < args.length; i++) { + let arg = args[i] + debug('arg', arg) + + if (arg.match(/^-{2,}$/)) { + // done with keys. + // the rest are args. 
+ remain.push.apply(remain, args.slice(i + 1)) + args[i] = '--' + break + } + let hadEq = false + if (arg.charAt(0) === '-' && arg.length > 1) { + const at = arg.indexOf('=') + if (at > -1) { + hadEq = true + const v = arg.slice(at + 1) + arg = arg.slice(0, at) + args.splice(i, 1, arg, v) + } + + // see if it's a shorthand + // if so, splice and back up to re-parse it. + const shRes = resolveShort(arg, shortAbbr, abbrevs, { shorthands }) + debug('arg=%j shRes=%j', arg, shRes) + if (shRes) { + args.splice.apply(args, [i, 1].concat(shRes)) + if (arg !== shRes[0]) { + i-- + continue + } + } + arg = arg.replace(/^-+/, '') + let no = null + while (arg.toLowerCase().indexOf('no-') === 0) { + no = !no + arg = arg.slice(3) + } + + if (abbrevs[arg]) { + arg = abbrevs[arg] + } + + let [hasType, argType] = getType(arg, { types, dynamicTypes }) + let isTypeArray = Array.isArray(argType) + if (isTypeArray && argType.length === 1) { + isTypeArray = false + argType = argType[0] + } + + let isArray = isTypeDef(argType, ArrayType) || + isTypeArray && hasTypeDef(argType, ArrayType) + + // allow unknown things to be arrays if specified multiple times. + if (!hasType && hasOwn(data, arg)) { + if (!Array.isArray(data[arg])) { + data[arg] = [data[arg]] + } + isArray = true + } + + let val + let la = args[i + 1] + + const isBool = typeof no === 'boolean' || + isTypeDef(argType, BooleanType) || + isTypeArray && hasTypeDef(argType, BooleanType) || + (typeof argType === 'undefined' && !hadEq) || + (la === 'false' && + (argType === null || + isTypeArray && ~argType.indexOf(null))) + + if (isBool) { + // just set and move along + val = !no + // however, also support --bool true or --bool false + if (la === 'true' || la === 'false') { + val = JSON.parse(la) + la = null + if (no) { + val = !val + } + i++ + } + + // also support "foo":[Boolean, "bar"] and "--foo bar" + if (isTypeArray && la) { + if (~argType.indexOf(la)) { + // an explicit type + val = la + i++ + } else if (la === 'null' && ~argType.indexOf(null)) { + // null allowed + val = null + i++ + } else if (!la.match(/^-{2,}[^-]/) && + !isNaN(la) && + hasTypeDef(argType, NumberType)) { + // number + val = +la + i++ + } else if (!la.match(/^-[^-]/) && hasTypeDef(argType, StringType)) { + // string + val = la + i++ + } + } + + if (isArray) { + (data[arg] = data[arg] || []).push(val) + } else { + data[arg] = val + } + + continue + } + + if (isTypeDef(argType, StringType)) { + if (la === undefined) { + la = '' + } else if (la.match(/^-{1,2}[^-]+/)) { + la = '' + i-- + } + } + + if (la && la.match(/^-{2,}$/)) { + la = undefined + i-- + } + + val = la === undefined ? true : la + if (isArray) { + (data[arg] = data[arg] || []).push(val) + } else { + data[arg] = val + } + + i++ + continue + } + remain.push(arg) + } +} + +const SINGLES = Symbol('singles') +const singleCharacters = (arg, shorthands) => { + let singles = shorthands[SINGLES] + if (!singles) { + singles = Object.keys(shorthands).filter((s) => s.length === 1).reduce((l, r) => { + l[r] = true + return l + }, {}) + shorthands[SINGLES] = singles + debug('shorthand singles', singles) + } + const chrs = arg.split('').filter((c) => singles[c]) + return chrs.join('') === arg ? chrs : null +} + +function resolveShort (arg, ...rest) { + const { types = {}, shorthands = {} } = rest.length ? rest.pop() : {} + const shortAbbr = rest[0] ?? abbrev(Object.keys(shorthands)) + const abbrevs = rest[1] ?? 
abbrev(Object.keys(types)) + + // handle single-char shorthands glommed together, like + // npm ls -glp, but only if there is one dash, and only if + // all of the chars are single-char shorthands, and it's + // not a match to some other abbrev. + arg = arg.replace(/^-+/, '') + + // if it's an exact known option, then don't go any further + if (abbrevs[arg] === arg) { + return null + } + + // if it's an exact known shortopt, same deal + if (shorthands[arg]) { + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) { + shorthands[arg] = shorthands[arg].split(/\s+/) + } + + return shorthands[arg] + } + + // first check to see if this arg is a set of single-char shorthands + const chrs = singleCharacters(arg, shorthands) + if (chrs) { + return chrs.map((c) => shorthands[c]).reduce((l, r) => l.concat(r), []) + } + + // if it's an arg abbrev, and not a literal shorthand, then prefer the arg + if (abbrevs[arg] && !shorthands[arg]) { + return null + } + + // if it's an abbr for a shorthand, then use that + if (shortAbbr[arg]) { + arg = shortAbbr[arg] + } + + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) { + shorthands[arg] = shorthands[arg].split(/\s+/) + } + + return shorthands[arg] +} + +module.exports = { + nopt, + clean, + parse, + validate, + resolveShort, + typeDefs: defaultTypeDefs, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt.js b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt.js new file mode 100644 index 00000000000000..37f01a08783f87 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt.js @@ -0,0 +1,30 @@ +const lib = require('./nopt-lib') +const defaultTypeDefs = require('./type-defs') + +// This is the version of nopt's API that requires setting typeDefs and invalidHandler +// on the required `nopt` object since it is a singleton. To not do a breaking change +// an API that requires all options be passed in is located in `nopt-lib.js` and +// exported here as lib. +// TODO(breaking): make API only work in non-singleton mode + +module.exports = exports = nopt +exports.clean = clean +exports.typeDefs = defaultTypeDefs +exports.lib = lib + +function nopt (types, shorthands, args = process.argv, slice = 2) { + return lib.nopt(args.slice(slice), { + types: types || {}, + shorthands: shorthands || {}, + typeDefs: exports.typeDefs, + invalidHandler: exports.invalidHandler, + }) +} + +function clean (data, types, typeDefs = exports.typeDefs) { + return lib.clean(data, { + types: types || {}, + typeDefs, + invalidHandler: exports.invalidHandler, + }) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/type-defs.js b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/type-defs.js new file mode 100644 index 00000000000000..608352ee248cc4 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/type-defs.js @@ -0,0 +1,91 @@ +const url = require('url') +const path = require('path') +const Stream = require('stream').Stream +const os = require('os') +const debug = require('./debug') + +function validateString (data, k, val) { + data[k] = String(val) +} + +function validatePath (data, k, val) { + if (val === true) { + return false + } + if (val === null) { + return true + } + + val = String(val) + + const isWin = process.platform === 'win32' + const homePattern = isWin ? 
/^~(\/|\\)/ : /^~\// + const home = os.homedir() + + if (home && val.match(homePattern)) { + data[k] = path.resolve(home, val.slice(2)) + } else { + data[k] = path.resolve(val) + } + return true +} + +function validateNumber (data, k, val) { + debug('validate Number %j %j %j', k, val, isNaN(val)) + if (isNaN(val)) { + return false + } + data[k] = +val +} + +function validateDate (data, k, val) { + const s = Date.parse(val) + debug('validate Date %j %j %j', k, val, s) + if (isNaN(s)) { + return false + } + data[k] = new Date(val) +} + +function validateBoolean (data, k, val) { + if (typeof val === 'string') { + if (!isNaN(val)) { + val = !!(+val) + } else if (val === 'null' || val === 'false') { + val = false + } else { + val = true + } + } else { + val = !!val + } + data[k] = val +} + +function validateUrl (data, k, val) { + // Changing this would be a breaking change in the npm cli + /* eslint-disable-next-line node/no-deprecated-api */ + val = url.parse(String(val)) + if (!val.host) { + return false + } + data[k] = val.href +} + +function validateStream (data, k, val) { + if (!(val instanceof Stream)) { + return false + } + data[k] = val +} + +module.exports = { + String: { type: String, validate: validateString }, + Boolean: { type: Boolean, validate: validateBoolean }, + url: { type: url, validate: validateUrl }, + Number: { type: Number, validate: validateNumber }, + path: { type: path, validate: validatePath }, + Stream: { type: Stream, validate: validateStream }, + Date: { type: Date, validate: validateDate }, + Array: { type: Array }, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/package.json b/deps/npm/node_modules/node-gyp/node_modules/nopt/package.json new file mode 100644 index 00000000000000..37b770ad487711 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/package.json @@ -0,0 +1,51 @@ +{ + "name": "nopt", + "version": "7.2.1", + "description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.", + "author": "GitHub Inc.", + "main": "lib/nopt.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/nopt.git" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "license": "ISC", + "dependencies": { + "abbrev": "^2.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.3.0" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "windowsCI": false, + "version": "4.22.0", + "publish": true + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/proc-log/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/proc-log/LICENSE new file mode 100644 index 00000000000000..83837797202b70 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/proc-log/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) GitHub, Inc. 
+ +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/proc-log/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/proc-log/lib/index.js new file mode 100644 index 00000000000000..86d90861078dab --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/proc-log/lib/index.js @@ -0,0 +1,153 @@ +const META = Symbol('proc-log.meta') +module.exports = { + META: META, + output: { + LEVELS: [ + 'standard', + 'error', + 'buffer', + 'flush', + ], + KEYS: { + standard: 'standard', + error: 'error', + buffer: 'buffer', + flush: 'flush', + }, + standard: function (...args) { + return process.emit('output', 'standard', ...args) + }, + error: function (...args) { + return process.emit('output', 'error', ...args) + }, + buffer: function (...args) { + return process.emit('output', 'buffer', ...args) + }, + flush: function (...args) { + return process.emit('output', 'flush', ...args) + }, + }, + log: { + LEVELS: [ + 'notice', + 'error', + 'warn', + 'info', + 'verbose', + 'http', + 'silly', + 'timing', + 'pause', + 'resume', + ], + KEYS: { + notice: 'notice', + error: 'error', + warn: 'warn', + info: 'info', + verbose: 'verbose', + http: 'http', + silly: 'silly', + timing: 'timing', + pause: 'pause', + resume: 'resume', + }, + error: function (...args) { + return process.emit('log', 'error', ...args) + }, + notice: function (...args) { + return process.emit('log', 'notice', ...args) + }, + warn: function (...args) { + return process.emit('log', 'warn', ...args) + }, + info: function (...args) { + return process.emit('log', 'info', ...args) + }, + verbose: function (...args) { + return process.emit('log', 'verbose', ...args) + }, + http: function (...args) { + return process.emit('log', 'http', ...args) + }, + silly: function (...args) { + return process.emit('log', 'silly', ...args) + }, + timing: function (...args) { + return process.emit('log', 'timing', ...args) + }, + pause: function () { + return process.emit('log', 'pause') + }, + resume: function () { + return process.emit('log', 'resume') + }, + }, + time: { + LEVELS: [ + 'start', + 'end', + ], + KEYS: { + start: 'start', + end: 'end', + }, + start: function (name, fn) { + process.emit('time', 'start', name) + function end () { + return process.emit('time', 'end', name) + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return res.finally(end) + } + end() + return res + } + return end + }, + end: function (name) { + return process.emit('time', 'end', name) + }, + }, + input: { + LEVELS: [ + 'start', + 'end', + 'read', + ], + KEYS: { + start: 'start', + end: 'end', + read: 'read', + }, + start: function (fn) { + process.emit('input', 'start') + function end () { + return process.emit('input', 'end') + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return 
res.finally(end) + } + end() + return res + } + return end + }, + end: function () { + return process.emit('input', 'end') + }, + read: function (...args) { + let resolve, reject + const promise = new Promise((_resolve, _reject) => { + resolve = _resolve + reject = _reject + }) + process.emit('input', 'read', resolve, reject, ...args) + return promise + }, + }, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/proc-log/package.json b/deps/npm/node_modules/node-gyp/node_modules/proc-log/package.json new file mode 100644 index 00000000000000..4ab89102ecc9b5 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/proc-log/package.json @@ -0,0 +1,45 @@ +{ + "name": "proc-log", + "version": "4.2.0", + "files": [ + "bin/", + "lib/" + ], + "main": "lib/index.js", + "description": "just emit 'log' events on the process object", + "repository": { + "type": "git", + "url": "https://github.com/npm/proc-log.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "posttest": "npm run lint", + "postsnap": "eslint index.js test/*.js --fix", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "template-oss-apply": "template-oss-apply --force" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.3", + "tap": "^16.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/ssri/LICENSE.md b/deps/npm/node_modules/node-gyp/node_modules/ssri/LICENSE.md new file mode 100644 index 00000000000000..e335388869f50f --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/ssri/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright 2021 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/ssri/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/ssri/lib/index.js new file mode 100644 index 00000000000000..7d749ed480fb98 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/ssri/lib/index.js @@ -0,0 +1,580 @@ +'use strict' + +const crypto = require('crypto') +const { Minipass } = require('minipass') + +const SPEC_ALGORITHMS = ['sha512', 'sha384', 'sha256'] +const DEFAULT_ALGORITHMS = ['sha512'] + +// TODO: this should really be a hardcoded list of algorithms we support, +// rather than [a-z0-9]. 
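+// The loose SRI_REGEX below accepts any lowercase alphanumeric algorithm token
+// and any non-"?" digest; STRICT_SRI_REGEX and VCHAR_REGEX instead enforce the
+// spec grammar: a 44-88 character base64 digest and visible-ASCII-only options.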
+const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i +const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/ +const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/ +const VCHAR_REGEX = /^[\x21-\x7E]+$/ + +const getOptString = options => options?.length ? `?${options.join('?')}` : '' + +class IntegrityStream extends Minipass { + #emittedIntegrity + #emittedSize + #emittedVerified + + constructor (opts) { + super() + this.size = 0 + this.opts = opts + + // may be overridden later, but set now for class consistency + this.#getOptions() + + // options used for calculating stream. can't be changed. + if (opts?.algorithms) { + this.algorithms = [...opts.algorithms] + } else { + this.algorithms = [...DEFAULT_ALGORITHMS] + } + if (this.algorithm !== null && !this.algorithms.includes(this.algorithm)) { + this.algorithms.push(this.algorithm) + } + + this.hashes = this.algorithms.map(crypto.createHash) + } + + #getOptions () { + // For verification + this.sri = this.opts?.integrity ? parse(this.opts?.integrity, this.opts) : null + this.expectedSize = this.opts?.size + + if (!this.sri) { + this.algorithm = null + } else if (this.sri.isHash) { + this.goodSri = true + this.algorithm = this.sri.algorithm + } else { + this.goodSri = !this.sri.isEmpty() + this.algorithm = this.sri.pickAlgorithm(this.opts) + } + + this.digests = this.goodSri ? this.sri[this.algorithm] : null + this.optString = getOptString(this.opts?.options) + } + + on (ev, handler) { + if (ev === 'size' && this.#emittedSize) { + return handler(this.#emittedSize) + } + + if (ev === 'integrity' && this.#emittedIntegrity) { + return handler(this.#emittedIntegrity) + } + + if (ev === 'verified' && this.#emittedVerified) { + return handler(this.#emittedVerified) + } + + return super.on(ev, handler) + } + + emit (ev, data) { + if (ev === 'end') { + this.#onEnd() + } + return super.emit(ev, data) + } + + write (data) { + this.size += data.length + this.hashes.forEach(h => h.update(data)) + return super.write(data) + } + + #onEnd () { + if (!this.goodSri) { + this.#getOptions() + } + const newSri = parse(this.hashes.map((h, i) => { + return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}` + }).join(' '), this.opts) + // Integrity verification mode + const match = this.goodSri && newSri.match(this.sri, this.opts) + if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) { + /* eslint-disable-next-line max-len */ + const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`) + err.code = 'EBADSIZE' + err.found = this.size + err.expected = this.expectedSize + err.sri = this.sri + this.emit('error', err) + } else if (this.sri && !match) { + /* eslint-disable-next-line max-len */ + const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. 
(${this.size} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = this.digests + err.algorithm = this.algorithm + err.sri = this.sri + this.emit('error', err) + } else { + this.#emittedSize = this.size + this.emit('size', this.size) + this.#emittedIntegrity = newSri + this.emit('integrity', newSri) + if (match) { + this.#emittedVerified = match + this.emit('verified', match) + } + } + } +} + +class Hash { + get isHash () { + return true + } + + constructor (hash, opts) { + const strict = opts?.strict + this.source = hash.trim() + + // set default values so that we make V8 happy to + // always see a familiar object template. + this.digest = '' + this.algorithm = '' + this.options = [] + + // 3.1. Integrity metadata (called "Hash" by ssri) + // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description + const match = this.source.match( + strict + ? STRICT_SRI_REGEX + : SRI_REGEX + ) + if (!match) { + return + } + if (strict && !SPEC_ALGORITHMS.includes(match[1])) { + return + } + this.algorithm = match[1] + this.digest = match[2] + + const rawOpts = match[3] + if (rawOpts) { + this.options = rawOpts.slice(1).split('?') + } + } + + hexDigest () { + return this.digest && Buffer.from(this.digest, 'base64').toString('hex') + } + + toJSON () { + return this.toString() + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + if (other.isIntegrity) { + const algo = other.pickAlgorithm(opts, [this.algorithm]) + + if (!algo) { + return false + } + + const foundHash = other[algo].find(hash => hash.digest === this.digest) + + if (foundHash) { + return foundHash + } + + return false + } + return other.digest === this.digest ? other : false + } + + toString (opts) { + if (opts?.strict) { + // Strict mode enforces the standard as close to the foot of the + // letter as it can. + if (!( + // The spec has very restricted productions for algorithms. + // https://www.w3.org/TR/CSP2/#source-list-syntax + SPEC_ALGORITHMS.includes(this.algorithm) && + // Usually, if someone insists on using a "different" base64, we + // leave it as-is, since there's multiple standards, and the + // specified is not a URL-safe variant. + // https://www.w3.org/TR/CSP2/#base64_value + this.digest.match(BASE64_REGEX) && + // Option syntax is strictly visual chars. 
+ // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression + // https://tools.ietf.org/html/rfc5234#appendix-B.1 + this.options.every(opt => opt.match(VCHAR_REGEX)) + )) { + return '' + } + } + return `${this.algorithm}-${this.digest}${getOptString(this.options)}` + } +} + +function integrityHashToString (toString, sep, opts, hashes) { + const toStringIsNotEmpty = toString !== '' + + let shouldAddFirstSep = false + let complement = '' + + const lastIndex = hashes.length - 1 + + for (let i = 0; i < lastIndex; i++) { + const hashString = Hash.prototype.toString.call(hashes[i], opts) + + if (hashString) { + shouldAddFirstSep = true + + complement += hashString + complement += sep + } + } + + const finalHashString = Hash.prototype.toString.call(hashes[lastIndex], opts) + + if (finalHashString) { + shouldAddFirstSep = true + complement += finalHashString + } + + if (toStringIsNotEmpty && shouldAddFirstSep) { + return toString + sep + complement + } + + return toString + complement +} + +class Integrity { + get isIntegrity () { + return true + } + + toJSON () { + return this.toString() + } + + isEmpty () { + return Object.keys(this).length === 0 + } + + toString (opts) { + let sep = opts?.sep || ' ' + let toString = '' + + if (opts?.strict) { + // Entries must be separated by whitespace, according to spec. + sep = sep.replace(/\S+/g, ' ') + + for (const hash of SPEC_ALGORITHMS) { + if (this[hash]) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + } else { + for (const hash of Object.keys(this)) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + + return toString + } + + concat (integrity, opts) { + const other = typeof integrity === 'string' + ? integrity + : stringify(integrity, opts) + return parse(`${this.toString(opts)} ${other}`, opts) + } + + hexDigest () { + return parse(this, { single: true }).hexDigest() + } + + // add additional hashes to an integrity value, but prevent + // *changing* an existing integrity hash. + merge (integrity, opts) { + const other = parse(integrity, opts) + for (const algo in other) { + if (this[algo]) { + if (!this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest))) { + throw new Error('hashes do not match, cannot update integrity') + } + } else { + this[algo] = other[algo] + } + } + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + const algo = other.pickAlgorithm(opts, Object.keys(this)) + return ( + !!algo && + this[algo] && + other[algo] && + this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest + ) + ) + ) || false + } + + // Pick the highest priority algorithm present, optionally also limited to a + // set of hashes found in another integrity. When limiting it may return + // nothing. 
+ pickAlgorithm (opts, hashes) { + const pickAlgorithm = opts?.pickAlgorithm || getPrioritizedHash + const keys = Object.keys(this).filter(k => { + if (hashes?.length) { + return hashes.includes(k) + } + return true + }) + if (keys.length) { + return keys.reduce((acc, algo) => pickAlgorithm(acc, algo) || acc) + } + // no intersection between this and hashes, + return null + } +} + +module.exports.parse = parse +function parse (sri, opts) { + if (!sri) { + return null + } + if (typeof sri === 'string') { + return _parse(sri, opts) + } else if (sri.algorithm && sri.digest) { + const fullSri = new Integrity() + fullSri[sri.algorithm] = [sri] + return _parse(stringify(fullSri, opts), opts) + } else { + return _parse(stringify(sri, opts), opts) + } +} + +function _parse (integrity, opts) { + // 3.4.3. Parse metadata + // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata + if (opts?.single) { + return new Hash(integrity, opts) + } + const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => { + const hash = new Hash(string, opts) + if (hash.algorithm && hash.digest) { + const algo = hash.algorithm + if (!acc[algo]) { + acc[algo] = [] + } + acc[algo].push(hash) + } + return acc + }, new Integrity()) + return hashes.isEmpty() ? null : hashes +} + +module.exports.stringify = stringify +function stringify (obj, opts) { + if (obj.algorithm && obj.digest) { + return Hash.prototype.toString.call(obj, opts) + } else if (typeof obj === 'string') { + return stringify(parse(obj, opts), opts) + } else { + return Integrity.prototype.toString.call(obj, opts) + } +} + +module.exports.fromHex = fromHex +function fromHex (hexDigest, algorithm, opts) { + const optString = getOptString(opts?.options) + return parse( + `${algorithm}-${ + Buffer.from(hexDigest, 'hex').toString('base64') + }${optString}`, opts + ) +} + +module.exports.fromData = fromData +function fromData (data, opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + return algorithms.reduce((acc, algo) => { + const digest = crypto.createHash(algo).update(data).digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the string we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) +} + +module.exports.fromStream = fromStream +function fromStream (stream, opts) { + const istream = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(istream) + stream.on('error', reject) + istream.on('error', reject) + let sri + istream.on('integrity', s => { + sri = s + }) + istream.on('end', () => resolve(sri)) + istream.resume() + }) +} + +module.exports.checkData = checkData +function checkData (data, sri, opts) { + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + if (opts?.error) { + throw Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + ) + } else { + return false + } + } + const algorithm = sri.pickAlgorithm(opts) + const digest = crypto.createHash(algorithm).update(data).digest('base64') + const newSri = parse({ algorithm, digest }) + const match = newSri.match(sri, opts) + opts = opts || {} + if (match || !(opts.error)) { + return match + } else if (typeof opts.size === 'number' && (data.length !== opts.size)) { + /* eslint-disable-next-line max-len */ + const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`) + err.code = 'EBADSIZE' + err.found = data.length + err.expected = opts.size + err.sri = sri + throw err + } else { + /* eslint-disable-next-line max-len */ + const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = sri + err.algorithm = algorithm + err.sri = sri + throw err + } +} + +module.exports.checkStream = checkStream +function checkStream (stream, sri, opts) { + opts = opts || Object.create(null) + opts.integrity = sri + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + return Promise.reject(Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + )) + } + const checker = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(checker) + stream.on('error', reject) + checker.on('error', reject) + let verified + checker.on('verified', s => { + verified = s + }) + checker.on('end', () => resolve(verified)) + checker.resume() + }) +} + +module.exports.integrityStream = integrityStream +function integrityStream (opts = Object.create(null)) { + return new IntegrityStream(opts) +} + +module.exports.create = createIntegrity +function createIntegrity (opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + + const hashes = algorithms.map(crypto.createHash) + + return { + update: function (chunk, enc) { + hashes.forEach(h => h.update(chunk, enc)) + return this + }, + digest: function () { + const integrity = algorithms.reduce((acc, algo) => { + const digest = hashes.shift().digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the hash we + * just calculated with an algo did not have an algo or digest. 
+         */
+        if (hash.algorithm && hash.digest) {
+          const hashAlgo = hash.algorithm
+          if (!acc[hashAlgo]) {
+            acc[hashAlgo] = []
+          }
+          acc[hashAlgo].push(hash)
+        }
+        return acc
+      }, new Integrity())
+
+      return integrity
+    },
+  }
+}
+
+const NODE_HASHES = crypto.getHashes()
+
+// This is a Best Effort™ at a reasonable priority for hash algos
+const DEFAULT_PRIORITY = [
+  'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
+  // TODO - it's unclear _which_ of these Node will actually use as its name
+  //        for the algorithm, so we guesswork it based on the OpenSSL names.
+  'sha3',
+  'sha3-256', 'sha3-384', 'sha3-512',
+  'sha3_256', 'sha3_384', 'sha3_512',
+].filter(algo => NODE_HASHES.includes(algo))
+
+function getPrioritizedHash (algo1, algo2) {
+  /* eslint-disable-next-line max-len */
+  return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase())
+    ? algo1
+    : algo2
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/ssri/package.json b/deps/npm/node_modules/node-gyp/node_modules/ssri/package.json
new file mode 100644
index 00000000000000..28395414e4643c
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/ssri/package.json
@@ -0,0 +1,65 @@
+{
+  "name": "ssri",
+  "version": "10.0.6",
+  "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "prerelease": "npm t",
+    "postrelease": "npm publish",
+    "posttest": "npm run lint",
+    "test": "tap",
+    "coverage": "tap",
+    "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "lintfix": "npm run lint -- --fix",
+    "snap": "tap"
+  },
+  "tap": {
+    "check-coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/ssri.git"
+  },
+  "keywords": [
+    "w3c",
+    "web",
+    "security",
+    "integrity",
+    "checksum",
+    "hashing",
+    "subresource integrity",
+    "sri",
+    "sri hash",
+    "sri string",
+    "sri generator",
+    "html"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "minipass": "^7.0.3"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.22.0",
+    "tap": "^16.0.1"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.22.0",
+    "publish": "true"
+  }
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/unique-filename/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/unique-filename/LICENSE
new file mode 100644
index 00000000000000..69619c125ea7ef
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/unique-filename/LICENSE
@@ -0,0 +1,5 @@
+Copyright npm, Inc
+
+Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/unique-filename/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/unique-filename/lib/index.js new file mode 100644 index 00000000000000..d067d2e709809a --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/unique-filename/lib/index.js @@ -0,0 +1,7 @@ +var path = require('path') + +var uniqueSlug = require('unique-slug') + +module.exports = function (filepath, prefix, uniq) { + return path.join(filepath, (prefix ? prefix + '-' : '') + uniqueSlug(uniq)) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/unique-filename/package.json b/deps/npm/node_modules/node-gyp/node_modules/unique-filename/package.json new file mode 100644 index 00000000000000..b2fbf0666489a6 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/unique-filename/package.json @@ -0,0 +1,51 @@ +{ + "name": "unique-filename", + "version": "3.0.0", + "description": "Generate a unique filename for use in temporary directories or caches.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/unique-filename.git" + }, + "keywords": [], + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/unique-filename/issues" + }, + "homepage": "https://github.com/iarna/unique-filename", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.5.1", + "tap": "^16.3.0" + }, + "dependencies": { + "unique-slug": "^4.0.0" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.5.1" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/unique-slug/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/unique-slug/LICENSE new file mode 100644 index 00000000000000..7953647e7760b8 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/unique-slug/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
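The `unique-filename` helper above just joins a directory, an optional prefix, and a slug produced by `unique-slug`, whose implementation follows. A short usage sketch, assuming both vendored packages keep their documented exports:

```javascript
// Sketch only: assumes the standard exports of unique-filename and unique-slug.
const os = require('os')
const uniqueFilename = require('unique-filename')
const uniqueSlug = require('unique-slug')

// With no seed, unique-slug returns a random 8-character hex string.
console.log(uniqueSlug())

// With a seed, the slug is a murmur hash of it, stable across runs.
console.log(uniqueSlug('/my/thing/to/slug') === uniqueSlug('/my/thing/to/slug')) // true

// unique-filename joins <dir>/<prefix>-<slug>, e.g. /tmp/my-test-b075c4e4
console.log(uniqueFilename(os.tmpdir(), 'my-test'))

// Passing a seed makes the whole filename deterministic for that seed.
console.log(uniqueFilename(os.tmpdir(), 'my-test', '/my/thing/to/slug'))
```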
diff --git a/deps/npm/node_modules/node-gyp/node_modules/unique-slug/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/unique-slug/lib/index.js new file mode 100644 index 00000000000000..1bac84d95d7307 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/unique-slug/lib/index.js @@ -0,0 +1,11 @@ +'use strict' +var MurmurHash3 = require('imurmurhash') + +module.exports = function (uniq) { + if (uniq) { + var hash = new MurmurHash3(uniq) + return ('00000000' + hash.result().toString(16)).slice(-8) + } else { + return (Math.random().toString(16) + '0000000').slice(2, 10) + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/unique-slug/package.json b/deps/npm/node_modules/node-gyp/node_modules/unique-slug/package.json new file mode 100644 index 00000000000000..33732cdbb42859 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/unique-slug/package.json @@ -0,0 +1,47 @@ +{ + "name": "unique-slug", + "version": "4.0.0", + "description": "Generate a unique character string suitable for use in files and URLs.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "keywords": [], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^3.1.0", + "@npmcli/template-oss": "4.5.1", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/unique-slug.git" + }, + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.5.1" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/which/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/which/LICENSE new file mode 100644 index 00000000000000..19129e315fe593 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/which/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/which/README.md b/deps/npm/node_modules/node-gyp/node_modules/which/README.md new file mode 100644 index 00000000000000..942bf1e0932b89 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/which/README.md @@ -0,0 +1,51 @@ +# which + +Like the unix `which` utility. + +Finds the first instance of a specified executable in the PATH +environment variable. Does not cache the results, so `hash -r` is not +needed when the PATH changes.
+ +## USAGE + +```javascript +const which = require('which') + +// async usage +// rejects if not found +const resolved = await which('node') + +// if nothrow option is used, returns null if not found +const resolvedOrNull = await which('node', { nothrow: true }) + +// sync usage +// throws if not found +const resolvedSync = which.sync('node') + +// if nothrow option is used, returns null if not found +const resolvedSyncOrNull = which.sync('node', { nothrow: true }) + +// Pass options to override the PATH and PATHEXT environment vars. +await which('node', { path: someOtherPath, pathExt: somePathExt }) +``` + +## CLI USAGE + +Just like the BSD `which(1)` binary but using `node-which`. + +``` +usage: node-which [-as] program ... +``` + +You can learn more about why the binary is `node-which` and not `which` +[here](https://github.com/npm/node-which/pull/67). + +## OPTIONS + +You may pass an options object as the second argument. + +- `path`: Use instead of the `PATH` environment variable. +- `pathExt`: Use instead of the `PATHEXT` environment variable. +- `all`: Return all matches, instead of just the first one. Note that + this means the function returns an array of strings instead of a + single string. diff --git a/deps/npm/node_modules/node-gyp/node_modules/which/bin/which.js b/deps/npm/node_modules/node-gyp/node_modules/which/bin/which.js new file mode 100755 index 00000000000000..6df16f21acf930 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/which/bin/which.js @@ -0,0 +1,52 @@ +#!/usr/bin/env node + +const which = require('../lib') +const argv = process.argv.slice(2) + +const usage = (err) => { + if (err) { + console.error(`which: ${err}`) + } + console.error('usage: which [-as] program ...') + process.exit(1) +} + +if (!argv.length) { + return usage() +} + +let dashdash = false +const [commands, flags] = argv.reduce((acc, arg) => { + if (dashdash || arg === '--') { + dashdash = true + return acc + } + + if (!/^-/.test(arg)) { + acc[0].push(arg) + return acc + } + + for (const flag of arg.slice(1).split('')) { + if (flag === 's') { + acc[1].silent = true + } else if (flag === 'a') { + acc[1].all = true + } else { + usage(`illegal option -- ${flag}`) + } + } + + return acc +}, [[], {}]) + +for (const command of commands) { + try { + const res = which.sync(command, { all: flags.all }) + if (!flags.silent) { + console.log([].concat(res).join('\n')) + } + } catch (err) { + process.exitCode = 1 + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/which/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/which/lib/index.js new file mode 100644 index 00000000000000..2fd358baf888fd --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/which/lib/index.js @@ -0,0 +1,111 @@ +const { isexe, sync: isexeSync } = require('isexe') +const { join, delimiter, sep, posix } = require('path') + +const isWindows = process.platform === 'win32' + +// used to check for slashes in commands passed in. always checks for the posix +// separator on all platforms, and checks for the current separator when not on +// a posix platform. don't use the isWindows check for this since that is mocked +// in tests but we still need the code to actually work when called. that is also +// why it is ignored from coverage. +/* istanbul ignore next */ +const rSlash = new RegExp(`[${posix.sep}${sep === posix.sep ?
'' : sep}]`.replace(/(\\)/g, '\\$1')) +const rRel = new RegExp(`^\\.${rSlash.source}`) + +const getNotFoundError = (cmd) => + Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' }) + +const getPathInfo = (cmd, { + path: optPath = process.env.PATH, + pathExt: optPathExt = process.env.PATHEXT, + delimiter: optDelimiter = delimiter, +}) => { + // If it has a slash, then we don't bother searching the pathenv. + // just check the file itself, and that's it. + const pathEnv = cmd.match(rSlash) ? [''] : [ + // windows always checks the cwd first + ...(isWindows ? [process.cwd()] : []), + ...(optPath || /* istanbul ignore next: very unusual */ '').split(optDelimiter), + ] + + if (isWindows) { + const pathExtExe = optPathExt || + ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter) + const pathExt = pathExtExe.split(optDelimiter).flatMap((item) => [item, item.toLowerCase()]) + if (cmd.includes('.') && pathExt[0] !== '') { + pathExt.unshift('') + } + return { pathEnv, pathExt, pathExtExe } + } + + return { pathEnv, pathExt: [''] } +} + +const getPathPart = (raw, cmd) => { + const pathPart = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw + const prefix = !pathPart && rRel.test(cmd) ? cmd.slice(0, 2) : '' + return prefix + join(pathPart, cmd) +} + +const which = async (cmd, opt = {}) => { + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] + + for (const envPart of pathEnv) { + const p = getPathPart(envPart, cmd) + + for (const ext of pathExt) { + const withExt = p + ext + const is = await isexe(withExt, { pathExt: pathExtExe, ignoreErrors: true }) + if (is) { + if (!opt.all) { + return withExt + } + found.push(withExt) + } + } + } + + if (opt.all && found.length) { + return found + } + + if (opt.nothrow) { + return null + } + + throw getNotFoundError(cmd) +} + +const whichSync = (cmd, opt = {}) => { + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] + + for (const pathEnvPart of pathEnv) { + const p = getPathPart(pathEnvPart, cmd) + + for (const ext of pathExt) { + const withExt = p + ext + const is = isexeSync(withExt, { pathExt: pathExtExe, ignoreErrors: true }) + if (is) { + if (!opt.all) { + return withExt + } + found.push(withExt) + } + } + } + + if (opt.all && found.length) { + return found + } + + if (opt.nothrow) { + return null + } + + throw getNotFoundError(cmd) +} + +module.exports = which +which.sync = whichSync diff --git a/deps/npm/node_modules/node-gyp/node_modules/which/package.json b/deps/npm/node_modules/node-gyp/node_modules/which/package.json new file mode 100644 index 00000000000000..515bfb22ca0e1e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/which/package.json @@ -0,0 +1,57 @@ +{ + "author": "GitHub Inc.", + "name": "which", + "description": "Like which(1) unix command. 
Find the first instance of an executable in the PATH.", + "version": "4.0.0", + "repository": { + "type": "git", + "url": "https://github.com/npm/node-which.git" + }, + "main": "lib/index.js", + "bin": { + "node-which": "./bin/which.js" + }, + "license": "ISC", + "dependencies": { + "isexe": "^3.1.1" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.18.0", + "tap": "^16.3.0" + }, + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "files": [ + "bin/", + "lib/" + ], + "tap": { + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ], + "version": "4.18.0", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/nopt/node_modules/abbrev/LICENSE b/deps/npm/node_modules/nopt/node_modules/abbrev/LICENSE new file mode 100644 index 00000000000000..9bcfa9d7d8d26e --- /dev/null +++ b/deps/npm/node_modules/nopt/node_modules/abbrev/LICENSE @@ -0,0 +1,46 @@ +This software is dual-licensed under the ISC and MIT licenses. +You may use this software under EITHER of the following licenses. + +---------- + +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------- + +Copyright Isaac Z. Schlueter and Contributors +All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
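The `abbrev` implementation in the next hunk computes every unambiguous prefix of a word list; `nopt` (whose package.json is updated further below) depends on it so users can type shortened option names. A small sketch of the behavior, with illustrative values:

```javascript
// Hypothetical sketch of what the abbrev module (next hunk) returns.
const abbrev = require('abbrev')

console.log(abbrev('foo', 'fool', 'folding', 'flop'))
// Every prefix that identifies exactly one word maps to that word;
// words that are prefixes of other words map only to themselves:
// {
//   fl: 'flop', flo: 'flop', flop: 'flop',
//   fol: 'folding', fold: 'folding', foldi: 'folding',
//   foldin: 'folding', folding: 'folding',
//   foo: 'foo', fool: 'fool'
// }
```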
diff --git a/deps/npm/node_modules/nopt/node_modules/abbrev/lib/index.js b/deps/npm/node_modules/nopt/node_modules/abbrev/lib/index.js new file mode 100644 index 00000000000000..9f48801f049c9e --- /dev/null +++ b/deps/npm/node_modules/nopt/node_modules/abbrev/lib/index.js @@ -0,0 +1,50 @@ +module.exports = abbrev + +function abbrev (...args) { + let list = args.length === 1 || Array.isArray(args[0]) ? args[0] : args + + for (let i = 0, l = list.length; i < l; i++) { + list[i] = typeof list[i] === 'string' ? list[i] : String(list[i]) + } + + // sort them lexicographically, so that they're next to their nearest kin + list = list.sort(lexSort) + + // walk through each, seeing how much it has in common with the next and previous + const abbrevs = {} + let prev = '' + for (let ii = 0, ll = list.length; ii < ll; ii++) { + const current = list[ii] + const next = list[ii + 1] || '' + let nextMatches = true + let prevMatches = true + if (current === next) { + continue + } + let j = 0 + const cl = current.length + for (; j < cl; j++) { + const curChar = current.charAt(j) + nextMatches = nextMatches && curChar === next.charAt(j) + prevMatches = prevMatches && curChar === prev.charAt(j) + if (!nextMatches && !prevMatches) { + j++ + break + } + } + prev = current + if (j === cl) { + abbrevs[current] = current + continue + } + for (let a = current.slice(0, j); j <= cl; j++) { + abbrevs[a] = current + a += current.charAt(j) + } + } + return abbrevs +} + +function lexSort (a, b) { + return a === b ? 0 : a > b ? 1 : -1 +} diff --git a/deps/npm/node_modules/nopt/node_modules/abbrev/package.json b/deps/npm/node_modules/nopt/node_modules/abbrev/package.json new file mode 100644 index 00000000000000..e26400445631ad --- /dev/null +++ b/deps/npm/node_modules/nopt/node_modules/abbrev/package.json @@ -0,0 +1,43 @@ +{ + "name": "abbrev", + "version": "2.0.0", + "description": "Like ruby's abbrev module, but in js", + "author": "GitHub Inc.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/abbrev-js.git" + }, + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.8.0", + "tap": "^16.3.0" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.8.0" + } +} diff --git a/deps/npm/node_modules/nopt/package.json b/deps/npm/node_modules/nopt/package.json index 37b770ad487711..508b8e28b59f70 100644 --- a/deps/npm/node_modules/nopt/package.json +++ b/deps/npm/node_modules/nopt/package.json @@ -1,17 +1,18 @@ { "name": "nopt", - "version": "7.2.1", + "version": "8.0.0", "description": "Option parsing for Node, supporting types, shorthands, etc. 
Used by npm.", "author": "GitHub Inc.", "main": "lib/nopt.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -25,8 +26,8 @@ "abbrev": "^2.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "tap": { @@ -40,12 +41,12 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "windowsCI": false, - "version": "4.22.0", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/normalize-package-data/package.json b/deps/npm/node_modules/normalize-package-data/package.json index 04a7647abe65c6..a849ea3a848397 100644 --- a/deps/npm/node_modules/normalize-package-data/package.json +++ b/deps/npm/node_modules/normalize-package-data/package.json @@ -1,6 +1,6 @@ { "name": "normalize-package-data", - "version": "6.0.2", + "version": "7.0.0", "author": "GitHub Inc.", "description": "Normalizes data that can be found in package.json files.", "license": "BSD-2-Clause", @@ -12,22 +12,23 @@ "scripts": { "test": "tap", "npmclilint": "npmcli-lint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", "postsnap": "npm run lintfix --", "postlint": "template-oss-check", "snap": "tap", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "dependencies": { - "hosted-git-info": "^7.0.0", + "hosted-git-info": "^8.0.0", "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "files": [ @@ -35,11 +36,11 @@ "lib/" ], "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true" }, "tap": { diff --git a/deps/npm/node_modules/npm-audit-report/package.json b/deps/npm/node_modules/npm-audit-report/package.json index 492071c1faf902..22b16099e23d66 100644 --- a/deps/npm/node_modules/npm-audit-report/package.json +++ b/deps/npm/node_modules/npm-audit-report/package.json @@ -1,16 +1,17 @@ { "name": "npm-audit-report", - "version": "5.0.0", + "version": "6.0.0", "description": "Given a response from the npm security api, render it into a variety of security reports", "main": "lib/index.js", "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "check-coverage": true, @@ -29,8 +30,8 @@ "author": "GitHub Inc.", "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "chalk": "^5.2.0", "tap": "^16.0.0" }, @@ -40,7 +41,7 @@ }, "repository": { "type": "git", - "url": "https://github.com/npm/npm-audit-report.git" + "url": "git+https://github.com/npm/npm-audit-report.git" }, "bugs": { "url": "https://github.com/npm/npm-audit-report/issues" @@ -51,10 +52,11 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.14.1" + "version": "4.23.3", + "publish": true } } diff --git a/deps/npm/node_modules/npm-bundled/package.json b/deps/npm/node_modules/npm-bundled/package.json index 2744ca6af67fc7..c5daf35dbaa841 100644 --- a/deps/npm/node_modules/npm-bundled/package.json +++ b/deps/npm/node_modules/npm-bundled/package.json @@ -1,6 +1,6 @@ { "name": "npm-bundled", - "version": "3.0.1", + "version": "4.0.0", "description": "list things in node_modules that are bundledDependencies, or transitive dependencies thereof", "main": "lib/index.js", "repository": { @@ -10,33 +10,34 @@ "author": "GitHub Inc.", "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "mutate-fs": "^2.1.1", "tap": "^16.3.0" }, "scripts": { "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "files": [ "bin/", "lib/" ], "dependencies": { - "npm-normalize-package-bin": "^3.0.0" + "npm-normalize-package-bin": "^4.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true }, "tap": { diff --git a/deps/npm/node_modules/npm-install-checks/lib/current-env.js b/deps/npm/node_modules/npm-install-checks/lib/current-env.js new file mode 100644 index 00000000000000..9babde1f277ff1 --- /dev/null +++ b/deps/npm/node_modules/npm-install-checks/lib/current-env.js @@ -0,0 +1,63 @@ +const process = require('node:process') +const nodeOs = require('node:os') + +function isMusl (file) { + return file.includes('libc.musl-') || file.includes('ld-musl-') +} + +function os () { + return process.platform +} + +function cpu () { + return process.arch +} + +function libc (osName) { + // this is to make it faster on non linux machines + if (osName !== 'linux') { + return undefined + } + let family + const originalExclude = process.report.excludeNetwork + process.report.excludeNetwork = true + const report = process.report.getReport() + process.report.excludeNetwork = originalExclude + if (report.header?.glibcVersionRuntime) { + family = 'glibc' + } else if (Array.isArray(report.sharedObjects) && report.sharedObjects.some(isMusl)) { + family = 'musl' + } + return family +} + +function devEngines (env = {}) { + const osName = env.os || os() + return { + cpu: { + name: env.cpu || cpu(), + }, + libc: { + name: env.libc || libc(osName), + }, + os: { + name: osName, + version: env.osVersion || nodeOs.release(), + }, + packageManager: { + name: 'npm', + version: env.npmVersion, + }, + runtime: { + name: 'node', + version: env.nodeVersion || process.version, + }, + } +} + +module.exports = { + cpu, + libc, + os, + devEngines, +} diff --git a/deps/npm/node_modules/npm-install-checks/lib/dev-engines.js b/deps/npm/node_modules/npm-install-checks/lib/dev-engines.js new file mode 100644 index 00000000000000..ac5a182330d3b2 --- /dev/null +++ b/deps/npm/node_modules/npm-install-checks/lib/dev-engines.js @@ -0,0 +1,145 @@ +const satisfies = require('semver/functions/satisfies') +const validRange = require('semver/ranges/valid') + +const recognizedOnFail = [ + 'ignore', + 'warn', + 'error', + 'download', +] + +const recognizedProperties = [ + 'name', + 'version', + 'onFail', +] + +const recognizedEngines = [ + 'packageManager', + 'runtime', + 'cpu', + 'libc', + 'os', +] + +/** checks a devEngine dependency */ +function checkDependency (wanted, current, opts) { + const { engine } = opts + + if ((typeof wanted !== 'object' || wanted === null) || Array.isArray(wanted)) { + throw new Error(`Invalid non-object value for "${engine}"`) + } + + const properties = Object.keys(wanted) + + for (const prop of properties) { + if (!recognizedProperties.includes(prop)) { + throw new Error(`Invalid property "${prop}" for "${engine}"`) + } + } + + if (!properties.includes('name')) { + throw new Error(`Missing "name" property for "${engine}"`) + } + + if (typeof wanted.name !== 'string') { + throw new Error(`Invalid non-string value for "name" within "${engine}"`) + } + + if (typeof current.name !== 'string' || current.name === '') { + throw new Error(`Unable to determine "name" for "${engine}"`) + } + + if (properties.includes('onFail')) { + if (typeof wanted.onFail !== 'string') { + throw new Error(`Invalid non-string value for "onFail" within "${engine}"`) + } + if (!recognizedOnFail.includes(wanted.onFail)) { + throw new Error(`Invalid onFail value "${wanted.onFail}" for "${engine}"`) + } + } + + if (wanted.name !== current.name) { + return new Error( + `Invalid name "${wanted.name}" does not match "${current.name}" for 
"${engine}"` + ) + } + + if (properties.includes('version')) { + if (typeof wanted.version !== 'string') { + throw new Error(`Invalid non-string value for "version" within "${engine}"`) + } + if (typeof current.version !== 'string' || current.version === '') { + throw new Error(`Unable to determine "version" for "${engine}" "${wanted.name}"`) + } + if (validRange(wanted.version)) { + if (!satisfies(current.version, wanted.version, opts.semver)) { + return new Error( + // eslint-disable-next-line max-len + `Invalid semver version "${wanted.version}" does not match "${current.version}" for "${engine}"` + ) + } + } else if (wanted.version !== current.version) { + return new Error( + `Invalid version "${wanted.version}" does not match "${current.version}" for "${engine}"` + ) + } + } +} + +/** checks devEngines package property and returns array of warnings / errors */ +function checkDevEngines (wanted, current = {}, opts = {}) { + if ((typeof wanted !== 'object' || wanted === null) || Array.isArray(wanted)) { + throw new Error(`Invalid non-object value for devEngines`) + } + + const errors = [] + + for (const engine of Object.keys(wanted)) { + if (!recognizedEngines.includes(engine)) { + throw new Error(`Invalid property "${engine}"`) + } + const dependencyAsAuthored = wanted[engine] + const dependencies = [dependencyAsAuthored].flat() + const currentEngine = current[engine] || {} + + // this accounts for empty array eg { runtime: [] } and ignores it + if (dependencies.length === 0) { + continue + } + + const depErrors = [] + for (const dep of dependencies) { + const result = checkDependency(dep, currentEngine, { ...opts, engine }) + if (result) { + depErrors.push(result) + } + } + + const invalid = depErrors.length === dependencies.length + + if (invalid) { + const lastDependency = dependencies[dependencies.length - 1] + let onFail = lastDependency.onFail || 'error' + if (onFail === 'download') { + onFail = 'error' + } + + const err = Object.assign(new Error(`Invalid engine "${engine}"`), { + errors: depErrors, + engine, + isWarn: onFail === 'warn', + isError: onFail === 'error', + current: currentEngine, + required: dependencyAsAuthored, + }) + + errors.push(err) + } + } + return errors +} + +module.exports = { + checkDevEngines, +} diff --git a/deps/npm/node_modules/npm-install-checks/lib/index.js b/deps/npm/node_modules/npm-install-checks/lib/index.js index 545472b61dc604..71702920873080 100644 --- a/deps/npm/node_modules/npm-install-checks/lib/index.js +++ b/deps/npm/node_modules/npm-install-checks/lib/index.js @@ -1,4 +1,6 @@ const semver = require('semver') +const currentEnv = require('./current-env') +const { checkDevEngines } = require('./dev-engines') const checkEngine = (target, npmVer, nodeVer, force = false) => { const nodev = force ? null : nodeVer @@ -20,44 +22,29 @@ const checkEngine = (target, npmVer, nodeVer, force = false) => { } } -const isMusl = (file) => file.includes('libc.musl-') || file.includes('ld-musl-') - const checkPlatform = (target, force = false, environment = {}) => { if (force) { return } - const platform = environment.os || process.platform - const arch = environment.cpu || process.arch - const osOk = target.os ? checkList(platform, target.os) : true - const cpuOk = target.cpu ? 
checkList(arch, target.cpu) : true + const os = environment.os || currentEnv.os() + const cpu = environment.cpu || currentEnv.cpu() + const libc = environment.libc || currentEnv.libc(os) - let libcOk = true - let libcFamily = null - if (target.libc) { - // libc checks only work in linux, any value is a failure if we aren't - if (environment.libc) { - libcOk = checkList(environment.libc, target.libc) - } else if (platform !== 'linux') { - libcOk = false - } else { - const report = process.report.getReport() - if (report.header?.glibcVersionRuntime) { - libcFamily = 'glibc' - } else if (Array.isArray(report.sharedObjects) && report.sharedObjects.some(isMusl)) { - libcFamily = 'musl' - } - libcOk = libcFamily ? checkList(libcFamily, target.libc) : false - } + const osOk = target.os ? checkList(os, target.os) : true + const cpuOk = target.cpu ? checkList(cpu, target.cpu) : true + let libcOk = target.libc ? checkList(libc, target.libc) : true + if (target.libc && !libc) { + libcOk = false } if (!osOk || !cpuOk || !libcOk) { throw Object.assign(new Error('Unsupported platform'), { pkgid: target._id, current: { - os: platform, - cpu: arch, - libc: libcFamily, + os, + cpu, + libc, }, required: { os: target.os, @@ -98,4 +85,6 @@ const checkList = (value, list) => { module.exports = { checkEngine, checkPlatform, + checkDevEngines, + currentEnv, } diff --git a/deps/npm/node_modules/npm-install-checks/package.json b/deps/npm/node_modules/npm-install-checks/package.json index 11a3b87750e25a..e9e69575a6dc6d 100644 --- a/deps/npm/node_modules/npm-install-checks/package.json +++ b/deps/npm/node_modules/npm-install-checks/package.json @@ -1,28 +1,29 @@ { "name": "npm-install-checks", - "version": "6.3.0", + "version": "7.1.0", "description": "Check the engines and platform fields in package.json", "main": "lib/index.js", "dependencies": { "semver": "^7.1.1" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", - "url": "https://github.com/npm/npm-install-checks.git" + "url": "git+https://github.com/npm/npm-install-checks.git" }, "keywords": [ "npm,", @@ -34,12 +35,12 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.19.0", + "version": "4.23.3", "publish": "true" }, "tap": { diff --git a/deps/npm/node_modules/npm-normalize-package-bin/package.json b/deps/npm/node_modules/npm-normalize-package-bin/package.json index 5ea50dc2a47ba1..a1aeef0e1e7512 100644 --- a/deps/npm/node_modules/npm-normalize-package-bin/package.json +++ b/deps/npm/node_modules/npm-normalize-package-bin/package.json @@ -1,26 +1,27 @@ { "name": "npm-normalize-package-bin", - "version": "3.0.1", + "version": "4.0.0", "description": "Turn any flavor of allowable package.json bin into a normalized object", "main": "lib/index.js", "repository": { "type": "git", - "url": "https://github.com/npm/npm-normalize-package-bin.git" + "url": "git+https://github.com/npm/npm-normalize-package-bin.git" }, "author": "GitHub Inc.", "license": "ISC", "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "files": [ @@ -28,11 +29,11 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.14.1", + "version": "4.23.3", "publish": "true" }, "tap": { diff --git a/deps/npm/node_modules/npm-package-arg/package.json b/deps/npm/node_modules/npm-package-arg/package.json index d3f6fd7cf0a05d..80baa3d32a52fe 100644 --- a/deps/npm/node_modules/npm-package-arg/package.json +++ b/deps/npm/node_modules/npm-package-arg/package.json @@ -1,6 +1,6 @@ { "name": "npm-package-arg", - "version": "11.0.3", + "version": "12.0.0", "description": "Parse the things that can be arguments to `npm install`", "main": "./lib/npa.js", "directories": { @@ -11,14 +11,14 @@ "lib/" ], "dependencies": { - "hosted-git-info": "^7.0.0", - "proc-log": "^4.0.0", + "hosted-git-info": "^8.0.0", + "proc-log": "^5.0.0", "semver": "^7.3.5", - "validate-npm-package-name": "^5.0.0" + "validate-npm-package-name": "^6.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.23.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "scripts": { @@ -44,7 +44,7 @@ }, "homepage": "https://github.com/npm/npm-package-arg", "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "tap": { "branches": 97, @@ -55,7 +55,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.23.1", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/npm-packlist/package.json b/deps/npm/node_modules/npm-packlist/package.json index 8c3a16e741ad30..d7e0a4fd5a8452 100644 --- a/deps/npm/node_modules/npm-packlist/package.json +++ b/deps/npm/node_modules/npm-packlist/package.json @@ -1,13 +1,13 @@ { "name": "npm-packlist", - "version": "8.0.2", + "version": "9.0.0", "description": "Get a list of the files to add from a folder into an npm package", "directories": { "test": "test" }, "main": "lib/index.js", "dependencies": { - "ignore-walk": "^6.0.4" + "ignore-walk": "^7.0.0" }, "author": "GitHub Inc.", "license": "ISC", @@ -16,9 +16,9 @@ "lib/" ], "devDependencies": { - "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0", + "@npmcli/arborist": "^7.5.4", "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.2", + "@npmcli/template-oss": "4.23.3", "mutate-fs": "^2.1.1", "tap": "^16.0.1" }, @@ -27,16 +27,16 @@ "posttest": "npm run lint", "snap": "tap", "postsnap": "npm run lintfix --", - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "npmclilint": "npmcli-lint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force" }, "repository": { "type": "git", - "url": "https://github.com/npm/npm-packlist.git" + "url": "git+https://github.com/npm/npm-packlist.git" }, "tap": { "test-env": [ @@ -51,11 +51,11 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.21.2", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/npm-pick-manifest/package.json b/deps/npm/node_modules/npm-pick-manifest/package.json index 4c0dd50630def5..5763088c250b69 100644 --- a/deps/npm/node_modules/npm-pick-manifest/package.json +++ b/deps/npm/node_modules/npm-pick-manifest/package.json @@ -1,6 +1,6 @@ { "name": "npm-pick-manifest", - "version": "9.1.0", + "version": "10.0.0", "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.", "main": "./lib", "files": [ @@ -9,13 +9,14 @@ ], "scripts": { "coverage": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "test": "tap", "posttest": "npm run lint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -29,14 +30,14 @@ "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "npm-install-checks": "^6.0.0", - "npm-normalize-package-bin": "^3.0.0", - "npm-package-arg": "^11.0.0", + "npm-install-checks": "^7.1.0", + "npm-normalize-package-bin": "^4.0.0", + "npm-package-arg": "^12.0.0", "semver": "^7.3.5" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "tap": { @@ -47,11 +48,11 @@ ] }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/npm-profile/lib/index.js b/deps/npm/node_modules/npm-profile/lib/index.js index 69e4e49b1fcddf..83ab5e1b46b68e 100644 --- a/deps/npm/node_modules/npm-profile/lib/index.js +++ b/deps/npm/node_modules/npm-profile/lib/index.js @@ -119,7 +119,7 @@ const webAuthCheckLogin = async (doneUrl, opts, { signal } = {}) => { if (res.status === 202) { const retry = +res.headers.get('retry-after') * 1000 if (retry > 0) { - await timers.setTimeout(retry, null, { ref: false, signal }) + await timers.setTimeout(retry, null, { signal }) } return webAuthCheckLogin(doneUrl, opts, { signal }) } diff --git a/deps/npm/node_modules/npm-profile/package.json b/deps/npm/node_modules/npm-profile/package.json index ff93911716fa7e..72a19a08231e2b 100644 --- a/deps/npm/node_modules/npm-profile/package.json +++ b/deps/npm/node_modules/npm-profile/package.json @@ -1,26 +1,26 @@ { "name": "npm-profile", - "version": "10.0.0", + "version": "11.0.1", "description": "Library for updating an npmjs.com profile", "keywords": [], "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "npm-registry-fetch": "^17.0.1", - "proc-log": "^4.0.0" + "npm-registry-fetch": "^18.0.0", + "proc-log": "^5.0.0" }, "main": "./lib/index.js", "repository": { "type": "git", - "url": "https://github.com/npm/npm-profile.git" + "url": "git+https://github.com/npm/npm-profile.git" }, "files": [ "bin/", "lib/" ], "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.4", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "nock": "^13.2.4", "tap": "^16.0.1" }, @@ -28,10 +28,11 @@ "posttest": "npm run lint", "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "template-oss-apply": "template-oss-apply --force" + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "check-coverage": true, @@ -41,11 +42,11 @@ ] }, "engines": { - "node": ">=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.4", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/LICENSE b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/LICENSE new file mode 100644 index 00000000000000..49f7efe431c9ea --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/LICENSE @@ -0,0 +1,26 @@ +Minizlib was created by Isaac Z. Schlueter. +It is a derivative work of the Node.js project. + +""" +Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors +Copyright (c) 2017-2023 Node.js contributors. All rights reserved. +Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +""" diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/constants.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/constants.js new file mode 100644 index 00000000000000..dfc2c1957bfc99 --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/constants.js @@ -0,0 +1,123 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.constants = void 0; +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. 
+const zlib_1 = __importDefault(require("zlib")); +/* c8 ignore start */ +const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 }; +/* c8 ignore stop */ +exports.constants = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)); +//# 
sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/index.js new file mode 100644 index 00000000000000..ad65eef0495076 --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/index.js @@ -0,0 +1,352 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0; +const assert_1 = __importDefault(require("assert")); +const buffer_1 = require("buffer"); +const minipass_1 = require("minipass"); +const zlib_1 = __importDefault(require("zlib")); +const constants_js_1 = require("./constants.js"); +var constants_js_2 = require("./constants.js"); +Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } }); +const OriginalBufferConcat = buffer_1.Buffer.concat; +const _superWrite = Symbol('_superWrite'); +class ZlibError extends Error { + code; + errno; + constructor(err) { + super('zlib: ' + err.message); + this.code = err.code; + this.errno = err.errno; + /* c8 ignore next */ + if (!this.code) + this.code = 'ZLIB_ERROR'; + this.message = 'zlib: ' + err.message; + Error.captureStackTrace(this, this.constructor); + } + get name() { + return 'ZlibError'; + } +} +exports.ZlibError = ZlibError; +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. +const _flushFlag = Symbol('flushFlag'); +class ZlibBase extends minipass_1.Minipass { + #sawError = false; + #ended = false; + #flushFlag; + #finishFlushFlag; + #fullFlushFlag; + #handle; + #onError; + get sawError() { + return this.#sawError; + } + get handle() { + return this.#handle; + } + /* c8 ignore start */ + get flushFlag() { + return this.#flushFlag; + } + /* c8 ignore stop */ + constructor(opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor'); + //@ts-ignore + super(opts); + /* c8 ignore start */ + this.#flushFlag = opts.flush ?? 0; + this.#finishFlushFlag = opts.finishFlush ?? 0; + this.#fullFlushFlag = opts.fullFlushFlag ?? 0; + /* c8 ignore stop */ + // this will throw if any options are invalid for the class selected + try { + // @types/node doesn't know that it exports the classes, but they're there + //@ts-ignore + this.#handle = new zlib_1.default[mode](opts); + } + catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er); + } + this.#onError = err => { + // no sense raising multiple errors, since we abort on the first one. + if (this.#sawError) + return; + this.#sawError = true; + // there is no way to cleanly recover. + // continuing only obscures problems. 
+ this.close(); + this.emit('error', err); + }; + this.#handle?.on('error', er => this.#onError(new ZlibError(er))); + this.once('end', () => this.close); + } + close() { + if (this.#handle) { + this.#handle.close(); + this.#handle = undefined; + this.emit('close'); + } + } + reset() { + if (!this.#sawError) { + (0, assert_1.default)(this.#handle, 'zlib binding closed'); + //@ts-ignore + return this.#handle.reset?.(); + } + } + flush(flushFlag) { + if (this.ended) + return; + if (typeof flushFlag !== 'number') + flushFlag = this.#fullFlushFlag; + this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag })); + } + end(chunk, encoding, cb) { + /* c8 ignore start */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (chunk) { + if (encoding) + this.write(chunk, encoding); + else + this.write(chunk); + } + this.flush(this.#finishFlushFlag); + this.#ended = true; + return super.end(cb); + } + get ended() { + return this.#ended; + } + // overridden in the gzip classes to do portable writes + [_superWrite](data) { + return super.write(data); + } + write(chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + (cb = encoding), (encoding = 'utf8'); + if (typeof chunk === 'string') + chunk = buffer_1.Buffer.from(chunk, encoding); + if (this.#sawError) + return; + (0, assert_1.default)(this.#handle, 'zlib binding closed'); + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + // diving into the node:zlib internals a bit here + const nativeHandle = this.#handle + ._handle; + const originalNativeClose = nativeHandle.close; + nativeHandle.close = () => { }; + const originalClose = this.#handle.close; + this.#handle.close = () => { }; + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + buffer_1.Buffer.concat = args => args; + let result = undefined; + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] + : this.#flushFlag; + result = this.#handle._processChunk(chunk, flushFlag); + // if we don't throw, reset it back how it was + buffer_1.Buffer.concat = OriginalBufferConcat; + } + catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + buffer_1.Buffer.concat = OriginalBufferConcat; + this.#onError(new ZlibError(err)); + } + finally { + if (this.#handle) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + ; + this.#handle._handle = + nativeHandle; + nativeHandle.close = originalNativeClose; + this.#handle.close = originalClose; + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. 
+ this.#handle.removeAllListeners('error'); + // make sure OUR error listener is still attached tho + } + } + if (this.#handle) + this.#handle.on('error', er => this.#onError(new ZlibError(er))); + let writeReturn; + if (result) { + if (Array.isArray(result) && result.length > 0) { + const r = result[0]; + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](buffer_1.Buffer.from(r)); + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]); + } + } + else { + // either a single Buffer or an empty array + writeReturn = this[_superWrite](buffer_1.Buffer.from(result)); + } + } + if (cb) + cb(); + return writeReturn; + } +} +class Zlib extends ZlibBase { + #level; + #strategy; + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH; + opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH; + opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH; + super(opts, mode); + this.#level = opts.level; + this.#strategy = opts.strategy; + } + params(level, strategy) { + if (this.sawError) + return; + if (!this.handle) + throw new Error('cannot switch params when binding is closed'); + // no way to test this without also not supporting params at all + /* c8 ignore start */ + if (!this.handle.params) + throw new Error('not supported in this implementation'); + /* c8 ignore stop */ + if (this.#level !== level || this.#strategy !== strategy) { + this.flush(constants_js_1.constants.Z_SYNC_FLUSH); + (0, assert_1.default)(this.handle, 'zlib binding closed'); + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. + const origFlush = this.handle.flush; + this.handle.flush = (flushFlag, cb) => { + /* c8 ignore start */ + if (typeof flushFlag === 'function') { + cb = flushFlag; + flushFlag = this.flushFlag; + } + /* c8 ignore stop */ + this.flush(flushFlag); + cb?.(); + }; + try { + ; + this.handle.params(level, strategy); + } + finally { + this.handle.flush = origFlush; + } + /* c8 ignore start */ + if (this.handle) { + this.#level = level; + this.#strategy = strategy; + } + /* c8 ignore stop */ + } + } +} +exports.Zlib = Zlib; +// minimal 2-byte header +class Deflate extends Zlib { + constructor(opts) { + super(opts, 'Deflate'); + } +} +exports.Deflate = Deflate; +class Inflate extends Zlib { + constructor(opts) { + super(opts, 'Inflate'); + } +} +exports.Inflate = Inflate; +class Gzip extends Zlib { + #portable; + constructor(opts) { + super(opts, 'Gzip'); + this.#portable = opts && !!opts.portable; + } + [_superWrite](data) { + if (!this.#portable) + return super[_superWrite](data); + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this.#portable = false; + data[9] = 255; + return super[_superWrite](data); + } +} +exports.Gzip = Gzip; +class Gunzip extends Zlib { + constructor(opts) { + super(opts, 'Gunzip'); + } +} +exports.Gunzip = Gunzip; +// raw - no header +class DeflateRaw extends Zlib { + constructor(opts) { + super(opts, 'DeflateRaw'); + } +} +exports.DeflateRaw = DeflateRaw; +class InflateRaw extends Zlib { + constructor(opts) { + super(opts, 'InflateRaw'); + } +} +exports.InflateRaw = InflateRaw; +// auto-detect header. 
+class Unzip extends Zlib { + constructor(opts) { + super(opts, 'Unzip'); + } +} +exports.Unzip = Unzip; +class Brotli extends ZlibBase { + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS; + opts.finishFlush = + opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH; + opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH; + super(opts, mode); + } +} +exports.Brotli = Brotli; +class BrotliCompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliCompress'); + } +} +exports.BrotliCompress = BrotliCompress; +class BrotliDecompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliDecompress'); + } +} +exports.BrotliDecompress = BrotliDecompress; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/constants.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/constants.js new file mode 100644 index 00000000000000..7faf40be5068d0 --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/constants.js @@ -0,0 +1,117 @@ +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. 
+import realZlib from 'zlib'; +/* c8 ignore start */ +const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 }; +/* c8 ignore stop */ +export const constants = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)); +//# sourceMappingURL=constants.js.map \ No 
newline at end of file diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/index.js new file mode 100644 index 00000000000000..a6269b505f47cc --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/index.js @@ -0,0 +1,333 @@ +import assert from 'assert'; +import { Buffer } from 'buffer'; +import { Minipass } from 'minipass'; +import realZlib from 'zlib'; +import { constants } from './constants.js'; +export { constants } from './constants.js'; +const OriginalBufferConcat = Buffer.concat; +const _superWrite = Symbol('_superWrite'); +export class ZlibError extends Error { + code; + errno; + constructor(err) { + super('zlib: ' + err.message); + this.code = err.code; + this.errno = err.errno; + /* c8 ignore next */ + if (!this.code) + this.code = 'ZLIB_ERROR'; + this.message = 'zlib: ' + err.message; + Error.captureStackTrace(this, this.constructor); + } + get name() { + return 'ZlibError'; + } +} +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. +const _flushFlag = Symbol('flushFlag'); +class ZlibBase extends Minipass { + #sawError = false; + #ended = false; + #flushFlag; + #finishFlushFlag; + #fullFlushFlag; + #handle; + #onError; + get sawError() { + return this.#sawError; + } + get handle() { + return this.#handle; + } + /* c8 ignore start */ + get flushFlag() { + return this.#flushFlag; + } + /* c8 ignore stop */ + constructor(opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor'); + //@ts-ignore + super(opts); + /* c8 ignore start */ + this.#flushFlag = opts.flush ?? 0; + this.#finishFlushFlag = opts.finishFlush ?? 0; + this.#fullFlushFlag = opts.fullFlushFlag ?? 0; + /* c8 ignore stop */ + // this will throw if any options are invalid for the class selected + try { + // @types/node doesn't know that it exports the classes, but they're there + //@ts-ignore + this.#handle = new realZlib[mode](opts); + } + catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er); + } + this.#onError = err => { + // no sense raising multiple errors, since we abort on the first one. + if (this.#sawError) + return; + this.#sawError = true; + // there is no way to cleanly recover. + // continuing only obscures problems. 
+ this.close(); + this.emit('error', err); + }; + this.#handle?.on('error', er => this.#onError(new ZlibError(er))); + this.once('end', () => this.close()); + } + close() { + if (this.#handle) { + this.#handle.close(); + this.#handle = undefined; + this.emit('close'); + } + } + reset() { + if (!this.#sawError) { + assert(this.#handle, 'zlib binding closed'); + //@ts-ignore + return this.#handle.reset?.(); + } + } + flush(flushFlag) { + if (this.ended) + return; + if (typeof flushFlag !== 'number') + flushFlag = this.#fullFlushFlag; + this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag })); + } + end(chunk, encoding, cb) { + /* c8 ignore start */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (chunk) { + if (encoding) + this.write(chunk, encoding); + else + this.write(chunk); + } + this.flush(this.#finishFlushFlag); + this.#ended = true; + return super.end(cb); + } + get ended() { + return this.#ended; + } + // overridden in the gzip classes to do portable writes + [_superWrite](data) { + return super.write(data); + } + write(chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + (cb = encoding), (encoding = 'utf8'); + if (typeof chunk === 'string') + chunk = Buffer.from(chunk, encoding); + if (this.#sawError) + return; + assert(this.#handle, 'zlib binding closed'); + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + // diving into the node:zlib internals a bit here + const nativeHandle = this.#handle + ._handle; + const originalNativeClose = nativeHandle.close; + nativeHandle.close = () => { }; + const originalClose = this.#handle.close; + this.#handle.close = () => { }; + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + Buffer.concat = args => args; + let result = undefined; + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] + : this.#flushFlag; + result = this.#handle._processChunk(chunk, flushFlag); + // if we don't throw, reset it back how it was + Buffer.concat = OriginalBufferConcat; + } + catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + Buffer.concat = OriginalBufferConcat; + this.#onError(new ZlibError(err)); + } + finally { + if (this.#handle) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + ; + this.#handle._handle = + nativeHandle; + nativeHandle.close = originalNativeClose; + this.#handle.close = originalClose; + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up.
+ this.#handle.removeAllListeners('error'); + // make sure OUR error listener is still attached tho + } + } + if (this.#handle) + this.#handle.on('error', er => this.#onError(new ZlibError(er))); + let writeReturn; + if (result) { + if (Array.isArray(result) && result.length > 0) { + const r = result[0]; + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](Buffer.from(r)); + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]); + } + } + else { + // either a single Buffer or an empty array + writeReturn = this[_superWrite](Buffer.from(result)); + } + } + if (cb) + cb(); + return writeReturn; + } +} +export class Zlib extends ZlibBase { + #level; + #strategy; + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants.Z_NO_FLUSH; + opts.finishFlush = opts.finishFlush || constants.Z_FINISH; + opts.fullFlushFlag = constants.Z_FULL_FLUSH; + super(opts, mode); + this.#level = opts.level; + this.#strategy = opts.strategy; + } + params(level, strategy) { + if (this.sawError) + return; + if (!this.handle) + throw new Error('cannot switch params when binding is closed'); + // no way to test this without also not supporting params at all + /* c8 ignore start */ + if (!this.handle.params) + throw new Error('not supported in this implementation'); + /* c8 ignore stop */ + if (this.#level !== level || this.#strategy !== strategy) { + this.flush(constants.Z_SYNC_FLUSH); + assert(this.handle, 'zlib binding closed'); + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. + const origFlush = this.handle.flush; + this.handle.flush = (flushFlag, cb) => { + /* c8 ignore start */ + if (typeof flushFlag === 'function') { + cb = flushFlag; + flushFlag = this.flushFlag; + } + /* c8 ignore stop */ + this.flush(flushFlag); + cb?.(); + }; + try { + ; + this.handle.params(level, strategy); + } + finally { + this.handle.flush = origFlush; + } + /* c8 ignore start */ + if (this.handle) { + this.#level = level; + this.#strategy = strategy; + } + /* c8 ignore stop */ + } + } +} +// minimal 2-byte header +export class Deflate extends Zlib { + constructor(opts) { + super(opts, 'Deflate'); + } +} +export class Inflate extends Zlib { + constructor(opts) { + super(opts, 'Inflate'); + } +} +export class Gzip extends Zlib { + #portable; + constructor(opts) { + super(opts, 'Gzip'); + this.#portable = opts && !!opts.portable; + } + [_superWrite](data) { + if (!this.#portable) + return super[_superWrite](data); + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this.#portable = false; + data[9] = 255; + return super[_superWrite](data); + } +} +export class Gunzip extends Zlib { + constructor(opts) { + super(opts, 'Gunzip'); + } +} +// raw - no header +export class DeflateRaw extends Zlib { + constructor(opts) { + super(opts, 'DeflateRaw'); + } +} +export class InflateRaw extends Zlib { + constructor(opts) { + super(opts, 'InflateRaw'); + } +} +// auto-detect header. 
+export class Unzip extends Zlib { + constructor(opts) { + super(opts, 'Unzip'); + } +} +export class Brotli extends ZlibBase { + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS; + opts.finishFlush = + opts.finishFlush || constants.BROTLI_OPERATION_FINISH; + opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH; + super(opts, mode); + } +} +export class BrotliCompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliCompress'); + } +} +export class BrotliDecompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliDecompress'); + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/package.json new file mode 100644 index 00000000000000..e94623ff43d353 --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/package.json @@ -0,0 +1,81 @@ +{ + "name": "minizlib", + "version": "3.0.1", + "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.", + "main": "./dist/commonjs/index.js", + "dependencies": { + "minipass": "^7.0.4", + "rimraf": "^5.0.5" + }, + "scripts": { + "prepare": "tshy", + "pretest": "npm run prepare", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minizlib.git" + }, + "keywords": [ + "zlib", + "gzip", + "gunzip", + "deflate", + "inflate", + "compression", + "zip", + "unzip" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "MIT", + "devDependencies": { + "@types/node": "^20.11.29", + "mkdirp": "^3.0.1", + "tap": "^18.7.1", + "tshy": "^1.12.0", + "typedoc": "^0.25.12" + }, + "files": [ + "dist" + ], + "engines": { + "node": ">= 18" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "types": "./dist/commonjs/index.d.ts", + "type": "module", + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/deps/npm/node_modules/npm-registry-fetch/package.json b/deps/npm/node_modules/npm-registry-fetch/package.json index 07ea620d153172..559473b964aaa5 100644 --- a/deps/npm/node_modules/npm-registry-fetch/package.json +++ b/deps/npm/node_modules/npm-registry-fetch/package.json @@ -1,6 +1,6 @@ { "name": "npm-registry-fetch", - "version": "17.1.0", + "version": "18.0.1", "description": "Fetch-based http client for use with npm registry APIs", "main": "lib", "files": [ @@ -8,9 +8,9 @@ "lib/" ], "scripts": { - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "test": "tap", "posttest": "npm run lint", "npmclilint": "npmcli-lint", @@ -31,22 +31,22 @@ "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "@npmcli/redact": "^2.0.0", + "@npmcli/redact": "^3.0.0", "jsonparse": "^1.3.1", - "make-fetch-happen": "^13.0.0", + "make-fetch-happen": "^14.0.0", "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", - "minizlib": "^2.1.2", - "npm-package-arg": "^11.0.0", - "proc-log": "^4.0.0" + "minipass-fetch": "^4.0.0", + "minizlib": "^3.0.1", + "npm-package-arg": "^12.0.0", + "proc-log": "^5.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "cacache": "^18.0.0", "nock": "^13.2.4", "require-inject": "^1.4.4", - "ssri": "^10.0.0", + "ssri": "^12.0.0", "tap": "^16.0.1" }, "tap": { @@ -58,11 +58,11 @@ ] }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true" } } diff --git a/deps/npm/node_modules/npm-user-validate/package.json b/deps/npm/node_modules/npm-user-validate/package.json index 15a1305356ef3b..8c66f8f3e47b9b 100644 --- a/deps/npm/node_modules/npm-user-validate/package.json +++ b/deps/npm/node_modules/npm-user-validate/package.json @@ -1,21 +1,22 @@ { "name": "npm-user-validate", - "version": "2.0.1", + "version": "3.0.0", "description": "User validations for npm", "main": "lib/index.js", "devDependencies": { - "@npmcli/eslint-config": "^4.0.1", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.2" }, "scripts": { "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -33,11 +34,11 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true }, "tap": { diff --git a/deps/npm/node_modules/pacote/package.json b/deps/npm/node_modules/pacote/package.json index caadaf2db50c81..0eb8261af96e0c 100644 --- a/deps/npm/node_modules/pacote/package.json +++ b/deps/npm/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "18.0.6", + "version": "19.0.0", "description": "JavaScript package downloader", "author": "GitHub Inc.", "bin": { @@ -11,11 +11,12 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "timeout": 300, @@ -26,9 +27,9 @@ }, "devDependencies": { "@npmcli/arborist": "^7.1.0", - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "hosted-git-info": "^7.0.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "hosted-git-info": "^8.0.0", "mutate-fs": "^2.1.1", "nock": "^13.2.4", "npm-registry-mock": "^1.3.2", @@ -44,26 +45,26 @@ "git" ], "dependencies": { - "@npmcli/git": "^5.0.0", - "@npmcli/installed-package-contents": "^2.0.1", - "@npmcli/package-json": "^5.1.0", - "@npmcli/promise-spawn": "^7.0.0", - "@npmcli/run-script": "^8.0.0", - "cacache": "^18.0.0", + "@npmcli/git": "^6.0.0", + "@npmcli/installed-package-contents": "^3.0.0", + "@npmcli/package-json": "^6.0.0", + "@npmcli/promise-spawn": "^8.0.0", + "@npmcli/run-script": "^9.0.0", + "cacache": "^19.0.0", "fs-minipass": "^3.0.0", "minipass": "^7.0.2", - "npm-package-arg": "^11.0.0", - "npm-packlist": "^8.0.0", - "npm-pick-manifest": "^9.0.0", - "npm-registry-fetch": "^17.0.0", - "proc-log": "^4.0.0", + "npm-package-arg": "^12.0.0", + "npm-packlist": "^9.0.0", + "npm-pick-manifest": "^10.0.0", + "npm-registry-fetch": "^18.0.0", + "proc-log": "^5.0.0", "promise-retry": 
"^2.0.1", "sigstore": "^2.2.0", - "ssri": "^10.0.0", + "ssri": "^12.0.0", "tar": "^6.1.11" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "repository": { "type": "git", @@ -71,7 +72,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "windowsCI": false, "publish": "true" } diff --git a/deps/npm/node_modules/parse-conflict-json/package.json b/deps/npm/node_modules/parse-conflict-json/package.json index 32584d3e6401b3..824ca8ed2bd145 100644 --- a/deps/npm/node_modules/parse-conflict-json/package.json +++ b/deps/npm/node_modules/parse-conflict-json/package.json @@ -1,6 +1,6 @@ { "name": "parse-conflict-json", - "version": "3.0.1", + "version": "4.0.0", "description": "Parse a JSON string that has git merge conflicts, resolving if possible", "author": "GitHub Inc.", "license": "ISC", @@ -8,11 +8,12 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "check-coverage": true, @@ -22,28 +23,29 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.12.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "dependencies": { - "json-parse-even-better-errors": "^3.0.0", + "json-parse-even-better-errors": "^4.0.0", "just-diff": "^6.0.0", "just-diff-apply": "^5.2.0" }, "repository": { "type": "git", - "url": "https://github.com/npm/parse-conflict-json.git" + "url": "git+https://github.com/npm/parse-conflict-json.git" }, "files": [ "bin/", "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.12.0" + "version": "4.23.3", + "publish": true } } diff --git a/deps/npm/node_modules/proc-log/package.json b/deps/npm/node_modules/proc-log/package.json index 4ab89102ecc9b5..957209d3954e53 100644 --- a/deps/npm/node_modules/proc-log/package.json +++ b/deps/npm/node_modules/proc-log/package.json @@ -1,6 +1,6 @@ { "name": "proc-log", - "version": "4.2.0", + "version": "5.0.0", "files": [ "bin/", "lib/" @@ -9,7 +9,7 @@ "description": "just emit 'log' events on the process object", "repository": { "type": "git", - "url": "https://github.com/npm/proc-log.git" + "url": "git+https://github.com/npm/proc-log.git" }, "author": "GitHub Inc.", "license": "ISC", @@ -18,22 +18,23 @@ "snap": "tap", "posttest": "npm run lint", "postsnap": "eslint index.js test/*.js --fix", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "template-oss-apply": "template-oss-apply --force" + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.3", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.3", + "version": "4.23.3", "publish": true }, "tap": { diff --git a/deps/npm/node_modules/proggy/package.json b/deps/npm/node_modules/proggy/package.json index 4940fc9d002a64..01ac44b4af18f8 100644 --- a/deps/npm/node_modules/proggy/package.json +++ b/deps/npm/node_modules/proggy/package.json @@ -1,6 +1,6 @@ { "name": "proggy", - "version": "2.0.0", + "version": "3.0.0", "files": [ "bin/", "lib/" @@ -9,7 +9,7 @@ "description": "Progress bar updates at a distance", "repository": { "type": "git", - "url": "https://github.com/npm/proggy.git" + "url": "git+https://github.com/npm/proggy.git" }, "author": "GitHub Inc.", "license": "ISC", @@ -18,17 +18,18 @@ "posttest": "npm run lint", "snap": "tap", "postsnap": "eslint lib test --fix", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "template-oss-apply": "template-oss-apply --force" + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "4.5.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "chalk": "^4.1.2", "cli-progress": "^3.10.0", - "npmlog": "^6.0.1", + "npmlog": "^7.0.0", "tap": "^16.0.1" }, "tap": { @@ -39,10 +40,11 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.5.1" + "version": "4.23.3", + "publish": true } } diff --git a/deps/npm/node_modules/promzard/package.json b/deps/npm/node_modules/promzard/package.json index 71b0ed439a4402..c224a4b458e877 100644 --- a/deps/npm/node_modules/promzard/package.json +++ b/deps/npm/node_modules/promzard/package.json @@ -2,28 +2,29 @@ "author": "GitHub Inc.", "name": "promzard", "description": "prompting wizardly", - "version": "1.0.2", + "version": "2.0.0", "repository": { "url": "git+https://github.com/npm/promzard.git", "type": "git" }, "dependencies": { - "read": "^3.0.1" + "read": "^4.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "main": "lib/index.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "license": "ISC", "files": [ @@ -31,11 +32,11 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true }, "tap": { diff --git a/deps/npm/node_modules/read-cmd-shim/package.json b/deps/npm/node_modules/read-cmd-shim/package.json index 401ee3d7101f10..3b16802df3ce21 100644 --- a/deps/npm/node_modules/read-cmd-shim/package.json +++ b/deps/npm/node_modules/read-cmd-shim/package.json @@ -1,23 +1,23 @@ { "name": "read-cmd-shim", - "version": "4.0.0", + "version": "5.0.0", "description": "Figure out what a cmd-shim is pointing at. This acts as the equivalent of fs.readlink.", "main": "lib/index.js", "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "4.5.1", - "cmd-shim": "^5.0.0", - "rimraf": "^3.0.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "cmd-shim": "^7.0.0", "tap": "^16.0.1" }, "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "check-coverage": true, @@ -28,7 +28,7 @@ }, "repository": { "type": "git", - "url": "https://github.com/npm/read-cmd-shim.git" + "url": "git+https://github.com/npm/read-cmd-shim.git" }, "license": "ISC", "homepage": "https://github.com/npm/read-cmd-shim#readme", @@ -38,10 +38,11 @@ ], "author": "GitHub Inc.", "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.5.1" + "version": "4.23.3", + "publish": true } } diff --git a/deps/npm/node_modules/read-package-json-fast/package.json b/deps/npm/node_modules/read-package-json-fast/package.json index 4964bb0a934cb8..20208329e24be6 100644 --- a/deps/npm/node_modules/read-package-json-fast/package.json +++ b/deps/npm/node_modules/read-package-json-fast/package.json @@ -1,6 +1,6 @@ { "name": "read-package-json-fast", - "version": "3.0.2", + "version": "4.0.0", "description": "Like read-package-json, but faster", "main": "lib/index.js", "author": "GitHub Inc.", @@ -8,27 +8,28 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "dependencies": { - "json-parse-even-better-errors": "^3.0.0", - "npm-normalize-package-bin": "^3.0.0" + "json-parse-even-better-errors": "^4.0.0", + "npm-normalize-package-bin": "^4.0.0" }, "repository": { "type": "git", - "url": "https://github.com/npm/read-package-json-fast.git" + "url": "git+https://github.com/npm/read-package-json-fast.git" }, "files": [ "bin/", @@ -36,7 +37,8 @@ ], "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.0" + "version": "4.23.3", + "publish": true }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/read/dist/commonjs/read.js b/deps/npm/node_modules/read/dist/commonjs/read.js index bab433d8a1155f..c0600d2b4e8cae 100644 --- a/deps/npm/node_modules/read/dist/commonjs/read.js +++ b/deps/npm/node_modules/read/dist/commonjs/read.js @@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.read = void 0; +exports.read = read; const mute_stream_1 = __importDefault(require("mute-stream")); const readline_1 = require("readline"); async function read({ default: def, input = process.stdin, output = process.stdout, completer, prompt = '', silent, timeout, edit, terminal, replace, }) { @@ -91,5 +91,4 @@ async function read({ default: def, input = process.stdin, output = process.stdo /* c8 ignore stop */ }); } -exports.read = read; //# sourceMappingURL=read.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/read/package.json b/deps/npm/node_modules/read/package.json index 799362f40f1529..337f7d26d4dd94 100644 --- a/deps/npm/node_modules/read/package.json +++ b/deps/npm/node_modules/read/package.json @@ -1,6 +1,6 @@ { "name": "read", - "version": "3.0.1", + "version": "4.0.0", "exports": { "./package.json": "./package.json", ".": { @@ -22,29 +22,29 @@ } }, "dependencies": { - "mute-stream": "^1.0.0" + "mute-stream": "^2.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.2", - "@npmcli/template-oss": "4.20.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "@types/mute-stream": "^0.0.4", "@types/tap": "^15.0.11", - "@typescript-eslint/parser": "^6.11.0", - "c8": "^8.0.1", + "@typescript-eslint/parser": "^8.0.1", + "c8": "^10.1.2", "eslint-import-resolver-typescript": "^3.6.1", "tap": "^16.3.9", "ts-node": "^10.9.1", - "tshy": "^1.8.0", + "tshy": "^3.0.2", "typescript": "^5.2.2" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "author": "GitHub Inc.", "description": "read(1) for node programs", "repository": { "type": "git", - "url": "https://github.com/npm/read.git" + "url": "git+https://github.com/npm/read.git" }, "license": "ISC", "scripts": { @@ -52,19 +52,19 @@ "pretest": "npm run prepare", "presnap": "npm run prepare", "test": "c8 tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "c8 tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.20.0", + "version": "4.23.3", "publish": true, - "typescript": true, - "content": "./scripts/template-oss" + "typescript": true }, "main": "./dist/commonjs/read.js", "types": "./dist/commonjs/read.d.ts", @@ -83,5 +83,6 @@ }, "files": [ "dist/" - ] + ], + "module": "./dist/esm/read.js" } diff --git a/deps/npm/node_modules/rimraf/LICENSE b/deps/npm/node_modules/rimraf/LICENSE new file mode 100644 index 00000000000000..1493534e60dce4 --- /dev/null +++ b/deps/npm/node_modules/rimraf/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/rimraf/README.md b/deps/npm/node_modules/rimraf/README.md new file mode 100644 index 00000000000000..7ab1a5d6d77ffa --- /dev/null +++ b/deps/npm/node_modules/rimraf/README.md @@ -0,0 +1,220 @@ +The [UNIX command](<https://en.wikipedia.org/wiki/Rm_(Unix)>) `rm -rf` for node +in a cross-platform implementation. + +Install with `npm install rimraf`. + +## Major Changes + +### v4 to v5 + +- There is no default export anymore. Import the functions directly + using, e.g., `import { rimrafSync } from 'rimraf'`. + +### v3 to v4 + +- The function returns a `Promise` instead of taking a callback. +- Globbing requires the `--glob` CLI option or `glob` option property + to be set. (Removed in 4.0 and 4.1, opt-in support added in 4.2.) +- Functions take arrays of paths, as well as a single path. +- Native implementation used by default when available, except on + Windows, where this implementation is faster and more reliable. +- New implementation on Windows, falling back to "move then + remove" strategy when exponential backoff for `EBUSY` fails to + resolve the situation. +- Simplified implementation on POSIX, since the Windows + affordances are not necessary there. +- As of 4.3, return/resolve value is boolean instead of undefined. + +## API + +Hybrid module, load either with `import` or `require()`. + +```js +// 'rimraf' export is the one you probably want, but other +// strategies exported as well. +import { rimraf, rimrafSync, native, nativeSync } from 'rimraf' +// or +const { rimraf, rimrafSync, native, nativeSync } = require('rimraf') +``` + +All removal functions return a boolean indicating that all +entries were successfully removed. + +The only case in which this will not return `true` is if +something was omitted from the removal via a `filter` option. + +### `rimraf(f, [opts]) -> Promise` + +The first parameter is a path or array of paths. The second +argument is an options object. + +Options: + +- `preserveRoot`: If set to boolean `false`, then allow the + recursive removal of the root directory. Otherwise, this is + not allowed. +- `tmp`: Windows only. Temp folder to place files and + folders for the "move then remove" fallback. Must be on the + same physical device as the path being deleted. Defaults to + `os.tmpdir()` when that is on the same drive letter as the path + being deleted, or `${drive}:\temp` if present, or `${drive}:\` + if not. +- `maxRetries`: Windows and Native only. Maximum number of + retry attempts in case of `EBUSY`, `EMFILE`, and `ENFILE` + errors. Default `10` for Windows implementation, `0` for Native + implementation. +- `backoff`: Windows only. Rate of exponential backoff for async + removal in case of `EBUSY`, `EMFILE`, and `ENFILE` errors. + Should be a number greater than 1. Default `1.2` +- `maxBackoff`: Windows only. Maximum total backoff time in ms to + attempt asynchronous retries in case of `EBUSY`, `EMFILE`, and + `ENFILE` errors. Default `200`. With the default `1.2` backoff + rate, this results in 14 retries, with the final retry being + delayed 33ms. +- `retryDelay`: Native only. Time to wait between retries, using + linear backoff. Default `100`. +- `signal` Pass in an AbortSignal to cancel the directory + removal.
This is useful when removing large folder structures, + if you'd like to limit the time spent. + + Using a `signal` option prevents the use of Node's built-in + `fs.rm` because that implementation does not support abort + signals. + +- `glob` Boolean flag to treat path as glob pattern, or an object + specifying [`glob` options](https://github.com/isaacs/node-glob). +- `filter` Method that returns a boolean indicating whether that + path should be deleted. With async `rimraf` methods, this may + return a Promise that resolves to a boolean. (Since Promises + are truthy, returning a Promise from a sync filter is the same + as just not filtering anything.) + + The first argument to the filter is the path string. The + second argument is either a `Dirent` or `Stats` object for that + path. (The first path explored will be a `Stats`, the rest + will be `Dirent`.) + + If a filter method is provided, it will _only_ remove entries + if the filter returns (or resolves to) a truthy value. Omitting + a directory will still allow its children to be removed, unless + they are also filtered out, but any parents of a filtered entry + will not be removed, since the directory will not be empty in + that case. + + Using a filter method prevents the use of Node's built-in + `fs.rm` because that implementation does not support filtering. + +Any other options are provided to the native Node.js `fs.rm` implementation +when that is used. + +This will attempt to choose the best implementation, based on the Node.js +version and `process.platform`. To force a specific implementation, use +one of the other functions provided.
### `rimraf.sync(f, [opts])` <br> `rimraf.rimrafSync(f, [opts])` + +Synchronous form of `rimraf()` + +Note that, unlike many file system operations, the synchronous form will +typically be significantly _slower_ than the async form, because recursive +deletion is extremely parallelizable.
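As a brief usage sketch of the async and sync forms documented above, assuming the vendored rimraf v5 API (the paths are hypothetical):

```js
import { rimraf, rimrafSync } from 'rimraf'

// async form: resolves true once everything has been removed
const ok = await rimraf('./build')

// several paths at once; entries are only deleted when the
// filter returns (or resolves to) a truthy value
await rimraf(['./dist', './tmp'], {
  filter: path => !path.endsWith('.keep'),
})

// sync form: same result, but slower on large trees, since the
// async form can parallelize the recursive deletion
rimrafSync('./coverage')
```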
### `rimraf.native(f, [opts])` + +Uses the built-in `fs.rm` implementation that Node.js provides. This is +used by default on Node.js versions greater than or equal to `14.14.0`. + +### `rimraf.native.sync(f, [opts])` <br> `rimraf.nativeSync(f, [opts])` + +Synchronous form of `rimraf.native` + +### `rimraf.manual(f, [opts])` + +Use the JavaScript implementation appropriate for your operating system.
### `rimraf.manual.sync(f, [opts])` <br> `rimraf.manualSync(f, opts)` + +Synchronous form of `rimraf.manual()` + +### `rimraf.windows(f, [opts])` + +JavaScript implementation of file removal appropriate for Windows +platforms. Works around `unlink` and `rmdir` not being atomic +operations, and `EPERM` when deleting files with certain +permission modes. + +First deletes all non-directory files within the tree, and then +removes all directories, which should ideally be empty by that +time. When an `ENOTEMPTY` is raised in the second pass, falls +back to the `rimraf.moveRemove` strategy as needed.
### `rimraf.windows.sync(path, [opts])` <br> `rimraf.windowsSync(path, [opts])` + +Synchronous form of `rimraf.windows()` + +### `rimraf.moveRemove(path, [opts])` + +Moves all files and folders to the parent directory of `path` +with a temporary filename prior to attempting to remove them. + +Note that, in cases where the operation fails, this _may_ leave +files lying around in the parent directory with names like +`.file-basename.txt.0.123412341`. Until the Windows kernel +provides a way to perform atomic `unlink` and `rmdir` operations, +this is, unfortunately, unavoidable. + +To move files to a different temporary directory other than the +parent, provide `opts.tmp`. Note that this _must_ be on the same +physical device as the folder being deleted, or else the +operation will fail. + +This is the slowest strategy, but most reliable on Windows +platforms. Used as a last-ditch fallback by `rimraf.windows()`.
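A sketch of forcing one of the strategies described above rather than letting `rimraf` choose (hypothetical paths; `C:\temp` only stands in for a temp folder on the same device as the target):

```js
import { rimraf } from 'rimraf'

// default: picks the best implementation for this platform
await rimraf('./cache')

// or force a specific strategy
await rimraf.native('./cache') // Node's built-in fs.rm
await rimraf.manual('./cache') // platform-appropriate JS implementation
await rimraf.moveRemove('./cache', {
  // tmp must be on the same physical device as the folder being deleted
  tmp: 'C:\\temp',
})
```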
### `rimraf.moveRemove.sync(path, [opts])` <br> `rimraf.moveRemoveSync(path, [opts])` + +Synchronous form of `rimraf.moveRemove()` + +### Command Line Interface + +``` +rimraf version 4.3.0 + +Usage: rimraf <path> [<path> ...] +Deletes all files and folders at "path", recursively. + +Options: + -- Treat all subsequent arguments as paths + -h --help Display this usage info + --preserve-root Do not remove '/' recursively (default) + --no-preserve-root Do not treat '/' specially + -G --no-glob Treat arguments as literal paths, not globs (default) + -g --glob Treat arguments as glob patterns + -v --verbose Be verbose when deleting files, showing them as + they are removed. Not compatible with --impl=native + -V --no-verbose Be silent when deleting files, showing nothing as + they are removed (default) + -i --interactive Ask for confirmation before deleting anything + Not compatible with --impl=native + -I --no-interactive Do not ask for confirmation before deleting + + --impl=<type> Specify the implementation to use: + rimraf: choose the best option (default) + native: the built-in implementation in Node.js + manual: the platform-specific JS implementation + posix: the Posix JS implementation + windows: the Windows JS implementation (falls back to + move-remove on ENOTEMPTY) + move-remove: a slow reliable Windows fallback + +Implementation-specific options: + --tmp=<path> Temp file folder for 'move-remove' implementation + --max-retries=<n> maxRetries for 'native' and 'windows' implementations + --retry-delay=<n> retryDelay for 'native' implementation, default 100 + --backoff=<n> Exponential backoff factor for retries (default: 1.2) +``` + +## mkdirp + +If you need to _create_ a directory recursively, check out +[mkdirp](https://github.com/isaacs/node-mkdirp). diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts new file mode 100644 index 00000000000000..a68e925b249a8d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts @@ -0,0 +1,3 @@ +export declare const defaultTmp: (path: string) => Promise<string>; +export declare const defaultTmpSync: (path: string) => string; +//# sourceMappingURL=default-tmp.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts.map new file mode 100644 index 00000000000000..d0b35f2786233b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"default-tmp.d.ts","sourceRoot":"","sources":["../../src/default-tmp.ts"],"names":[],"mappings":"AAiEA,eAAO,MAAM,UAAU,SAnCc,MAAM,oBAoCe,CAAA;AAC1D,eAAO,MAAM,cAAc,SArBQ,MAAM,WAsByB,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js new file mode 100644 index 00000000000000..ae9087881962da --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js @@ -0,0 +1,61 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultTmpSync = exports.defaultTmp = void 0; +// The default temporary folder location for use in the windows algorithm. +// It's TEMPting to use dirname(path), since that's guaranteed to be on the +// same device.
However, this means that: +// rimraf(path).then(() => rimraf(dirname(path))) +// will often fail with EBUSY, because the parent dir contains +// marked-for-deletion directory entries (which do not show up in readdir). +// The approach here is to use os.tmpdir() if it's on the same drive letter, +// or resolve(path, '\\temp') if it exists, or the root of the drive if not. +// On Posix (not that you'd be likely to use the windows algorithm there), +// it uses os.tmpdir() always. +const os_1 = require("os"); +const path_1 = require("path"); +const fs_js_1 = require("./fs.js"); +const platform_js_1 = __importDefault(require("./platform.js")); +const { stat } = fs_js_1.promises; +const isDirSync = (path) => { + try { + return (0, fs_js_1.statSync)(path).isDirectory(); + } + catch (er) { + return false; + } +}; +const isDir = (path) => stat(path).then(st => st.isDirectory(), () => false); +const win32DefaultTmp = async (path) => { + const { root } = (0, path_1.parse)(path); + const tmp = (0, os_1.tmpdir)(); + const { root: tmpRoot } = (0, path_1.parse)(tmp); + if (root.toLowerCase() === tmpRoot.toLowerCase()) { + return tmp; + } + const driveTmp = (0, path_1.resolve)(root, '/temp'); + if (await isDir(driveTmp)) { + return driveTmp; + } + return root; +}; +const win32DefaultTmpSync = (path) => { + const { root } = (0, path_1.parse)(path); + const tmp = (0, os_1.tmpdir)(); + const { root: tmpRoot } = (0, path_1.parse)(tmp); + if (root.toLowerCase() === tmpRoot.toLowerCase()) { + return tmp; + } + const driveTmp = (0, path_1.resolve)(root, '/temp'); + if (isDirSync(driveTmp)) { + return driveTmp; + } + return root; +}; +const posixDefaultTmp = async () => (0, os_1.tmpdir)(); +const posixDefaultTmpSync = () => (0, os_1.tmpdir)(); +exports.defaultTmp = platform_js_1.default === 'win32' ? win32DefaultTmp : posixDefaultTmp; +exports.defaultTmpSync = platform_js_1.default === 'win32' ? 
win32DefaultTmpSync : posixDefaultTmpSync; +//# sourceMappingURL=default-tmp.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js.map new file mode 100644 index 00000000000000..4984afb1b21290 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js.map @@ -0,0 +1 @@ +{"version":3,"file":"default-tmp.js","sourceRoot":"","sources":["../../src/default-tmp.ts"],"names":[],"mappings":";;;;;;AAAA,0EAA0E;AAC1E,2EAA2E;AAC3E,0CAA0C;AAC1C,iDAAiD;AACjD,8DAA8D;AAC9D,2EAA2E;AAC3E,4EAA4E;AAC5E,4EAA4E;AAC5E,0EAA0E;AAC1E,8BAA8B;AAC9B,2BAA2B;AAC3B,+BAAqC;AACrC,mCAA4C;AAC5C,gEAAoC;AACpC,MAAM,EAAE,IAAI,EAAE,GAAG,gBAAQ,CAAA;AAEzB,MAAM,SAAS,GAAG,CAAC,IAAY,EAAE,EAAE;IACjC,IAAI,CAAC;QACH,OAAO,IAAA,gBAAQ,EAAC,IAAI,CAAC,CAAC,WAAW,EAAE,CAAA;IACrC,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,OAAO,KAAK,CAAA;IACd,CAAC;AACH,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,IAAY,EAAE,EAAE,CAC7B,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CACb,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,WAAW,EAAE,EACtB,GAAG,EAAE,CAAC,KAAK,CACZ,CAAA;AAEH,MAAM,eAAe,GAAG,KAAK,EAAE,IAAY,EAAE,EAAE;IAC7C,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,GAAG,GAAG,IAAA,WAAM,GAAE,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,IAAA,YAAK,EAAC,GAAG,CAAC,CAAA;IACpC,IAAI,IAAI,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QACjD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED,MAAM,QAAQ,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACvC,IAAI,MAAM,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC1B,OAAO,QAAQ,CAAA;IACjB,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,CAAC,IAAY,EAAE,EAAE;IAC3C,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,GAAG,GAAG,IAAA,WAAM,GAAE,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,IAAA,YAAK,EAAC,GAAG,CAAC,CAAA;IACpC,IAAI,IAAI,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QACjD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED,MAAM,QAAQ,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACvC,IAAI,SAAS,CAAC,QAAQ,CAAC,EAAE,CAAC;QACxB,OAAO,QAAQ,CAAA;IACjB,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE,CAAC,IAAA,WAAM,GAAE,CAAA;AAC5C,MAAM,mBAAmB,GAAG,GAAG,EAAE,CAAC,IAAA,WAAM,GAAE,CAAA;AAE7B,QAAA,UAAU,GACrB,qBAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,eAAe,CAAA;AAC7C,QAAA,cAAc,GACzB,qBAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAA","sourcesContent":["// The default temporary folder location for use in the windows algorithm.\n// It's TEMPting to use dirname(path), since that's guaranteed to be on the\n// same device. 
However, this means that:\n// rimraf(path).then(() => rimraf(dirname(path)))\n// will often fail with EBUSY, because the parent dir contains\n// marked-for-deletion directory entries (which do not show up in readdir).\n// The approach here is to use os.tmpdir() if it's on the same drive letter,\n// or resolve(path, '\\\\temp') if it exists, or the root of the drive if not.\n// On Posix (not that you'd be likely to use the windows algorithm there),\n// it uses os.tmpdir() always.\nimport { tmpdir } from 'os'\nimport { parse, resolve } from 'path'\nimport { promises, statSync } from './fs.js'\nimport platform from './platform.js'\nconst { stat } = promises\n\nconst isDirSync = (path: string) => {\n try {\n return statSync(path).isDirectory()\n } catch (er) {\n return false\n }\n}\n\nconst isDir = (path: string) =>\n stat(path).then(\n st => st.isDirectory(),\n () => false,\n )\n\nconst win32DefaultTmp = async (path: string) => {\n const { root } = parse(path)\n const tmp = tmpdir()\n const { root: tmpRoot } = parse(tmp)\n if (root.toLowerCase() === tmpRoot.toLowerCase()) {\n return tmp\n }\n\n const driveTmp = resolve(root, '/temp')\n if (await isDir(driveTmp)) {\n return driveTmp\n }\n\n return root\n}\n\nconst win32DefaultTmpSync = (path: string) => {\n const { root } = parse(path)\n const tmp = tmpdir()\n const { root: tmpRoot } = parse(tmp)\n if (root.toLowerCase() === tmpRoot.toLowerCase()) {\n return tmp\n }\n\n const driveTmp = resolve(root, '/temp')\n if (isDirSync(driveTmp)) {\n return driveTmp\n }\n\n return root\n}\n\nconst posixDefaultTmp = async () => tmpdir()\nconst posixDefaultTmpSync = () => tmpdir()\n\nexport const defaultTmp =\n platform === 'win32' ? win32DefaultTmp : posixDefaultTmp\nexport const defaultTmpSync =\n platform === 'win32' ? 
win32DefaultTmpSync : posixDefaultTmpSync\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts new file mode 100644 index 00000000000000..20e76a82c4942e --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts @@ -0,0 +1,3 @@ +export declare const fixEPERM: (fn: (path: string) => Promise) => (path: string) => Promise; +export declare const fixEPERMSync: (fn: (path: string) => any) => (path: string) => any; +//# sourceMappingURL=fix-eperm.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts.map new file mode 100644 index 00000000000000..ac17d6f4e060bb --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"fix-eperm.d.ts","sourceRoot":"","sources":["../../src/fix-eperm.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,QAAQ,OACd,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,GAAG,CAAC,YAAkB,MAAM,iBAsB1D,CAAA;AAEH,eAAO,MAAM,YAAY,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,GAAG,YAAY,MAAM,QAsBvE,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js new file mode 100644 index 00000000000000..7baecb7c9589bd --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js @@ -0,0 +1,58 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fixEPERMSync = exports.fixEPERM = void 0; +const fs_js_1 = require("./fs.js"); +const { chmod } = fs_js_1.promises; +const fixEPERM = (fn) => async (path) => { + try { + return await fn(path); + } + catch (er) { + const fer = er; + if (fer?.code === 'ENOENT') { + return; + } + if (fer?.code === 'EPERM') { + try { + await chmod(path, 0o666); + } + catch (er2) { + const fer2 = er2; + if (fer2?.code === 'ENOENT') { + return; + } + throw er; + } + return await fn(path); + } + throw er; + } +}; +exports.fixEPERM = fixEPERM; +const fixEPERMSync = (fn) => (path) => { + try { + return fn(path); + } + catch (er) { + const fer = er; + if (fer?.code === 'ENOENT') { + return; + } + if (fer?.code === 'EPERM') { + try { + (0, fs_js_1.chmodSync)(path, 0o666); + } + catch (er2) { + const fer2 = er2; + if (fer2?.code === 'ENOENT') { + return; + } + throw er; + } + return fn(path); + } + throw er; + } +}; +exports.fixEPERMSync = fixEPERMSync; +//# sourceMappingURL=fix-eperm.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js.map new file mode 100644 index 00000000000000..250ea5d5b4cbc6 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"fix-eperm.js","sourceRoot":"","sources":["../../src/fix-eperm.ts"],"names":[],"mappings":";;;AAAA,mCAA6C;AAC7C,MAAM,EAAE,KAAK,EAAE,GAAG,gBAAQ,CAAA;AAEnB,MAAM,QAAQ,GACnB,CAAC,EAAkC,EAAE,EAAE,CAAC,KAAK,EAAE,IAAY,EAAE,EAAE;IAC7D,IAAI,CAAC;QACH,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;IACvB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC3B,OAAM;QACR,CAAC;QACD,IAAI,GAAG,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YAC1B,IAAI,CAAC;gBACH,MAAM,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YAC1B,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,MAAM,IAAI,GAAG,GAA4B,CAAA;gBACzC,IAAI,IAAI,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBAC5B,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;YACD,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;QACvB,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAvBU,QAAA,QAAQ,YAuBlB;AAEI,MAAM,YAAY,GAAG,CAAC,EAAyB,EAAE,EAAE,CAAC,CAAC,IAAY,EAAE,EAAE;IAC1E,IAAI,CAAC;QACH,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;IACjB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC3B,OAAM;QACR,CAAC;QACD,IAAI,GAAG,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YAC1B,IAAI,CAAC;gBACH,IAAA,iBAAS,EAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YACxB,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,MAAM,IAAI,GAAG,GAA4B,CAAA;gBACzC,IAAI,IAAI,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBAC5B,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;YACD,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;QACjB,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAtBY,QAAA,YAAY,gBAsBxB","sourcesContent":["import { chmodSync, promises } from './fs.js'\nconst { chmod } = promises\n\nexport const fixEPERM =\n (fn: (path: string) => Promise) => async (path: string) => {\n try {\n return await fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOENT') {\n return\n }\n if (fer?.code === 'EPERM') {\n try {\n await chmod(path, 0o666)\n } catch (er2) {\n const fer2 = er2 as NodeJS.ErrnoException\n if (fer2?.code === 'ENOENT') {\n return\n }\n throw er\n }\n return await fn(path)\n }\n throw er\n }\n }\n\nexport const fixEPERMSync = (fn: (path: string) => any) => (path: string) => {\n try {\n return fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOENT') {\n return\n }\n if (fer?.code === 'EPERM') {\n try {\n chmodSync(path, 0o666)\n } catch (er2) {\n const fer2 = er2 as NodeJS.ErrnoException\n if (fer2?.code === 'ENOENT') {\n return\n }\n throw er\n }\n return fn(path)\n }\n throw er\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts new file mode 100644 index 00000000000000..9e4e95b4e7a411 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts @@ -0,0 +1,17 @@ +import fs, { Dirent } from 'fs'; +export { chmodSync, mkdirSync, renameSync, rmdirSync, rmSync, statSync, lstatSync, unlinkSync, } from 'fs'; +export declare const readdirSync: (path: fs.PathLike) => Dirent[]; +export declare const promises: { + chmod: (path: fs.PathLike, mode: fs.Mode) => Promise; + mkdir: (path: fs.PathLike, options?: fs.Mode | (fs.MakeDirectoryOptions & { + recursive?: boolean | null; + }) | undefined | null) => Promise; + readdir: (path: fs.PathLike) => Promise; + rename: (oldPath: fs.PathLike, newPath: fs.PathLike) => Promise; + rm: (path: fs.PathLike, options: fs.RmOptions) => Promise; + rmdir: (path: fs.PathLike) => Promise; + stat: (path: fs.PathLike) => Promise; + lstat: (path: fs.PathLike) => Promise; + unlink: (path: fs.PathLike) => Promise; +}; +//# 
sourceMappingURL=fs.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts.map new file mode 100644 index 00000000000000..8c8b1034cbcd27 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"fs.d.ts","sourceRoot":"","sources":["../../src/fs.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,EAAE,EAAE,MAAM,EAAE,MAAM,IAAI,CAAA;AAG/B,OAAO,EACL,SAAS,EACT,SAAS,EACT,UAAU,EACV,SAAS,EACT,MAAM,EACN,QAAQ,EACR,SAAS,EACT,UAAU,GACX,MAAM,IAAI,CAAA;AAGX,eAAO,MAAM,WAAW,SAAU,EAAE,CAAC,QAAQ,KAAG,MAAM,EACf,CAAA;AA+DvC,eAAO,MAAM,QAAQ;kBAxDA,EAAE,CAAC,QAAQ,QAAQ,EAAE,CAAC,IAAI,KAAG,OAAO,CAAC,IAAI,CAAC;kBAMvD,EAAE,CAAC,QAAQ,YAEb,EAAE,CAAC,IAAI,GACP,CAAC,EAAE,CAAC,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,GAAG,IAAI,CAAA;KAAE,CAAC,GAC1D,SAAS,GACT,IAAI,KACP,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;oBAKP,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,MAAM,EAAE,CAAC;sBAO7B,EAAE,CAAC,QAAQ,WAAW,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;eAOxD,EAAE,CAAC,QAAQ,WAAW,EAAE,CAAC,SAAS,KAAG,OAAO,CAAC,IAAI,CAAC;kBAK/C,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;iBAK5B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC;kBAK9B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC;mBAK9B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;CAehD,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fs.js b/deps/npm/node_modules/rimraf/dist/commonjs/fs.js new file mode 100644 index 00000000000000..dba64c9830ed82 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fs.js @@ -0,0 +1,46 @@ +"use strict"; +// promisify ourselves, because older nodes don't have fs.promises +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.promises = exports.readdirSync = exports.unlinkSync = exports.lstatSync = exports.statSync = exports.rmSync = exports.rmdirSync = exports.renameSync = exports.mkdirSync = exports.chmodSync = void 0; +const fs_1 = __importDefault(require("fs")); +// sync ones just take the sync version from node +var fs_2 = require("fs"); +Object.defineProperty(exports, "chmodSync", { enumerable: true, get: function () { return fs_2.chmodSync; } }); +Object.defineProperty(exports, "mkdirSync", { enumerable: true, get: function () { return fs_2.mkdirSync; } }); +Object.defineProperty(exports, "renameSync", { enumerable: true, get: function () { return fs_2.renameSync; } }); +Object.defineProperty(exports, "rmdirSync", { enumerable: true, get: function () { return fs_2.rmdirSync; } }); +Object.defineProperty(exports, "rmSync", { enumerable: true, get: function () { return fs_2.rmSync; } }); +Object.defineProperty(exports, "statSync", { enumerable: true, get: function () { return fs_2.statSync; } }); +Object.defineProperty(exports, "lstatSync", { enumerable: true, get: function () { return fs_2.lstatSync; } }); +Object.defineProperty(exports, "unlinkSync", { enumerable: true, get: function () { return fs_2.unlinkSync; } }); +const fs_3 = require("fs"); +const readdirSync = (path) => (0, fs_3.readdirSync)(path, { withFileTypes: true }); +exports.readdirSync = readdirSync; +// unrolled for better inlining, this seems to get better performance +// than something like: +// const makeCb = (res, rej) => (er, ...d) => er ? rej(er) : res(...d) +// which would be a bit cleaner. 
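The comment just above explains why each promise wrapper in this file is written out by hand rather than built from a shared makeCb factory: the monomorphic callbacks inline better. Reduced to a single call, the pattern looks like the sketch below (statP is an illustrative name; the vendored file defines one such constant per fs function):

import fs from 'fs'

// one hand-rolled promise wrapper, shaped like the chmod/mkdir/... wrappers below
const statP = (path: fs.PathLike): Promise<fs.Stats> =>
  new Promise((res, rej) =>
    fs.stat(path, (er, st) => (er ? rej(er) : res(st))),
  )

// statP('.').then(st => console.log(st.isDirectory()))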
+const chmod = (path, mode) => new Promise((res, rej) => fs_1.default.chmod(path, mode, (er, ...d) => (er ? rej(er) : res(...d)))); +const mkdir = (path, options) => new Promise((res, rej) => fs_1.default.mkdir(path, options, (er, made) => (er ? rej(er) : res(made)))); +const readdir = (path) => new Promise((res, rej) => fs_1.default.readdir(path, { withFileTypes: true }, (er, data) => er ? rej(er) : res(data))); +const rename = (oldPath, newPath) => new Promise((res, rej) => fs_1.default.rename(oldPath, newPath, (er, ...d) => er ? rej(er) : res(...d))); +const rm = (path, options) => new Promise((res, rej) => fs_1.default.rm(path, options, (er, ...d) => (er ? rej(er) : res(...d)))); +const rmdir = (path) => new Promise((res, rej) => fs_1.default.rmdir(path, (er, ...d) => (er ? rej(er) : res(...d)))); +const stat = (path) => new Promise((res, rej) => fs_1.default.stat(path, (er, data) => (er ? rej(er) : res(data)))); +const lstat = (path) => new Promise((res, rej) => fs_1.default.lstat(path, (er, data) => (er ? rej(er) : res(data)))); +const unlink = (path) => new Promise((res, rej) => fs_1.default.unlink(path, (er, ...d) => (er ? rej(er) : res(...d)))); +exports.promises = { + chmod, + mkdir, + readdir, + rename, + rm, + rmdir, + stat, + lstat, + unlink, +}; +//# sourceMappingURL=fs.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fs.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/fs.js.map new file mode 100644 index 00000000000000..9d9e1fba4b2dc8 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fs.js.map @@ -0,0 +1 @@ +{"version":3,"file":"fs.js","sourceRoot":"","sources":["../../src/fs.ts"],"names":[],"mappings":";AAAA,kEAAkE;;;;;;AAElE,4CAA+B;AAE/B,iDAAiD;AACjD,yBASW;AART,+FAAA,SAAS,OAAA;AACT,+FAAA,SAAS,OAAA;AACT,gGAAA,UAAU,OAAA;AACV,+FAAA,SAAS,OAAA;AACT,4FAAA,MAAM,OAAA;AACN,8FAAA,QAAQ,OAAA;AACR,+FAAA,SAAS,OAAA;AACT,gGAAA,UAAU,OAAA;AAGZ,2BAA0C;AACnC,MAAM,WAAW,GAAG,CAAC,IAAiB,EAAY,EAAE,CACzD,IAAA,gBAAM,EAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAA;AAD1B,QAAA,WAAW,eACe;AAEvC,qEAAqE;AACrE,uBAAuB;AACvB,sEAAsE;AACtE,gCAAgC;AAEhC,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAE,IAAa,EAAiB,EAAE,CAChE,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACtE,CAAA;AAEH,MAAM,KAAK,GAAG,CACZ,IAAiB,EACjB,OAIQ,EACqB,EAAE,CAC/B,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAClE,CAAA;AAEH,MAAM,OAAO,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACvD,IAAI,OAAO,CAAW,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACjC,YAAE,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CACrD,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CACzB,CACF,CAAA;AAEH,MAAM,MAAM,GAAG,CAAC,OAAoB,EAAE,OAAoB,EAAiB,EAAE,CAC3E,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAC9C,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CACzB,CACF,CAAA;AAEH,MAAM,EAAE,GAAG,CAAC,IAAiB,EAAE,OAAqB,EAAiB,EAAE,CACrE,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAA
C,CAAC,CACtE,CAAA;AAEH,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAiB,EAAE,CACjD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAChE,CAAA;AAEH,MAAM,IAAI,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACpD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CACxD,CAAA;AAEH,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACrD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CACzD,CAAA;AAEH,MAAM,MAAM,GAAG,CAAC,IAAiB,EAAiB,EAAE,CAClD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACjE,CAAA;AAEU,QAAA,QAAQ,GAAG;IACtB,KAAK;IACL,KAAK;IACL,OAAO;IACP,MAAM;IACN,EAAE;IACF,KAAK;IACL,IAAI;IACJ,KAAK;IACL,MAAM;CACP,CAAA","sourcesContent":["// promisify ourselves, because older nodes don't have fs.promises\n\nimport fs, { Dirent } from 'fs'\n\n// sync ones just take the sync version from node\nexport {\n chmodSync,\n mkdirSync,\n renameSync,\n rmdirSync,\n rmSync,\n statSync,\n lstatSync,\n unlinkSync,\n} from 'fs'\n\nimport { readdirSync as rdSync } from 'fs'\nexport const readdirSync = (path: fs.PathLike): Dirent[] =>\n rdSync(path, { withFileTypes: true })\n\n// unrolled for better inlining, this seems to get better performance\n// than something like:\n// const makeCb = (res, rej) => (er, ...d) => er ? rej(er) : res(...d)\n// which would be a bit cleaner.\n\nconst chmod = (path: fs.PathLike, mode: fs.Mode): Promise =>\n new Promise((res, rej) =>\n fs.chmod(path, mode, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst mkdir = (\n path: fs.PathLike,\n options?:\n | fs.Mode\n | (fs.MakeDirectoryOptions & { recursive?: boolean | null })\n | undefined\n | null,\n): Promise =>\n new Promise((res, rej) =>\n fs.mkdir(path, options, (er, made) => (er ? rej(er) : res(made))),\n )\n\nconst readdir = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.readdir(path, { withFileTypes: true }, (er, data) =>\n er ? rej(er) : res(data),\n ),\n )\n\nconst rename = (oldPath: fs.PathLike, newPath: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.rename(oldPath, newPath, (er, ...d: any[]) =>\n er ? rej(er) : res(...d),\n ),\n )\n\nconst rm = (path: fs.PathLike, options: fs.RmOptions): Promise =>\n new Promise((res, rej) =>\n fs.rm(path, options, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst rmdir = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.rmdir(path, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst stat = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.stat(path, (er, data) => (er ? rej(er) : res(data))),\n )\n\nconst lstat = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.lstat(path, (er, data) => (er ? rej(er) : res(data))),\n )\n\nconst unlink = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.unlink(path, (er, ...d: any[]) => (er ? 
rej(er) : res(...d))),\n )\n\nexport const promises = {\n chmod,\n mkdir,\n readdir,\n rename,\n rm,\n rmdir,\n stat,\n lstat,\n unlink,\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts new file mode 100644 index 00000000000000..f158cc27025b16 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts @@ -0,0 +1,3 @@ +export declare const ignoreENOENT: (p: Promise) => Promise; +export declare const ignoreENOENTSync: (fn: () => any) => any; +//# sourceMappingURL=ignore-enoent.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts.map new file mode 100644 index 00000000000000..2cfb3bbac5fab7 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore-enoent.d.ts","sourceRoot":"","sources":["../../src/ignore-enoent.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,YAAY,MAAa,OAAO,CAAC,GAAG,CAAC,iBAK9C,CAAA;AAEJ,eAAO,MAAM,gBAAgB,OAAQ,MAAM,GAAG,QAQ7C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js new file mode 100644 index 00000000000000..02595342121f79 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ignoreENOENTSync = exports.ignoreENOENT = void 0; +const ignoreENOENT = async (p) => p.catch(er => { + if (er.code !== 'ENOENT') { + throw er; + } +}); +exports.ignoreENOENT = ignoreENOENT; +const ignoreENOENTSync = (fn) => { + try { + return fn(); + } + catch (er) { + if (er?.code !== 'ENOENT') { + throw er; + } + } +}; +exports.ignoreENOENTSync = ignoreENOENTSync; +//# sourceMappingURL=ignore-enoent.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js.map new file mode 100644 index 00000000000000..7acf4c29d1e56f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore-enoent.js","sourceRoot":"","sources":["../../src/ignore-enoent.ts"],"names":[],"mappings":";;;AAAO,MAAM,YAAY,GAAG,KAAK,EAAE,CAAe,EAAE,EAAE,CACpD,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;IACX,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;QACzB,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAC,CAAA;AALS,QAAA,YAAY,gBAKrB;AAEG,MAAM,gBAAgB,GAAG,CAAC,EAAa,EAAE,EAAE;IAChD,IAAI,CAAC;QACH,OAAO,EAAE,EAAE,CAAA;IACb,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YACrD,MAAM,EAAE,CAAA;QACV,CAAC;IACH,CAAC;AACH,CAAC,CAAA;AARY,QAAA,gBAAgB,oBAQ5B","sourcesContent":["export const ignoreENOENT = async (p: Promise) =>\n p.catch(er => {\n if (er.code !== 'ENOENT') {\n throw er\n }\n })\n\nexport const ignoreENOENTSync = (fn: () => any) => {\n try {\n return fn()\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code !== 'ENOENT') {\n throw er\n }\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts new file mode 100644 index 00000000000000..9ec4a124ab613d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts @@ -0,0 +1,50 @@ +import { 
RimrafAsyncOptions, RimrafSyncOptions } from './opt-arg.js'; +export { assertRimrafOptions, isRimrafOptions, type RimrafAsyncOptions, type RimrafOptions, type RimrafSyncOptions, } from './opt-arg.js'; +export declare const nativeSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const native: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const manualSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const manual: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const windowsSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const windows: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const posixSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const posix: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const moveRemoveSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const moveRemove: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const rimrafSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const rimraf: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + rimraf: (path: string | string[], opt?: RimrafAsyncOptions) => Promise; + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + rimrafSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + manual: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + manualSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + native: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + nativeSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + posix: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + posixSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + windows: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + windowsSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + moveRemove: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + moveRemoveSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts.map new file mode 100644 index 00000000000000..0dc659ca730252 --- /dev/null +++ 
b/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAGL,kBAAkB,EAClB,iBAAiB,EAClB,MAAM,cAAc,CAAA;AASrB,OAAO,EACL,mBAAmB,EACnB,eAAe,EACf,KAAK,kBAAkB,EACvB,KAAK,aAAa,EAClB,KAAK,iBAAiB,GACvB,MAAM,cAAc,CAAA;AAqCrB,eAAO,MAAM,UAAU,SAdd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAcF,CAAA;AACpD,eAAO,MAAM,MAAM,UAjCT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAegB,CAAA;AAE7E,eAAO,MAAM,UAAU,SAjBd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAiBF,CAAA;AACpD,eAAO,MAAM,MAAM,UApCT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAkBgB,CAAA;AAE7E,eAAO,MAAM,WAAW,SApBf,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAoBA,CAAA;AACtD,eAAO,MAAM,OAAO,UAvCV,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAqBmB,CAAA;AAEhF,eAAO,MAAM,SAAS,SAvBb,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAuBJ,CAAA;AAClD,eAAO,MAAM,KAAK,UA1CR,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAwBa,CAAA;AAE1E,eAAO,MAAM,cAAc,SA1BlB,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OA0BM,CAAA;AAC5D,eAAO,MAAM,UAAU,UA7Cb,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CA6B3D,CAAA;AAEF,eAAO,MAAM,UAAU,SA/Bd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAmCrD,CAAA;AACD,eAAO,MAAM,IAAI,SApCR,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAoCxB,CAAA;AAK9B,eAAO,MAAM,MAAM,UA3DT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;mBAFX,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;oBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;oBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;mBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;sBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;qBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;wBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;wBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;2BAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAuD3D,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/index.js b/deps/npm/node_modules/rimraf/dist/commonjs/index.js new file mode 100644 index 00000000000000..09b5d9993c1e78 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/index.js @@ -0,0 +1,78 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.rimraf = exports.sync = exports.rimrafSync = exports.moveRemove = exports.moveRemoveSync = exports.posix = exports.posixSync = exports.windows = exports.windowsSync = exports.manual = exports.manualSync = exports.native = exports.nativeSync = exports.isRimrafOptions = exports.assertRimrafOptions = void 0; +const glob_1 = require("glob"); +const opt_arg_js_1 = require("./opt-arg.js"); +const path_arg_js_1 = __importDefault(require("./path-arg.js")); +const rimraf_manual_js_1 = require("./rimraf-manual.js"); +const rimraf_move_remove_js_1 = require("./rimraf-move-remove.js"); +const rimraf_native_js_1 = require("./rimraf-native.js"); +const rimraf_posix_js_1 = require("./rimraf-posix.js"); +const rimraf_windows_js_1 = require("./rimraf-windows.js"); +const use_native_js_1 = require("./use-native.js"); +var opt_arg_js_2 = require("./opt-arg.js"); +Object.defineProperty(exports, "assertRimrafOptions", { enumerable: true, get: function () { return opt_arg_js_2.assertRimrafOptions; } }); +Object.defineProperty(exports, "isRimrafOptions", { enumerable: true, get: function () { return opt_arg_js_2.isRimrafOptions; } }); +const wrap = (fn) => async (path, opt) => { + const options = (0, opt_arg_js_1.optArg)(opt); + if (options.glob) { + path = await (0, glob_1.glob)(path, options.glob); + } + if (Array.isArray(path)) { + return !!(await Promise.all(path.map(p => fn((0, path_arg_js_1.default)(p, options), options)))).reduce((a, b) => a && b, true); + } + else { + return !!(await fn((0, path_arg_js_1.default)(path, options), options)); + } +}; +const wrapSync = (fn) => (path, opt) => { + const options = (0, opt_arg_js_1.optArgSync)(opt); + if (options.glob) { + path = (0, glob_1.globSync)(path, options.glob); + } + if (Array.isArray(path)) { + return !!path + .map(p => fn((0, path_arg_js_1.default)(p, options), options)) + .reduce((a, b) => a && b, true); + } + else { + return !!fn((0, path_arg_js_1.default)(path, options), options); + } +}; +exports.nativeSync = wrapSync(rimraf_native_js_1.rimrafNativeSync); +exports.native = Object.assign(wrap(rimraf_native_js_1.rimrafNative), { sync: exports.nativeSync }); +exports.manualSync = wrapSync(rimraf_manual_js_1.rimrafManualSync); +exports.manual = Object.assign(wrap(rimraf_manual_js_1.rimrafManual), { sync: exports.manualSync }); +exports.windowsSync = wrapSync(rimraf_windows_js_1.rimrafWindowsSync); +exports.windows = Object.assign(wrap(rimraf_windows_js_1.rimrafWindows), { sync: exports.windowsSync }); +exports.posixSync = wrapSync(rimraf_posix_js_1.rimrafPosixSync); +exports.posix = Object.assign(wrap(rimraf_posix_js_1.rimrafPosix), { sync: exports.posixSync }); +exports.moveRemoveSync = wrapSync(rimraf_move_remove_js_1.rimrafMoveRemoveSync); +exports.moveRemove = Object.assign(wrap(rimraf_move_remove_js_1.rimrafMoveRemove), { + sync: exports.moveRemoveSync, +}); +exports.rimrafSync = wrapSync((path, opt) => (0, use_native_js_1.useNativeSync)(opt) ? + (0, rimraf_native_js_1.rimrafNativeSync)(path, opt) + : (0, rimraf_manual_js_1.rimrafManualSync)(path, opt)); +exports.sync = exports.rimrafSync; +const rimraf_ = wrap((path, opt) => (0, use_native_js_1.useNative)(opt) ? 
(0, rimraf_native_js_1.rimrafNative)(path, opt) : (0, rimraf_manual_js_1.rimrafManual)(path, opt)); +exports.rimraf = Object.assign(rimraf_, { + rimraf: rimraf_, + sync: exports.rimrafSync, + rimrafSync: exports.rimrafSync, + manual: exports.manual, + manualSync: exports.manualSync, + native: exports.native, + nativeSync: exports.nativeSync, + posix: exports.posix, + posixSync: exports.posixSync, + windows: exports.windows, + windowsSync: exports.windowsSync, + moveRemove: exports.moveRemove, + moveRemoveSync: exports.moveRemoveSync, +}); +exports.rimraf.rimraf = exports.rimraf; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/index.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/index.js.map new file mode 100644 index 00000000000000..5ed1978ae92f1f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;;;AAAA,+BAAqC;AACrC,6CAKqB;AACrB,gEAAmC;AACnC,yDAAmE;AACnE,mEAAgF;AAChF,yDAAmE;AACnE,uDAAgE;AAChE,2DAAsE;AACtE,mDAA0D;AAE1D,2CAMqB;AALnB,iHAAA,mBAAmB,OAAA;AACnB,6GAAA,eAAe,OAAA;AAMjB,MAAM,IAAI,GACR,CAAC,EAA0D,EAAE,EAAE,CAC/D,KAAK,EACH,IAAuB,EACvB,GAAwB,EACN,EAAE;IACpB,MAAM,OAAO,GAAG,IAAA,mBAAM,EAAC,GAAG,CAAC,CAAA;IAC3B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC;QACjB,IAAI,GAAG,MAAM,IAAA,WAAI,EAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;IACvC,CAAC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC;QACxB,OAAO,CAAC,CAAC,CACP,MAAM,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAA,qBAAO,EAAC,CAAC,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC,CACnE,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAClC,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,IAAA,qBAAO,EAAC,IAAI,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC,CAAA;IACtD,CAAC;AACH,CAAC,CAAA;AAEH,MAAM,QAAQ,GACZ,CAAC,EAAgD,EAAE,EAAE,CACrD,CAAC,IAAuB,EAAE,GAAuB,EAAW,EAAE;IAC5D,MAAM,OAAO,GAAG,IAAA,uBAAU,EAAC,GAAG,CAAC,CAAA;IAC/B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC;QACjB,IAAI,GAAG,IAAA,eAAQ,EAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;IACrC,CAAC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC;QACxB,OAAO,CAAC,CAAC,IAAI;aACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAA,qBAAO,EAAC,CAAC,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC;aAC1C,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IACnC,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,CAAC,EAAE,CAAC,IAAA,qBAAO,EAAC,IAAI,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAA;IAC9C,CAAC;AACH,CAAC,CAAA;AAEU,QAAA,UAAU,GAAG,QAAQ,CAAC,mCAAgB,CAAC,CAAA;AACvC,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,+BAAY,CAAC,EAAE,EAAE,IAAI,EAAE,kBAAU,EAAE,CAAC,CAAA;AAEhE,QAAA,UAAU,GAAG,QAAQ,CAAC,mCAAgB,CAAC,CAAA;AACvC,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,+BAAY,CAAC,EAAE,EAAE,IAAI,EAAE,kBAAU,EAAE,CAAC,CAAA;AAEhE,QAAA,WAAW,GAAG,QAAQ,CAAC,qCAAiB,CAAC,CAAA;AACzC,QAAA,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,iCAAa,CAAC,EAAE,EAAE,IAAI,EAAE,mBAAW,EAAE,CAAC,CAAA;AAEnE,QAAA,SAAS,GAAG,QAAQ,CAAC,iCAAe,CAAC,CAAA;AACrC,QAAA,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,6BAAW,CAAC,EAAE,EAAE,IAAI,EAAE,iBAAS,EAAE,CAAC,CAAA;AAE7D,QAAA,cAAc,GAAG,QAAQ,CAAC,4CAAoB,CAAC,CAAA;AAC/C,QAAA,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,wCAAgB,CAAC,EAAE;IAC9D,IAAI,EAAE,sBAAc;CACrB,CAAC,CAAA;AAEW,QAAA,UAAU,GAAG,QAAQ,CAAC,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,CAC/C,IAAA,6BAAa,EAAC,GAAG,CAAC,CAAC,CAAC;IAClB,IAAA,mCAAgB,EAAC,IAAI,EAAE,GAAG,CAAC;IAC7B,CAAC,CAAC,IAAA,mCAAgB,EAAC,IAAI,EAAE,GAAG,CAAC,CAC9B,CAAA;AACY,QAAA,IAAI,GAAG,kBAAU,CAAA;AAE
9B,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,CACjC,IAAA,yBAAS,EAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAA,+BAAY,EAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,IAAA,+BAAY,EAAC,IAAI,EAAE,GAAG,CAAC,CACnE,CAAA;AACY,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE;IAC3C,MAAM,EAAE,OAAO;IACf,IAAI,EAAE,kBAAU;IAChB,UAAU,EAAE,kBAAU;IACtB,MAAM,EAAN,cAAM;IACN,UAAU,EAAV,kBAAU;IACV,MAAM,EAAN,cAAM;IACN,UAAU,EAAV,kBAAU;IACV,KAAK,EAAL,aAAK;IACL,SAAS,EAAT,iBAAS;IACT,OAAO,EAAP,eAAO;IACP,WAAW,EAAX,mBAAW;IACX,UAAU,EAAV,kBAAU;IACV,cAAc,EAAd,sBAAc;CACf,CAAC,CAAA;AACF,cAAM,CAAC,MAAM,GAAG,cAAM,CAAA","sourcesContent":["import { glob, globSync } from 'glob'\nimport {\n optArg,\n optArgSync,\n RimrafAsyncOptions,\n RimrafSyncOptions,\n} from './opt-arg.js'\nimport pathArg from './path-arg.js'\nimport { rimrafManual, rimrafManualSync } from './rimraf-manual.js'\nimport { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'\nimport { rimrafNative, rimrafNativeSync } from './rimraf-native.js'\nimport { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'\nimport { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'\nimport { useNative, useNativeSync } from './use-native.js'\n\nexport {\n assertRimrafOptions,\n isRimrafOptions,\n type RimrafAsyncOptions,\n type RimrafOptions,\n type RimrafSyncOptions,\n} from './opt-arg.js'\n\nconst wrap =\n (fn: (p: string, o: RimrafAsyncOptions) => Promise) =>\n async (\n path: string | string[],\n opt?: RimrafAsyncOptions,\n ): Promise => {\n const options = optArg(opt)\n if (options.glob) {\n path = await glob(path, options.glob)\n }\n if (Array.isArray(path)) {\n return !!(\n await Promise.all(path.map(p => fn(pathArg(p, options), options)))\n ).reduce((a, b) => a && b, true)\n } else {\n return !!(await fn(pathArg(path, options), options))\n }\n }\n\nconst wrapSync =\n (fn: (p: string, o: RimrafSyncOptions) => boolean) =>\n (path: string | string[], opt?: RimrafSyncOptions): boolean => {\n const options = optArgSync(opt)\n if (options.glob) {\n path = globSync(path, options.glob)\n }\n if (Array.isArray(path)) {\n return !!path\n .map(p => fn(pathArg(p, options), options))\n .reduce((a, b) => a && b, true)\n } else {\n return !!fn(pathArg(path, options), options)\n }\n }\n\nexport const nativeSync = wrapSync(rimrafNativeSync)\nexport const native = Object.assign(wrap(rimrafNative), { sync: nativeSync })\n\nexport const manualSync = wrapSync(rimrafManualSync)\nexport const manual = Object.assign(wrap(rimrafManual), { sync: manualSync })\n\nexport const windowsSync = wrapSync(rimrafWindowsSync)\nexport const windows = Object.assign(wrap(rimrafWindows), { sync: windowsSync })\n\nexport const posixSync = wrapSync(rimrafPosixSync)\nexport const posix = Object.assign(wrap(rimrafPosix), { sync: posixSync })\n\nexport const moveRemoveSync = wrapSync(rimrafMoveRemoveSync)\nexport const moveRemove = Object.assign(wrap(rimrafMoveRemove), {\n sync: moveRemoveSync,\n})\n\nexport const rimrafSync = wrapSync((path, opt) =>\n useNativeSync(opt) ?\n rimrafNativeSync(path, opt)\n : rimrafManualSync(path, opt),\n)\nexport const sync = rimrafSync\n\nconst rimraf_ = wrap((path, opt) =>\n useNative(opt) ? 
rimrafNative(path, opt) : rimrafManual(path, opt),\n)\nexport const rimraf = Object.assign(rimraf_, {\n rimraf: rimraf_,\n sync: rimrafSync,\n rimrafSync: rimrafSync,\n manual,\n manualSync,\n native,\n nativeSync,\n posix,\n posixSync,\n windows,\n windowsSync,\n moveRemove,\n moveRemoveSync,\n})\nrimraf.rimraf = rimraf\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts new file mode 100644 index 00000000000000..c869d4ae85251b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts @@ -0,0 +1,34 @@ +import { Dirent, Stats } from 'fs'; +import { GlobOptions } from 'glob'; +export declare const isRimrafOptions: (o: any) => o is RimrafOptions; +export declare const assertRimrafOptions: (o: any) => void; +export interface RimrafAsyncOptions { + preserveRoot?: boolean; + tmp?: string; + maxRetries?: number; + retryDelay?: number; + backoff?: number; + maxBackoff?: number; + signal?: AbortSignal; + glob?: boolean | GlobOptions; + filter?: ((path: string, ent: Dirent | Stats) => boolean) | ((path: string, ent: Dirent | Stats) => Promise); +} +export interface RimrafSyncOptions extends RimrafAsyncOptions { + filter?: (path: string, ent: Dirent | Stats) => boolean; +} +export type RimrafOptions = RimrafSyncOptions | RimrafAsyncOptions; +export declare const optArg: (opt?: RimrafAsyncOptions) => (RimrafAsyncOptions & { + glob: GlobOptions & { + withFileTypes: false; + }; +}) | (RimrafAsyncOptions & { + glob: undefined; +}); +export declare const optArgSync: (opt?: RimrafSyncOptions) => (RimrafSyncOptions & { + glob: GlobOptions & { + withFileTypes: false; + }; +}) | (RimrafSyncOptions & { + glob: undefined; +}); +//# sourceMappingURL=opt-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts.map new file mode 100644 index 00000000000000..89e83b205ac628 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"opt-arg.d.ts","sourceRoot":"","sources":["../../src/opt-arg.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,IAAI,CAAA;AAClC,OAAO,EAAE,WAAW,EAAE,MAAM,MAAM,CAAA;AAKlC,eAAO,MAAM,eAAe,MAAO,GAAG,KAAG,CAAC,IAAI,aAUX,CAAA;AAEnC,eAAO,MAAM,mBAAmB,EAAE,CAAC,CAAC,EAAE,GAAG,KAAK,IAM7C,CAAA;AAED,MAAM,WAAW,kBAAkB;IACjC,YAAY,CAAC,EAAE,OAAO,CAAA;IACtB,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,IAAI,CAAC,EAAE,OAAO,GAAG,WAAW,CAAA;IAC5B,MAAM,CAAC,EACH,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAC,GAChD,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC,CAAA;CAC9D;AAED,MAAM,WAAW,iBAAkB,SAAQ,kBAAkB;IAC3D,MAAM,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAA;CACxD;AAED,MAAM,MAAM,aAAa,GAAG,iBAAiB,GAAG,kBAAkB,CAAA;AAqClE,eAAO,MAAM,MAAM,SAAS,kBAAkB;UA/BlC,WAAW,GAAG;QAAE,aAAa,EAAE,KAAK,CAAA;KAAE;;UAEjC,SAAS;EA6B0C,CAAA;AACpE,eAAO,MAAM,UAAU,SAAS,iBAAiB;UAhCrC,WAAW,GAAG;QAAE,aAAa,EAAE,KAAK,CAAA;KAAE;;UAEjC,SAAS;EA8B6C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js new file mode 100644 index 00000000000000..1d030a16d3c0f0 --- /dev/null +++ 
b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js @@ -0,0 +1,53 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.optArgSync = exports.optArg = exports.assertRimrafOptions = exports.isRimrafOptions = void 0; +const typeOrUndef = (val, t) => typeof val === 'undefined' || typeof val === t; +const isRimrafOptions = (o) => !!o && + typeof o === 'object' && + typeOrUndef(o.preserveRoot, 'boolean') && + typeOrUndef(o.tmp, 'string') && + typeOrUndef(o.maxRetries, 'number') && + typeOrUndef(o.retryDelay, 'number') && + typeOrUndef(o.backoff, 'number') && + typeOrUndef(o.maxBackoff, 'number') && + (typeOrUndef(o.glob, 'boolean') || (o.glob && typeof o.glob === 'object')) && + typeOrUndef(o.filter, 'function'); +exports.isRimrafOptions = isRimrafOptions; +const assertRimrafOptions = (o) => { + if (!(0, exports.isRimrafOptions)(o)) { + throw new Error('invalid rimraf options'); + } +}; +exports.assertRimrafOptions = assertRimrafOptions; +const optArgT = (opt) => { + (0, exports.assertRimrafOptions)(opt); + const { glob, ...options } = opt; + if (!glob) { + return options; + } + const globOpt = glob === true ? + opt.signal ? + { signal: opt.signal } + : {} + : opt.signal ? + { + signal: opt.signal, + ...glob, + } + : glob; + return { + ...options, + glob: { + ...globOpt, + // always get absolute paths from glob, to ensure + // that we are referencing the correct thing. + absolute: true, + withFileTypes: false, + }, + }; +}; +const optArg = (opt = {}) => optArgT(opt); +exports.optArg = optArg; +const optArgSync = (opt = {}) => optArgT(opt); +exports.optArgSync = optArgSync; +//# sourceMappingURL=opt-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js.map new file mode 100644 index 00000000000000..d815735d639a46 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js.map @@ -0,0 +1 @@ 
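The option normalization in opt-arg.js above can be read as a small pure function: glob: true becomes an empty glob-options object, an AbortSignal on the rimraf options is threaded into the glob options (an explicit signal on a glob object wins), and absolute/withFileTypes are always pinned so matches come back as absolute string paths. A sketch under those assumptions (normalizeGlob is an illustrative name, not a rimraf export):

import type { GlobOptions } from 'glob'

type Opts = { glob?: boolean | GlobOptions; signal?: AbortSignal }

const normalizeGlob = (
  opt: Opts,
): (GlobOptions & { withFileTypes: false }) | undefined => {
  if (!opt.glob) return undefined
  const base = opt.glob === true ? {} : opt.glob
  return {
    ...(opt.signal ? { signal: opt.signal } : {}),
    ...base, // a signal inside the glob object takes precedence
    absolute: true, // always get absolute paths from glob
    withFileTypes: false, // strings, not Dirent/Path objects
  }
}

// normalizeGlob({ glob: true })          // => { absolute: true, withFileTypes: false }
// normalizeGlob({ glob: { dot: true } }) // => { dot: true, absolute: true, withFileTypes: false }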
+{"version":3,"file":"opt-arg.js","sourceRoot":"","sources":["../../src/opt-arg.ts"],"names":[],"mappings":";;;AAGA,MAAM,WAAW,GAAG,CAAC,GAAQ,EAAE,CAAS,EAAE,EAAE,CAC1C,OAAO,GAAG,KAAK,WAAW,IAAI,OAAO,GAAG,KAAK,CAAC,CAAA;AAEzC,MAAM,eAAe,GAAG,CAAC,CAAM,EAAsB,EAAE,CAC5D,CAAC,CAAC,CAAC;IACH,OAAO,CAAC,KAAK,QAAQ;IACrB,WAAW,CAAC,CAAC,CAAC,YAAY,EAAE,SAAS,CAAC;IACtC,WAAW,CAAC,CAAC,CAAC,GAAG,EAAE,QAAQ,CAAC;IAC5B,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,WAAW,CAAC,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC;IAChC,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,IAAI,OAAO,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC;IAC1E,WAAW,CAAC,CAAC,CAAC,MAAM,EAAE,UAAU,CAAC,CAAA;AAVtB,QAAA,eAAe,mBAUO;AAE5B,MAAM,mBAAmB,GAAqB,CACnD,CAAM,EACsB,EAAE;IAC9B,IAAI,CAAC,IAAA,uBAAe,EAAC,CAAC,CAAC,EAAE,CAAC;QACxB,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAA;IAC3C,CAAC;AACH,CAAC,CAAA;AANY,QAAA,mBAAmB,uBAM/B;AAsBD,MAAM,OAAO,GAAG,CACd,GAAM,EAKsB,EAAE;IAC9B,IAAA,2BAAmB,EAAC,GAAG,CAAC,CAAA;IACxB,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAChC,IAAI,CAAC,IAAI,EAAE,CAAC;QACV,OAAO,OAAkC,CAAA;IAC3C,CAAC;IACD,MAAM,OAAO,GACX,IAAI,KAAK,IAAI,CAAC,CAAC;QACb,GAAG,CAAC,MAAM,CAAC,CAAC;YACV,EAAE,MAAM,EAAE,GAAG,CAAC,MAAM,EAAE;YACxB,CAAC,CAAC,EAAE;QACN,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YACZ;gBACE,MAAM,EAAE,GAAG,CAAC,MAAM;gBAClB,GAAG,IAAI;aACR;YACH,CAAC,CAAC,IAAI,CAAA;IACR,OAAO;QACL,GAAG,OAAO;QACV,IAAI,EAAE;YACJ,GAAG,OAAO;YACV,iDAAiD;YACjD,6CAA6C;YAC7C,QAAQ,EAAE,IAAI;YACd,aAAa,EAAE,KAAK;SACrB;KACsD,CAAA;AAC3D,CAAC,CAAA;AAEM,MAAM,MAAM,GAAG,CAAC,MAA0B,EAAE,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA;AAAvD,QAAA,MAAM,UAAiD;AAC7D,MAAM,UAAU,GAAG,CAAC,MAAyB,EAAE,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA;AAA1D,QAAA,UAAU,cAAgD","sourcesContent":["import { Dirent, Stats } from 'fs'\nimport { GlobOptions } from 'glob'\n\nconst typeOrUndef = (val: any, t: string) =>\n typeof val === 'undefined' || typeof val === t\n\nexport const isRimrafOptions = (o: any): o is RimrafOptions =>\n !!o &&\n typeof o === 'object' &&\n typeOrUndef(o.preserveRoot, 'boolean') &&\n typeOrUndef(o.tmp, 'string') &&\n typeOrUndef(o.maxRetries, 'number') &&\n typeOrUndef(o.retryDelay, 'number') &&\n typeOrUndef(o.backoff, 'number') &&\n typeOrUndef(o.maxBackoff, 'number') &&\n (typeOrUndef(o.glob, 'boolean') || (o.glob && typeof o.glob === 'object')) &&\n typeOrUndef(o.filter, 'function')\n\nexport const assertRimrafOptions: (o: any) => void = (\n o: any,\n): asserts o is RimrafOptions => {\n if (!isRimrafOptions(o)) {\n throw new Error('invalid rimraf options')\n }\n}\n\nexport interface RimrafAsyncOptions {\n preserveRoot?: boolean\n tmp?: string\n maxRetries?: number\n retryDelay?: number\n backoff?: number\n maxBackoff?: number\n signal?: AbortSignal\n glob?: boolean | GlobOptions\n filter?:\n | ((path: string, ent: Dirent | Stats) => boolean)\n | ((path: string, ent: Dirent | Stats) => Promise)\n}\n\nexport interface RimrafSyncOptions extends RimrafAsyncOptions {\n filter?: (path: string, ent: Dirent | Stats) => boolean\n}\n\nexport type RimrafOptions = RimrafSyncOptions | RimrafAsyncOptions\n\nconst optArgT = (\n opt: T,\n):\n | (T & {\n glob: GlobOptions & { withFileTypes: false }\n })\n | (T & { glob: undefined }) => {\n assertRimrafOptions(opt)\n const { glob, ...options } = opt\n if (!glob) {\n return options as T & { glob: undefined }\n }\n const globOpt =\n glob === true ?\n opt.signal ?\n { signal: opt.signal }\n : {}\n : opt.signal ?\n {\n signal: opt.signal,\n ...glob,\n }\n : glob\n 
return {\n ...options,\n glob: {\n ...globOpt,\n // always get absolute paths from glob, to ensure\n // that we are referencing the correct thing.\n absolute: true,\n withFileTypes: false,\n },\n } as T & { glob: GlobOptions & { withFileTypes: false } }\n}\n\nexport const optArg = (opt: RimrafAsyncOptions = {}) => optArgT(opt)\nexport const optArgSync = (opt: RimrafSyncOptions = {}) => optArgT(opt)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/package.json b/deps/npm/node_modules/rimraf/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts new file mode 100644 index 00000000000000..c0b7e7cb4b15e3 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions } from './index.js'; +declare const pathArg: (path: string, opt?: RimrafAsyncOptions) => string; +export default pathArg; +//# sourceMappingURL=path-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts.map new file mode 100644 index 00000000000000..4fe93c3a8aec47 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.d.ts","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAA;AAG/C,QAAA,MAAM,OAAO,SAAU,MAAM,QAAO,kBAAkB,WAgDrD,CAAA;AAED,eAAe,OAAO,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js new file mode 100644 index 00000000000000..8a4908aa08ef50 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js @@ -0,0 +1,52 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const path_1 = require("path"); +const util_1 = require("util"); +const platform_js_1 = __importDefault(require("./platform.js")); +const pathArg = (path, opt = {}) => { + const type = typeof path; + if (type !== 'string') { + const ctor = path && type === 'object' && path.constructor; + const received = ctor && ctor.name ? `an instance of ${ctor.name}` + : type === 'object' ? (0, util_1.inspect)(path) + : `type ${type} ${path}`; + const msg = 'The "path" argument must be of type string. 
' + `Received ${received}`; + throw Object.assign(new TypeError(msg), { + path, + code: 'ERR_INVALID_ARG_TYPE', + }); + } + if (/\0/.test(path)) { + // simulate same failure that node raises + const msg = 'path must be a string without null bytes'; + throw Object.assign(new TypeError(msg), { + path, + code: 'ERR_INVALID_ARG_VALUE', + }); + } + path = (0, path_1.resolve)(path); + const { root } = (0, path_1.parse)(path); + if (path === root && opt.preserveRoot !== false) { + const msg = 'refusing to remove root directory without preserveRoot:false'; + throw Object.assign(new Error(msg), { + path, + code: 'ERR_PRESERVE_ROOT', + }); + } + if (platform_js_1.default === 'win32') { + const badWinChars = /[*|"<>?:]/; + const { root } = (0, path_1.parse)(path); + if (badWinChars.test(path.substring(root.length))) { + throw Object.assign(new Error('Illegal characters in path.'), { + path, + code: 'EINVAL', + }); + } + } + return path; +}; +exports.default = pathArg; +//# sourceMappingURL=path-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js.map new file mode 100644 index 00000000000000..40e4a19e7003a0 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.js","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":";;;;;AAAA,+BAAqC;AACrC,+BAA8B;AAE9B,gEAAoC;AAEpC,MAAM,OAAO,GAAG,CAAC,IAAY,EAAE,MAA0B,EAAE,EAAE,EAAE;IAC7D,MAAM,IAAI,GAAG,OAAO,IAAI,CAAA;IACxB,IAAI,IAAI,KAAK,QAAQ,EAAE,CAAC;QACtB,MAAM,IAAI,GAAG,IAAI,IAAI,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,WAAW,CAAA;QAC1D,MAAM,QAAQ,GACZ,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,kBAAkB,IAAI,CAAC,IAAI,EAAE;YACjD,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAA,cAAO,EAAC,IAAI,CAAC;gBACnC,CAAC,CAAC,QAAQ,IAAI,IAAI,IAAI,EAAE,CAAA;QAC1B,MAAM,GAAG,GACP,8CAA8C,GAAG,YAAY,QAAQ,EAAE,CAAA;QACzE,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE;YACtC,IAAI;YACJ,IAAI,EAAE,sBAAsB;SAC7B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;QACpB,yCAAyC;QACzC,MAAM,GAAG,GAAG,0CAA0C,CAAA;QACtD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE;YACtC,IAAI;YACJ,IAAI,EAAE,uBAAuB;SAC9B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;IAE5B,IAAI,IAAI,KAAK,IAAI,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,EAAE,CAAC;QAChD,MAAM,GAAG,GAAG,8DAA8D,CAAA;QAC1E,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,GAAG,CAAC,EAAE;YAClC,IAAI;YACJ,IAAI,EAAE,mBAAmB;SAC1B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,qBAAQ,KAAK,OAAO,EAAE,CAAC;QACzB,MAAM,WAAW,GAAG,WAAW,CAAA;QAC/B,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC;YAClD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,IAAI;gBACJ,IAAI,EAAE,QAAQ;aACf,CAAC,CAAA;QACJ,CAAC;IACH,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,kBAAe,OAAO,CAAA","sourcesContent":["import { parse, resolve } from 'path'\nimport { inspect } from 'util'\nimport { RimrafAsyncOptions } from './index.js'\nimport platform from './platform.js'\n\nconst pathArg = (path: string, opt: RimrafAsyncOptions = {}) => {\n const type = typeof path\n if (type !== 'string') {\n const ctor = path && type === 'object' && path.constructor\n const received =\n ctor && ctor.name ? `an instance of ${ctor.name}`\n : type === 'object' ? 
inspect(path)\n : `type ${type} ${path}`\n const msg =\n 'The \"path\" argument must be of type string. ' + `Received ${received}`\n throw Object.assign(new TypeError(msg), {\n path,\n code: 'ERR_INVALID_ARG_TYPE',\n })\n }\n\n if (/\\0/.test(path)) {\n // simulate same failure that node raises\n const msg = 'path must be a string without null bytes'\n throw Object.assign(new TypeError(msg), {\n path,\n code: 'ERR_INVALID_ARG_VALUE',\n })\n }\n\n path = resolve(path)\n const { root } = parse(path)\n\n if (path === root && opt.preserveRoot !== false) {\n const msg = 'refusing to remove root directory without preserveRoot:false'\n throw Object.assign(new Error(msg), {\n path,\n code: 'ERR_PRESERVE_ROOT',\n })\n }\n\n if (platform === 'win32') {\n const badWinChars = /[*|\"<>?:]/\n const { root } = parse(path)\n if (badWinChars.test(path.substring(root.length))) {\n throw Object.assign(new Error('Illegal characters in path.'), {\n path,\n code: 'EINVAL',\n })\n }\n }\n\n return path\n}\n\nexport default pathArg\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts new file mode 100644 index 00000000000000..e127a8e529ffd2 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts @@ -0,0 +1,3 @@ +declare const _default: string; +export default _default; +//# sourceMappingURL=platform.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts.map new file mode 100644 index 00000000000000..ef2e6734f8cfbb --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"platform.d.ts","sourceRoot":"","sources":["../../src/platform.ts"],"names":[],"mappings":";AAAA,wBAA0E"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/platform.js b/deps/npm/node_modules/rimraf/dist/commonjs/platform.js new file mode 100644 index 00000000000000..58f197ffbf8249 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/platform.js @@ -0,0 +1,4 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = process.env.__TESTING_RIMRAF_PLATFORM__ || process.platform; +//# sourceMappingURL=platform.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/platform.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/platform.js.map new file mode 100644 index 00000000000000..814cdb8c244c57 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/platform.js.map @@ -0,0 +1 @@ +{"version":3,"file":"platform.js","sourceRoot":"","sources":["../../src/platform.ts"],"names":[],"mappings":";;AAAA,kBAAe,OAAO,CAAC,GAAG,CAAC,2BAA2B,IAAI,OAAO,CAAC,QAAQ,CAAA","sourcesContent":["export default process.env.__TESTING_RIMRAF_PLATFORM__ || process.platform\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts new file mode 100644 index 00000000000000..cce73097f1681f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts @@ -0,0 +1,3 @@ +export declare const readdirOrError: (path: string) => Promise; +export declare const readdirOrErrorSync: (path: string) => import("fs").Dirent[] | NodeJS.ErrnoException; +//# sourceMappingURL=readdir-or-error.d.ts.map \ No newline at end of file diff 
--git a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts.map new file mode 100644 index 00000000000000..8a19f6bdfd0706 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"readdir-or-error.d.ts","sourceRoot":"","sources":["../../src/readdir-or-error.ts"],"names":[],"mappings":"AAIA,eAAO,MAAM,cAAc,SAAU,MAAM,2DACa,CAAA;AACxD,eAAO,MAAM,kBAAkB,SAAU,MAAM,kDAM9C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js new file mode 100644 index 00000000000000..75330cb3816c8b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readdirOrErrorSync = exports.readdirOrError = void 0; +// returns an array of entries if readdir() works, +// or the error that readdir() raised if not. +const fs_js_1 = require("./fs.js"); +const { readdir } = fs_js_1.promises; +const readdirOrError = (path) => readdir(path).catch(er => er); +exports.readdirOrError = readdirOrError; +const readdirOrErrorSync = (path) => { + try { + return (0, fs_js_1.readdirSync)(path); + } + catch (er) { + return er; + } +}; +exports.readdirOrErrorSync = readdirOrErrorSync; +//# sourceMappingURL=readdir-or-error.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js.map new file mode 100644 index 00000000000000..61dbfe11956140 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js.map @@ -0,0 +1 @@ +{"version":3,"file":"readdir-or-error.js","sourceRoot":"","sources":["../../src/readdir-or-error.ts"],"names":[],"mappings":";;;AAAA,kDAAkD;AAClD,6CAA6C;AAC7C,mCAA+C;AAC/C,MAAM,EAAE,OAAO,EAAE,GAAG,gBAAQ,CAAA;AACrB,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAC7C,OAAO,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,EAA2B,CAAC,CAAA;AAD3C,QAAA,cAAc,kBAC6B;AACjD,MAAM,kBAAkB,GAAG,CAAC,IAAY,EAAE,EAAE;IACjD,IAAI,CAAC;QACH,OAAO,IAAA,mBAAW,EAAC,IAAI,CAAC,CAAA;IAC1B,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,OAAO,EAA2B,CAAA;IACpC,CAAC;AACH,CAAC,CAAA;AANY,QAAA,kBAAkB,sBAM9B","sourcesContent":["// returns an array of entries if readdir() works,\n// or the error that readdir() raised if not.\nimport { promises, readdirSync } from './fs.js'\nconst { readdir } = promises\nexport const readdirOrError = (path: string) =>\n readdir(path).catch(er => er as NodeJS.ErrnoException)\nexport const readdirOrErrorSync = (path: string) => {\n try {\n return readdirSync(path)\n } catch (er) {\n return er as NodeJS.ErrnoException\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts new file mode 100644 index 00000000000000..c0af0dd62f0df9 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts @@ -0,0 +1,8 @@ +import { RimrafAsyncOptions, RimrafOptions } from './index.js'; +export declare const MAXBACKOFF = 200; +export declare const RATE = 1.2; +export declare const MAXRETRIES = 10; +export declare const codes: Set; +export declare const retryBusy: (fn: (path: string) => Promise) => (path: string, opt: RimrafAsyncOptions, backoff?: number, total?: number) => Promise; +export declare const 
retryBusySync: (fn: (path: string) => any) => (path: string, opt: RimrafOptions) => any; +//# sourceMappingURL=retry-busy.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts.map new file mode 100644 index 00000000000000..21960c58914b4b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"retry-busy.d.ts","sourceRoot":"","sources":["../../src/retry-busy.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,MAAM,YAAY,CAAA;AAE9D,eAAO,MAAM,UAAU,MAAM,CAAA;AAC7B,eAAO,MAAM,IAAI,MAAM,CAAA;AACvB,eAAO,MAAM,UAAU,KAAK,CAAA;AAC5B,eAAO,MAAM,KAAK,aAAyC,CAAA;AAE3D,eAAO,MAAM,SAAS,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,GAAG,CAAC,YAElD,MAAM,OACP,kBAAkB,mDAkC1B,CAAA;AAGD,eAAO,MAAM,aAAa,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,GAAG,YAC/B,MAAM,OAAO,aAAa,QAsBjD,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js new file mode 100644 index 00000000000000..5f9d15252bb10f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js @@ -0,0 +1,68 @@ +"use strict"; +// note: max backoff is the maximum that any *single* backoff will do +Object.defineProperty(exports, "__esModule", { value: true }); +exports.retryBusySync = exports.retryBusy = exports.codes = exports.MAXRETRIES = exports.RATE = exports.MAXBACKOFF = void 0; +exports.MAXBACKOFF = 200; +exports.RATE = 1.2; +exports.MAXRETRIES = 10; +exports.codes = new Set(['EMFILE', 'ENFILE', 'EBUSY']); +const retryBusy = (fn) => { + const method = async (path, opt, backoff = 1, total = 0) => { + const mbo = opt.maxBackoff || exports.MAXBACKOFF; + const rate = opt.backoff || exports.RATE; + const max = opt.maxRetries || exports.MAXRETRIES; + let retries = 0; + while (true) { + try { + return await fn(path); + } + catch (er) { + const fer = er; + if (fer?.path === path && fer?.code && exports.codes.has(fer.code)) { + backoff = Math.ceil(backoff * rate); + total = backoff + total; + if (total < mbo) { + return new Promise((res, rej) => { + setTimeout(() => { + method(path, opt, backoff, total).then(res, rej); + }, backoff); + }); + } + if (retries < max) { + retries++; + continue; + } + } + throw er; + } + } + }; + return method; +}; +exports.retryBusy = retryBusy; +// just retries, no async so no backoff +const retryBusySync = (fn) => { + const method = (path, opt) => { + const max = opt.maxRetries || exports.MAXRETRIES; + let retries = 0; + while (true) { + try { + return fn(path); + } + catch (er) { + const fer = er; + if (fer?.path === path && + fer?.code && + exports.codes.has(fer.code) && + retries < max) { + retries++; + continue; + } + throw er; + } + } + }; + return method; +}; +exports.retryBusySync = retryBusySync; +//# sourceMappingURL=retry-busy.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js.map new file mode 100644 index 00000000000000..1f1051d2f115fc --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js.map @@ -0,0 +1 @@ 
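With the defaults declared above (RATE = 1.2, MAXBACKOFF = 200, MAXRETRIES = 10), the async retryBusy path sleeps ceil(previous * 1.2) milliseconds per EBUSY/EMFILE/ENFILE failure until the cumulative delay would reach 200ms, after which it falls back to up to ten immediate retries. A quick worked computation of that delay schedule, following the arithmetic in the vendored code:

// reproduce the sleep schedule implied by retryBusy's backoff arithmetic
let backoff = 1
let total = 0
const delays: number[] = []
for (;;) {
  backoff = Math.ceil(backoff * 1.2) // opt.backoff, default RATE
  total += backoff
  if (total >= 200) break // opt.maxBackoff reached: stop sleeping
  delays.push(backoff)
}
console.log(delays)
// => [2, 3, 4, 5, 6, 8, 10, 12, 15, 18, 22, 27, 33]  (165ms of sleeping total;
//    the next step would push the running total past 200)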
+{"version":3,"file":"retry-busy.js","sourceRoot":"","sources":["../../src/retry-busy.ts"],"names":[],"mappings":";AAAA,qEAAqE;;;AAIxD,QAAA,UAAU,GAAG,GAAG,CAAA;AAChB,QAAA,IAAI,GAAG,GAAG,CAAA;AACV,QAAA,UAAU,GAAG,EAAE,CAAA;AACf,QAAA,KAAK,GAAG,IAAI,GAAG,CAAC,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAA;AAEpD,MAAM,SAAS,GAAG,CAAC,EAAkC,EAAE,EAAE;IAC9D,MAAM,MAAM,GAAG,KAAK,EAClB,IAAY,EACZ,GAAuB,EACvB,OAAO,GAAG,CAAC,EACX,KAAK,GAAG,CAAC,EACT,EAAE;QACF,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,kBAAU,CAAA;QACxC,MAAM,IAAI,GAAG,GAAG,CAAC,OAAO,IAAI,YAAI,CAAA;QAChC,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,kBAAU,CAAA;QACxC,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,IAAI,EAAE,CAAC;YACZ,IAAI,CAAC;gBACH,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;YACvB,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;gBACvC,IAAI,GAAG,EAAE,IAAI,KAAK,IAAI,IAAI,GAAG,EAAE,IAAI,IAAI,aAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC;oBAC3D,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,CAAA;oBACnC,KAAK,GAAG,OAAO,GAAG,KAAK,CAAA;oBACvB,IAAI,KAAK,GAAG,GAAG,EAAE,CAAC;wBAChB,OAAO,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;4BAC9B,UAAU,CAAC,GAAG,EAAE;gCACd,MAAM,CAAC,IAAI,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAA;4BAClD,CAAC,EAAE,OAAO,CAAC,CAAA;wBACb,CAAC,CAAC,CAAA;oBACJ,CAAC;oBACD,IAAI,OAAO,GAAG,GAAG,EAAE,CAAC;wBAClB,OAAO,EAAE,CAAA;wBACT,SAAQ;oBACV,CAAC;gBACH,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC,CAAA;IAED,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AArCY,QAAA,SAAS,aAqCrB;AAED,uCAAuC;AAChC,MAAM,aAAa,GAAG,CAAC,EAAyB,EAAE,EAAE;IACzD,MAAM,MAAM,GAAG,CAAC,IAAY,EAAE,GAAkB,EAAE,EAAE;QAClD,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,kBAAU,CAAA;QACxC,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,IAAI,EAAE,CAAC;YACZ,IAAI,CAAC;gBACH,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;YACjB,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;gBACvC,IACE,GAAG,EAAE,IAAI,KAAK,IAAI;oBAClB,GAAG,EAAE,IAAI;oBACT,aAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC;oBACnB,OAAO,GAAG,GAAG,EACb,CAAC;oBACD,OAAO,EAAE,CAAA;oBACT,SAAQ;gBACV,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC,CAAA;IACD,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AAvBY,QAAA,aAAa,iBAuBzB","sourcesContent":["// note: max backoff is the maximum that any *single* backoff will do\n\nimport { RimrafAsyncOptions, RimrafOptions } from './index.js'\n\nexport const MAXBACKOFF = 200\nexport const RATE = 1.2\nexport const MAXRETRIES = 10\nexport const codes = new Set(['EMFILE', 'ENFILE', 'EBUSY'])\n\nexport const retryBusy = (fn: (path: string) => Promise) => {\n const method = async (\n path: string,\n opt: RimrafAsyncOptions,\n backoff = 1,\n total = 0,\n ) => {\n const mbo = opt.maxBackoff || MAXBACKOFF\n const rate = opt.backoff || RATE\n const max = opt.maxRetries || MAXRETRIES\n let retries = 0\n while (true) {\n try {\n return await fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.path === path && fer?.code && codes.has(fer.code)) {\n backoff = Math.ceil(backoff * rate)\n total = backoff + total\n if (total < mbo) {\n return new Promise((res, rej) => {\n setTimeout(() => {\n method(path, opt, backoff, total).then(res, rej)\n }, backoff)\n })\n }\n if (retries < max) {\n retries++\n continue\n }\n }\n throw er\n }\n }\n }\n\n return method\n}\n\n// just retries, no async so no backoff\nexport const retryBusySync = (fn: (path: string) => any) => {\n const method = (path: string, opt: RimrafOptions) => {\n const max = opt.maxRetries || MAXRETRIES\n let retries = 0\n while (true) {\n try {\n return fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (\n fer?.path === 
path &&\n fer?.code &&\n codes.has(fer.code) &&\n retries < max\n ) {\n retries++\n continue\n }\n throw er\n }\n }\n }\n return method\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts new file mode 100644 index 00000000000000..35c5c86844c7fa --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts @@ -0,0 +1,3 @@ +export declare const rimrafManual: (path: string, opt: import("./opt-arg.js").RimrafAsyncOptions) => Promise; +export declare const rimrafManualSync: (path: string, opt: import("./opt-arg.js").RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-manual.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts.map new file mode 100644 index 00000000000000..19bd25149ceb07 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-manual.d.ts","sourceRoot":"","sources":["../../src/rimraf-manual.ts"],"names":[],"mappings":"AAKA,eAAO,MAAM,YAAY,oFAAqD,CAAA;AAC9E,eAAO,MAAM,gBAAgB,0EAC+B,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js new file mode 100644 index 00000000000000..1c95ae23bb98b1 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js @@ -0,0 +1,12 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.rimrafManualSync = exports.rimrafManual = void 0; +const platform_js_1 = __importDefault(require("./platform.js")); +const rimraf_posix_js_1 = require("./rimraf-posix.js"); +const rimraf_windows_js_1 = require("./rimraf-windows.js"); +exports.rimrafManual = platform_js_1.default === 'win32' ? rimraf_windows_js_1.rimrafWindows : rimraf_posix_js_1.rimrafPosix; +exports.rimrafManualSync = platform_js_1.default === 'win32' ? rimraf_windows_js_1.rimrafWindowsSync : rimraf_posix_js_1.rimrafPosixSync; +//# sourceMappingURL=rimraf-manual.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js.map new file mode 100644 index 00000000000000..e26e44577d9f0d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-manual.js","sourceRoot":"","sources":["../../src/rimraf-manual.ts"],"names":[],"mappings":";;;;;;AAAA,gEAAoC;AAEpC,uDAAgE;AAChE,2DAAsE;AAEzD,QAAA,YAAY,GAAG,qBAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,iCAAa,CAAC,CAAC,CAAC,6BAAW,CAAA;AACjE,QAAA,gBAAgB,GAC3B,qBAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,qCAAiB,CAAC,CAAC,CAAC,iCAAe,CAAA","sourcesContent":["import platform from './platform.js'\n\nimport { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'\nimport { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'\n\nexport const rimrafManual = platform === 'win32' ? rimrafWindows : rimrafPosix\nexport const rimrafManualSync =\n platform === 'win32' ? 
rimrafWindowsSync : rimrafPosixSync\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts new file mode 100644 index 00000000000000..5d41d40825e4c7 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafMoveRemove: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafMoveRemoveSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-move-remove.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts.map new file mode 100644 index 00000000000000..4062eaebbb1302 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-move-remove.d.ts","sourceRoot":"","sources":["../../src/rimraf-move-remove.ts"],"names":[],"mappings":"AA6BA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AA4ClE,eAAO,MAAM,gBAAgB,SACrB,MAAM,OACP,kBAAkB,qBAWxB,CAAA;AA4ED,eAAO,MAAM,oBAAoB,SAAU,MAAM,OAAO,iBAAiB,YAUxE,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js new file mode 100644 index 00000000000000..ac668d1c9dbbae --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js @@ -0,0 +1,192 @@ +"use strict"; +// https://youtu.be/uhRWMGBjlO8?t=537 +// +// 1. readdir +// 2. for each entry +// a. if a non-empty directory, recurse +// b. if an empty directory, move to random hidden file name in $TEMP +// c. unlink/rmdir $TEMP +// +// This works around the fact that unlink/rmdir is non-atomic and takes +// a non-deterministic amount of time to complete. +// +// However, it is HELLA SLOW, like 2-10x slower than a naive recursive rm. 
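+// Concretely: rename() to a fresh hidden name inside the temp dir is atomic
+// on a single filesystem, so the entry vanishes from its parent immediately,
+// and the slow (possibly retried) unlink/rmdir then runs against the temp
+// name, where nothing else is looking. E.g. removing ./foo with tmp=/tmp is
+// roughly:
+//   rename('./foo', '/tmp/.foo.0.123456789')   // atomic; parent is clean now
+//   unlink('/tmp/.foo.0.123456789')            // slow part, no longer visible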
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.rimrafMoveRemoveSync = exports.rimrafMoveRemove = void 0; +const path_1 = require("path"); +const default_tmp_js_1 = require("./default-tmp.js"); +const ignore_enoent_js_1 = require("./ignore-enoent.js"); +const fs_js_1 = require("./fs.js"); +const { lstat, rename, unlink, rmdir, chmod } = fs_js_1.promises; +const readdir_or_error_js_1 = require("./readdir-or-error.js"); +// crypto.randomBytes is much slower, and Math.random() is enough here +const uniqueFilename = (path) => `.${(0, path_1.basename)(path)}.${Math.random()}`; +const unlinkFixEPERM = async (path) => unlink(path).catch((er) => { + if (er.code === 'EPERM') { + return chmod(path, 0o666).then(() => unlink(path), er2 => { + if (er2.code === 'ENOENT') { + return; + } + throw er; + }); + } + else if (er.code === 'ENOENT') { + return; + } + throw er; +}); +const unlinkFixEPERMSync = (path) => { + try { + (0, fs_js_1.unlinkSync)(path); + } + catch (er) { + if (er?.code === 'EPERM') { + try { + return (0, fs_js_1.chmodSync)(path, 0o666); + } + catch (er2) { + if (er2?.code === 'ENOENT') { + return; + } + throw er; + } + } + else if (er?.code === 'ENOENT') { + return; + } + throw er; + } +}; +const rimrafMoveRemove = async (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return await rimrafMoveRemoveDir(path, opt, await lstat(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +exports.rimrafMoveRemove = rimrafMoveRemove; +const rimrafMoveRemoveDir = async (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + if (!opt.tmp) { + return rimrafMoveRemoveDir(path, { ...opt, tmp: await (0, default_tmp_js_1.defaultTmp)(path) }, ent); + } + if (path === opt.tmp && (0, path_1.parse)(path).root !== path) { + throw new Error('cannot delete temp directory used for deletion'); + } + const entries = ent.isDirectory() ? await (0, readdir_or_error_js_1.readdirOrError)(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await (0, ignore_enoent_js_1.ignoreENOENT)(tmpUnlink(path, opt.tmp, unlinkFixEPERM)); + return true; + } + const removedAll = (await Promise.all(entries.map(ent => rimrafMoveRemoveDir((0, path_1.resolve)(path, ent.name), opt, ent)))).reduce((a, b) => a && b, true); + if (!removedAll) { + return false; + } + // we don't ever ACTUALLY try to unlink /, because that can never work + // but when preserveRoot is false, we could be operating on it. + // No need to check if preserveRoot is not false. 
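+    // (preserveRoot defaults to true, so this check only fires when the
+    // caller explicitly passed preserveRoot: false)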
+ if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) { + return false; + } + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await (0, ignore_enoent_js_1.ignoreENOENT)(tmpUnlink(path, opt.tmp, rmdir)); + return true; +}; +const tmpUnlink = async (path, tmp, rm) => { + const tmpFile = (0, path_1.resolve)(tmp, uniqueFilename(path)); + await rename(path, tmpFile); + return await rm(tmpFile); +}; +const rimrafMoveRemoveSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return rimrafMoveRemoveDirSync(path, opt, (0, fs_js_1.lstatSync)(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +exports.rimrafMoveRemoveSync = rimrafMoveRemoveSync; +const rimrafMoveRemoveDirSync = (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + if (!opt.tmp) { + return rimrafMoveRemoveDirSync(path, { ...opt, tmp: (0, default_tmp_js_1.defaultTmpSync)(path) }, ent); + } + const tmp = opt.tmp; + if (path === opt.tmp && (0, path_1.parse)(path).root !== path) { + throw new Error('cannot delete temp directory used for deletion'); + } + const entries = ent.isDirectory() ? (0, readdir_or_error_js_1.readdirOrErrorSync)(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + (0, ignore_enoent_js_1.ignoreENOENTSync)(() => tmpUnlinkSync(path, tmp, unlinkFixEPERMSync)); + return true; + } + let removedAll = true; + for (const ent of entries) { + const p = (0, path_1.resolve)(path, ent.name); + removedAll = rimrafMoveRemoveDirSync(p, opt, ent) && removedAll; + } + if (!removedAll) { + return false; + } + if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) { + return false; + } + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + (0, ignore_enoent_js_1.ignoreENOENTSync)(() => tmpUnlinkSync(path, tmp, fs_js_1.rmdirSync)); + return true; +}; +const tmpUnlinkSync = (path, tmp, rmSync) => { + const tmpFile = (0, path_1.resolve)(tmp, uniqueFilename(path)); + (0, fs_js_1.renameSync)(path, tmpFile); + return rmSync(tmpFile); +}; +//# sourceMappingURL=rimraf-move-remove.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js.map new file mode 100644 index 00000000000000..44602502b90b2d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"rimraf-move-remove.js","sourceRoot":"","sources":["../../src/rimraf-move-remove.ts"],"names":[],"mappings":";AAAA,qCAAqC;AACrC,EAAE;AACF,aAAa;AACb,oBAAoB;AACpB,yCAAyC;AACzC,uEAAuE;AACvE,0BAA0B;AAC1B,EAAE;AACF,uEAAuE;AACvE,kDAAkD;AAClD,EAAE;AACF,0EAA0E;;;AAE1E,+BAA+C;AAC/C,qDAA6D;AAE7D,yDAAmE;AAEnE,mCAOgB;AAChB,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,gBAAU,CAAA;AAI1D,+DAA0E;AAE1E,sEAAsE;AACtE,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,IAAI,IAAA,eAAQ,EAAC,IAAI,CAAC,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE,CAAA;AAE9E,MAAM,cAAc,GAAG,KAAK,EAAE,IAAY,EAAE,EAAE,CAC5C,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,EAA6B,EAAE,EAAE;IACnD,IAAI,EAAE,CAAC,IAAI,KAAK,OAAO,EAAE,CAAC;QACxB,OAAO,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,IAAI,CAC5B,GAAG,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,EAClB,GAAG,CAAC,EAAE;YACJ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC1B,OAAM;YACR,CAAC;YACD,MAAM,EAAE,CAAA;QACV,CAAC,CACF,CAAA;IACH,CAAC;SAAM,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;QAChC,OAAM;IACR,CAAC;IACD,MAAM,EAAE,CAAA;AACV,CAAC,CAAC,CAAA;AAEJ,MAAM,kBAAkB,GAAG,CAAC,IAAY,EAAE,EAAE;IAC1C,IAAI,CAAC;QACH,IAAA,kBAAU,EAAC,IAAI,CAAC,CAAA;IAClB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YACpD,IAAI,CAAC;gBACH,OAAO,IAAA,iBAAS,EAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YAC/B,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,IAAK,GAA6B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBACtD,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;aAAM,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC5D,OAAM;QACR,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAEM,MAAM,gBAAgB,GAAG,KAAK,EACnC,IAAY,EACZ,GAAuB,EACvB,EAAE;IACF,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,mBAAmB,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAChE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAbY,QAAA,gBAAgB,oBAa5B;AAED,MAAM,mBAAmB,GAAG,KAAK,EAC/B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACD,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QACb,OAAO,mBAAmB,CACxB,IAAI,EACJ,EAAE,GAAG,GAAG,EAAE,GAAG,EAAE,MAAM,IAAA,2BAAU,EAAC,IAAI,CAAC,EAAE,EACvC,GAAG,CACJ,CAAA;IACH,CAAC;IACD,IAAI,IAAI,KAAK,GAAG,CAAC,GAAG,IAAI,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,KAAK,IAAI,EAAE,CAAC;QAClD,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;IACnE,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,IAAA,oCAAc,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,IAAA,+BAAY,EAAC,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC,CAAA;QAC5D,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAChB,mBAAmB,CAAC,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,CACvD,CACF,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAChC,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,sEAAsE;IACtE,+DAA+D;IAC/D,iDAAiD;IACjD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAA
C,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;QACjD,OAAO,KAAK,CAAA;IACd,CAAC;IACD,MAAM,IAAA,+BAAY,EAAC,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAA;IACnD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,SAAS,GAAG,KAAK,EACrB,IAAY,EACZ,GAAW,EACX,EAA+B,EAC/B,EAAE;IACF,MAAM,OAAO,GAAG,IAAA,cAAO,EAAC,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,CAAC,CAAA;IAClD,MAAM,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IAC3B,OAAO,MAAM,EAAE,CAAC,OAAO,CAAC,CAAA;AAC1B,CAAC,CAAA;AAEM,MAAM,oBAAoB,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IAC3E,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,uBAAuB,CAAC,IAAI,EAAE,GAAG,EAAE,IAAA,iBAAS,EAAC,IAAI,CAAC,CAAC,CAAA;IAC5D,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,oBAAoB,wBAUhC;AAED,MAAM,uBAAuB,GAAG,CAC9B,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACV,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QACb,OAAO,uBAAuB,CAC5B,IAAI,EACJ,EAAE,GAAG,GAAG,EAAE,GAAG,EAAE,IAAA,+BAAc,EAAC,IAAI,CAAC,EAAE,EACrC,GAAG,CACJ,CAAA;IACH,CAAC;IACD,MAAM,GAAG,GAAW,GAAG,CAAC,GAAG,CAAA;IAE3B,IAAI,IAAI,KAAK,GAAG,CAAC,GAAG,IAAI,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,KAAK,IAAI,EAAE,CAAC;QAClD,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;IACnE,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAA,wCAAkB,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,GAAG,EAAE,kBAAkB,CAAC,CAAC,CAAA;QACpE,OAAO,IAAI,CAAA;IACb,CAAC;IAED,IAAI,UAAU,GAAG,IAAI,CAAA;IACrB,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,uBAAuB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,IAAI,UAAU,CAAA;IACjE,CAAC;IACD,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;QACzC,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,GAAG,EAAE,iBAAS,CAAC,CAAC,CAAA;IAC3D,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,aAAa,GAAG,CACpB,IAAY,EACZ,GAAW,EACX,MAA2B,EAC3B,EAAE;IACF,MAAM,OAAO,GAAG,IAAA,cAAO,EAAC,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,CAAC,CAAA;IAClD,IAAA,kBAAU,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACzB,OAAO,MAAM,CAAC,OAAO,CAAC,CAAA;AACxB,CAAC,CAAA","sourcesContent":["// https://youtu.be/uhRWMGBjlO8?t=537\n//\n// 1. readdir\n// 2. for each entry\n// a. if a non-empty directory, recurse\n// b. if an empty directory, move to random hidden file name in $TEMP\n// c. 
unlink/rmdir $TEMP\n//\n// This works around the fact that unlink/rmdir is non-atomic and takes\n// a non-deterministic amount of time to complete.\n//\n// However, it is HELLA SLOW, like 2-10x slower than a naive recursive rm.\n\nimport { basename, parse, resolve } from 'path'\nimport { defaultTmp, defaultTmpSync } from './default-tmp.js'\n\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\n\nimport {\n chmodSync,\n lstatSync,\n promises as fsPromises,\n renameSync,\n rmdirSync,\n unlinkSync,\n} from './fs.js'\nconst { lstat, rename, unlink, rmdir, chmod } = fsPromises\n\nimport { Dirent, Stats } from 'fs'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\n\n// crypto.randomBytes is much slower, and Math.random() is enough here\nconst uniqueFilename = (path: string) => `.${basename(path)}.${Math.random()}`\n\nconst unlinkFixEPERM = async (path: string) =>\n unlink(path).catch((er: Error & { code?: string }) => {\n if (er.code === 'EPERM') {\n return chmod(path, 0o666).then(\n () => unlink(path),\n er2 => {\n if (er2.code === 'ENOENT') {\n return\n }\n throw er\n },\n )\n } else if (er.code === 'ENOENT') {\n return\n }\n throw er\n })\n\nconst unlinkFixEPERMSync = (path: string) => {\n try {\n unlinkSync(path)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'EPERM') {\n try {\n return chmodSync(path, 0o666)\n } catch (er2) {\n if ((er2 as NodeJS.ErrnoException)?.code === 'ENOENT') {\n return\n }\n throw er\n }\n } else if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') {\n return\n }\n throw er\n }\n}\n\nexport const rimrafMoveRemove = async (\n path: string,\n opt: RimrafAsyncOptions,\n) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafMoveRemoveDir(path, opt, await lstat(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafMoveRemoveDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n if (!opt.tmp) {\n return rimrafMoveRemoveDir(\n path,\n { ...opt, tmp: await defaultTmp(path) },\n ent,\n )\n }\n if (path === opt.tmp && parse(path).root !== path) {\n throw new Error('cannot delete temp directory used for deletion')\n }\n\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(tmpUnlink(path, opt.tmp, unlinkFixEPERM))\n return true\n }\n\n const removedAll = (\n await Promise.all(\n entries.map(ent =>\n rimrafMoveRemoveDir(resolve(path, ent.name), opt, ent),\n ),\n )\n ).reduce((a, b) => a && b, true)\n if (!removedAll) {\n return false\n }\n\n // we don't ever ACTUALLY try to unlink /, because that can never work\n // but when preserveRoot is false, we could be operating on it.\n // No need to check if preserveRoot is not false.\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(tmpUnlink(path, opt.tmp, rmdir))\n return true\n}\n\nconst tmpUnlink = async (\n path: string,\n tmp: string,\n rm: (p: string) => Promise,\n) => {\n const tmpFile = resolve(tmp, uniqueFilename(path))\n await rename(path, tmpFile)\n return await rm(tmpFile)\n}\n\nexport const rimrafMoveRemoveSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafMoveRemoveDirSync(path, opt, lstatSync(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafMoveRemoveDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n if (!opt.tmp) {\n return rimrafMoveRemoveDirSync(\n path,\n { ...opt, tmp: defaultTmpSync(path) },\n ent,\n )\n }\n const tmp: string = opt.tmp\n\n if (path === opt.tmp && parse(path).root !== path) {\n throw new Error('cannot delete temp directory used for deletion')\n }\n\n const entries = ent.isDirectory() ? 
readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, unlinkFixEPERMSync))\n return true\n }\n\n let removedAll = true\n for (const ent of entries) {\n const p = resolve(path, ent.name)\n removedAll = rimrafMoveRemoveDirSync(p, opt, ent) && removedAll\n }\n if (!removedAll) {\n return false\n }\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, rmdirSync))\n return true\n}\n\nconst tmpUnlinkSync = (\n path: string,\n tmp: string,\n rmSync: (p: string) => void,\n) => {\n const tmpFile = resolve(tmp, uniqueFilename(path))\n renameSync(path, tmpFile)\n return rmSync(tmpFile)\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts new file mode 100644 index 00000000000000..cc84bf7ffd34d0 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafNative: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafNativeSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts.map new file mode 100644 index 00000000000000..bea6b79965192f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-native.d.ts","sourceRoot":"","sources":["../../src/rimraf-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AAIlE,eAAO,MAAM,YAAY,SACjB,MAAM,OACP,kBAAkB,KACtB,OAAO,CAAC,OAAO,CAOjB,CAAA;AAED,eAAO,MAAM,gBAAgB,SACrB,MAAM,OACP,iBAAiB,KACrB,OAOF,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js new file mode 100644 index 00000000000000..ab9f633d7ca157 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.rimrafNativeSync = exports.rimrafNative = void 0; +const fs_js_1 = require("./fs.js"); +const { rm } = fs_js_1.promises; +const rimrafNative = async (path, opt) => { + await rm(path, { + ...opt, + force: true, + recursive: true, + }); + return true; +}; +exports.rimrafNative = rimrafNative; +const rimrafNativeSync = (path, opt) => { + (0, fs_js_1.rmSync)(path, { + ...opt, + force: true, + recursive: true, + }); + return true; +}; +exports.rimrafNativeSync = rimrafNativeSync; +//# sourceMappingURL=rimraf-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js.map new file mode 100644 index 
00000000000000..6eddd444e49a13 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-native.js","sourceRoot":"","sources":["../../src/rimraf-native.ts"],"names":[],"mappings":";;;AACA,mCAA0C;AAC1C,MAAM,EAAE,EAAE,EAAE,GAAG,gBAAQ,CAAA;AAEhB,MAAM,YAAY,GAAG,KAAK,EAC/B,IAAY,EACZ,GAAuB,EACL,EAAE;IACpB,MAAM,EAAE,CAAC,IAAI,EAAE;QACb,GAAG,GAAG;QACN,KAAK,EAAE,IAAI;QACX,SAAS,EAAE,IAAI;KAChB,CAAC,CAAA;IACF,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAVY,QAAA,YAAY,gBAUxB;AAEM,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,GAAsB,EACb,EAAE;IACX,IAAA,cAAM,EAAC,IAAI,EAAE;QACX,GAAG,GAAG;QACN,KAAK,EAAE,IAAI;QACX,SAAS,EAAE,IAAI;KAChB,CAAC,CAAA;IACF,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAVY,QAAA,gBAAgB,oBAU5B","sourcesContent":["import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { promises, rmSync } from './fs.js'\nconst { rm } = promises\n\nexport const rimrafNative = async (\n path: string,\n opt: RimrafAsyncOptions,\n): Promise => {\n await rm(path, {\n ...opt,\n force: true,\n recursive: true,\n })\n return true\n}\n\nexport const rimrafNativeSync = (\n path: string,\n opt: RimrafSyncOptions,\n): boolean => {\n rmSync(path, {\n ...opt,\n force: true,\n recursive: true,\n })\n return true\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts new file mode 100644 index 00000000000000..8e532efe9aba21 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafPosix: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafPosixSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-posix.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts.map new file mode 100644 index 00000000000000..3f9b8084ed470b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-posix.d.ts","sourceRoot":"","sources":["../../src/rimraf-posix.ts"],"names":[],"mappings":"AAcA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AAGlE,eAAO,MAAM,WAAW,SAAgB,MAAM,OAAO,kBAAkB,qBAUtE,CAAA;AAED,eAAO,MAAM,eAAe,SAAU,MAAM,OAAO,iBAAiB,YAUnE,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js new file mode 100644 index 00000000000000..eb0e7f11680107 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js @@ -0,0 +1,123 @@ +"use strict"; +// the simple recursive removal, where unlink and rmdir are atomic +// Note that this approach does NOT work on Windows! +// We stat first and only unlink if the Dirent isn't a directory, +// because sunos will let root unlink a directory, and some +// SUPER weird breakage happens as a result. 
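+// The shape below: lstat the target once, recurse depth-first over directory
+// entries, unlink() files, rmdir() each directory after its children are
+// gone, and treat ENOENT at any step as success, since the goal is only that
+// the path no longer exist.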
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.rimrafPosixSync = exports.rimrafPosix = void 0; +const fs_js_1 = require("./fs.js"); +const { lstat, rmdir, unlink } = fs_js_1.promises; +const path_1 = require("path"); +const readdir_or_error_js_1 = require("./readdir-or-error.js"); +const ignore_enoent_js_1 = require("./ignore-enoent.js"); +const rimrafPosix = async (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return await rimrafPosixDir(path, opt, await lstat(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +exports.rimrafPosix = rimrafPosix; +const rimrafPosixSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return rimrafPosixDirSync(path, opt, (0, fs_js_1.lstatSync)(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +exports.rimrafPosixSync = rimrafPosixSync; +const rimrafPosixDir = async (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + const entries = ent.isDirectory() ? await (0, readdir_or_error_js_1.readdirOrError)(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await (0, ignore_enoent_js_1.ignoreENOENT)(unlink(path)); + return true; + } + const removedAll = (await Promise.all(entries.map(ent => rimrafPosixDir((0, path_1.resolve)(path, ent.name), opt, ent)))).reduce((a, b) => a && b, true); + if (!removedAll) { + return false; + } + // we don't ever ACTUALLY try to unlink /, because that can never work + // but when preserveRoot is false, we could be operating on it. + // No need to check if preserveRoot is not false. + if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) { + return false; + } + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await (0, ignore_enoent_js_1.ignoreENOENT)(rmdir(path)); + return true; +}; +const rimrafPosixDirSync = (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + const entries = ent.isDirectory() ? (0, readdir_or_error_js_1.readdirOrErrorSync)(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. 
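+    // (entries is null when lstat said this is not a directory, an array on
+    // a successful readdir, or the readdir error itself: ENOENT means it is
+    // already gone, ENOTDIR means the path is really a file)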
+ /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + (0, ignore_enoent_js_1.ignoreENOENTSync)(() => (0, fs_js_1.unlinkSync)(path)); + return true; + } + let removedAll = true; + for (const ent of entries) { + const p = (0, path_1.resolve)(path, ent.name); + removedAll = rimrafPosixDirSync(p, opt, ent) && removedAll; + } + if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) { + return false; + } + if (!removedAll) { + return false; + } + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + (0, ignore_enoent_js_1.ignoreENOENTSync)(() => (0, fs_js_1.rmdirSync)(path)); + return true; +}; +//# sourceMappingURL=rimraf-posix.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js.map new file mode 100644 index 00000000000000..32a366a54f7e3c --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-posix.js","sourceRoot":"","sources":["../../src/rimraf-posix.ts"],"names":[],"mappings":";AAAA,kEAAkE;AAClE,oDAAoD;AACpD,iEAAiE;AACjE,2DAA2D;AAC3D,4CAA4C;;;AAE5C,mCAAoE;AACpE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,gBAAQ,CAAA;AAEzC,+BAAqC;AAErC,+DAA0E;AAI1E,yDAAmE;AAE5D,MAAM,WAAW,GAAG,KAAK,EAAE,IAAY,EAAE,GAAuB,EAAE,EAAE;IACzE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAC3D,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,WAAW,eAUvB;AAEM,MAAM,eAAe,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IACtE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,kBAAkB,CAAC,IAAI,EAAE,GAAG,EAAE,IAAA,iBAAS,EAAC,IAAI,CAAC,CAAC,CAAA;IACvD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,eAAe,mBAU3B;AAED,MAAM,cAAc,GAAG,KAAK,EAC1B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACD,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,IAAA,oCAAc,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,IAAA,+BAAY,EAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAA;QAChC,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,cAAc,CAAC,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,CACtE,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAEhC,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,sEAAsE;IACtE,+DAA+D;IAC/D,iDAAiD;IACjD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAA
C;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;QACjD,OAAO,KAAK,CAAA;IACd,CAAC;IAED,MAAM,IAAA,+BAAY,EAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAC/B,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,kBAAkB,GAAG,CACzB,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACV,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAA,wCAAkB,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,IAAA,kBAAU,EAAC,IAAI,CAAC,CAAC,CAAA;QACxC,OAAO,IAAI,CAAA;IACb,CAAC;IACD,IAAI,UAAU,GAAY,IAAI,CAAA;IAC9B,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,kBAAkB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,IAAI,UAAU,CAAA;IAC5D,CAAC;IACD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;QACzC,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,IAAA,iBAAS,EAAC,IAAI,CAAC,CAAC,CAAA;IACvC,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["// the simple recursive removal, where unlink and rmdir are atomic\n// Note that this approach does NOT work on Windows!\n// We stat first and only unlink if the Dirent isn't a directory,\n// because sunos will let root unlink a directory, and some\n// SUPER weird breakage happens as a result.\n\nimport { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'\nconst { lstat, rmdir, unlink } = promises\n\nimport { parse, resolve } from 'path'\n\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\n\nimport { Dirent, Stats } from 'fs'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\n\nexport const rimrafPosix = async (path: string, opt: RimrafAsyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafPosixDir(path, opt, await lstat(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nexport const rimrafPosixSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafPosixDirSync(path, opt, lstatSync(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafPosixDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(unlink(path))\n return true\n }\n\n const removedAll = (\n await Promise.all(\n entries.map(ent => rimrafPosixDir(resolve(path, ent.name), opt, ent)),\n )\n ).reduce((a, b) => a && b, true)\n\n if (!removedAll) {\n return false\n }\n\n // we don't ever ACTUALLY try to unlink /, because that can never work\n // but when preserveRoot is false, we could be operating on it.\n // No need to check if preserveRoot is not false.\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n\n await ignoreENOENT(rmdir(path))\n return true\n}\n\nconst rimrafPosixDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => unlinkSync(path))\n return true\n }\n let removedAll: boolean = true\n for (const ent of entries) {\n const p = resolve(path, ent.name)\n removedAll = rimrafPosixDirSync(p, opt, ent) && removedAll\n }\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n\n if (!removedAll) {\n return false\n }\n\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n\n ignoreENOENTSync(() => rmdirSync(path))\n return true\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts new file mode 100644 index 00000000000000..555689073ffe75 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafWindows: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafWindowsSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-windows.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts.map new file mode 100644 index 00000000000000..56f00d9f2e3d13 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-windows.d.ts","sourceRoot":"","sources":["../../src/rimraf-windows.ts"],"names":[],"mappings":"AAYA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AA2DlE,eAAO,MAAM,aAAa,SAAgB,MAAM,OAAO,kBAAkB,qBAUxE,CAAA;AAED,eAAO,MAAM,iBAAiB,SAAU,MAAM,OAAO,iBAAiB,YAUrE,CAAA"} \ No newline at end of file diff --git 
a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js new file mode 100644 index 00000000000000..8d19f98f963606 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js @@ -0,0 +1,182 @@ +"use strict"; +// This is the same as rimrafPosix, with the following changes: +// +// 1. EBUSY, ENFILE, EMFILE trigger retries and/or exponential backoff +// 2. All non-directories are removed first and then all directories are +// removed in a second sweep. +// 3. If we hit ENOTEMPTY in the second sweep, fall back to move-remove on +// that folder. +// +// Note: "move then remove" is 2-10 times slower, and just as unreliable. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.rimrafWindowsSync = exports.rimrafWindows = void 0; +const path_1 = require("path"); +const fix_eperm_js_1 = require("./fix-eperm.js"); +const fs_js_1 = require("./fs.js"); +const ignore_enoent_js_1 = require("./ignore-enoent.js"); +const readdir_or_error_js_1 = require("./readdir-or-error.js"); +const retry_busy_js_1 = require("./retry-busy.js"); +const rimraf_move_remove_js_1 = require("./rimraf-move-remove.js"); +const { unlink, rmdir, lstat } = fs_js_1.promises; +const rimrafWindowsFile = (0, retry_busy_js_1.retryBusy)((0, fix_eperm_js_1.fixEPERM)(unlink)); +const rimrafWindowsFileSync = (0, retry_busy_js_1.retryBusySync)((0, fix_eperm_js_1.fixEPERMSync)(fs_js_1.unlinkSync)); +const rimrafWindowsDirRetry = (0, retry_busy_js_1.retryBusy)((0, fix_eperm_js_1.fixEPERM)(rmdir)); +const rimrafWindowsDirRetrySync = (0, retry_busy_js_1.retryBusySync)((0, fix_eperm_js_1.fixEPERMSync)(fs_js_1.rmdirSync)); +const rimrafWindowsDirMoveRemoveFallback = async (path, opt) => { + /* c8 ignore start */ + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + /* c8 ignore stop */ + // already filtered, remove from options so we don't call unnecessarily + const { filter, ...options } = opt; + try { + return await rimrafWindowsDirRetry(path, options); + } + catch (er) { + if (er?.code === 'ENOTEMPTY') { + return await (0, rimraf_move_remove_js_1.rimrafMoveRemove)(path, options); + } + throw er; + } +}; +const rimrafWindowsDirMoveRemoveFallbackSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + // already filtered, remove from options so we don't call unnecessarily + const { filter, ...options } = opt; + try { + return rimrafWindowsDirRetrySync(path, options); + } + catch (er) { + const fer = er; + if (fer?.code === 'ENOTEMPTY') { + return (0, rimraf_move_remove_js_1.rimrafMoveRemoveSync)(path, options); + } + throw er; + } +}; +const START = Symbol('start'); +const CHILD = Symbol('child'); +const FINISH = Symbol('finish'); +const rimrafWindows = async (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return await rimrafWindowsDir(path, opt, await lstat(path), START); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +exports.rimrafWindows = rimrafWindows; +const rimrafWindowsSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return rimrafWindowsDirSync(path, opt, (0, fs_js_1.lstatSync)(path), START); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +exports.rimrafWindowsSync = rimrafWindowsSync; +const rimrafWindowsDir = async (path, opt, ent, state = START) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + const entries = 
ent.isDirectory() ? await (0, readdir_or_error_js_1.readdirOrError)(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + // is a file + await (0, ignore_enoent_js_1.ignoreENOENT)(rimrafWindowsFile(path, opt)); + return true; + } + const s = state === START ? CHILD : state; + const removedAll = (await Promise.all(entries.map(ent => rimrafWindowsDir((0, path_1.resolve)(path, ent.name), opt, ent, s)))).reduce((a, b) => a && b, true); + if (state === START) { + return rimrafWindowsDir(path, opt, ent, FINISH); + } + else if (state === FINISH) { + if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) { + return false; + } + if (!removedAll) { + return false; + } + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await (0, ignore_enoent_js_1.ignoreENOENT)(rimrafWindowsDirMoveRemoveFallback(path, opt)); + } + return true; +}; +const rimrafWindowsDirSync = (path, opt, ent, state = START) => { + const entries = ent.isDirectory() ? (0, readdir_or_error_js_1.readdirOrErrorSync)(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + // is a file + (0, ignore_enoent_js_1.ignoreENOENTSync)(() => rimrafWindowsFileSync(path, opt)); + return true; + } + let removedAll = true; + for (const ent of entries) { + const s = state === START ? 
CHILD : state; + const p = (0, path_1.resolve)(path, ent.name); + removedAll = rimrafWindowsDirSync(p, opt, ent, s) && removedAll; + } + if (state === START) { + return rimrafWindowsDirSync(path, opt, ent, FINISH); + } + else if (state === FINISH) { + if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) { + return false; + } + if (!removedAll) { + return false; + } + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + (0, ignore_enoent_js_1.ignoreENOENTSync)(() => { + rimrafWindowsDirMoveRemoveFallbackSync(path, opt); + }); + } + return true; +}; +//# sourceMappingURL=rimraf-windows.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js.map new file mode 100644 index 00000000000000..50a97f890d84aa --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-windows.js","sourceRoot":"","sources":["../../src/rimraf-windows.ts"],"names":[],"mappings":";AAAA,+DAA+D;AAC/D,EAAE;AACF,sEAAsE;AACtE,wEAAwE;AACxE,gCAAgC;AAChC,0EAA0E;AAC1E,sBAAsB;AACtB,EAAE;AACF,yEAAyE;;;AAGzE,+BAAqC;AAErC,iDAAuD;AACvD,mCAAoE;AACpE,yDAAmE;AACnE,+DAA0E;AAC1E,mDAA0D;AAC1D,mEAAgF;AAChF,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,gBAAQ,CAAA;AAEzC,MAAM,iBAAiB,GAAG,IAAA,yBAAS,EAAC,IAAA,uBAAQ,EAAC,MAAM,CAAC,CAAC,CAAA;AACrD,MAAM,qBAAqB,GAAG,IAAA,6BAAa,EAAC,IAAA,2BAAY,EAAC,kBAAU,CAAC,CAAC,CAAA;AACrE,MAAM,qBAAqB,GAAG,IAAA,yBAAS,EAAC,IAAA,uBAAQ,EAAC,KAAK,CAAC,CAAC,CAAA;AACxD,MAAM,yBAAyB,GAAG,IAAA,6BAAa,EAAC,IAAA,2BAAY,EAAC,iBAAS,CAAC,CAAC,CAAA;AAExE,MAAM,kCAAkC,GAAG,KAAK,EAC9C,IAAY,EACZ,GAAuB,EACL,EAAE;IACpB,qBAAqB;IACrB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,oBAAoB;IACpB,uEAAuE;IACvE,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAClC,IAAI,CAAC;QACH,OAAO,MAAM,qBAAqB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACnD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,WAAW,EAAE,CAAC;YACxD,OAAO,MAAM,IAAA,wCAAgB,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;QAC9C,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,sCAAsC,GAAG,CAC7C,IAAY,EACZ,GAAsB,EACb,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,uEAAuE;IACvE,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAClC,IAAI,CAAC;QACH,OAAO,yBAAyB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACjD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,WAAW,EAAE,CAAC;YAC9B,OAAO,IAAA,4CAAoB,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;QAC5C,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,CAAA;AAC7B,MAAM,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,CAAA;AAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAA;AAExB,MAAM,aAAa,GAAG,KAAK,EAAE,IAAY,EAAE,GAAuB,EAAE,EAAE;IAC3E,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,gBAAgB,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;IACpE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,aAAa,iBAUzB;AAEM,MAAM,iBAAiB,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IACxE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,oBAAoB,CAAC,IAAI,EAAE,GAAG,EAAE,IAAA,iBAAS,EAAC,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;IAChE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ
;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,iBAAiB,qBAU7B;AAED,MAAM,gBAAgB,GAAG,KAAK,EAC5B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACnB,KAAK,GAAG,KAAK,EACK,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,IAAA,oCAAc,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,YAAY;QACZ,MAAM,IAAA,+BAAY,EAAC,iBAAiB,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QAChD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,CAAC,GAAG,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAA;IACzC,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAChB,gBAAgB,CAAC,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,CACvD,CACF,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAEhC,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;QACpB,OAAO,gBAAgB,CAAC,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,CAAC,CAAA;IACjD,CAAC;SAAM,IAAI,KAAK,KAAK,MAAM,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;YAC5D,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,CAAC,UAAU,EAAE,CAAC;YAChB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,IAAA,+BAAY,EAAC,kCAAkC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IACnE,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,oBAAoB,GAAG,CAC3B,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACnB,KAAK,GAAG,KAAK,EACJ,EAAE;IACX,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAA,wCAAkB,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,YAAY;QACZ,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,qBAAqB,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,IAAI,UAAU,GAAG,IAAI,CAAA;IACrB,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAA;QACzC,MAAM,CAAC,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,oBAAoB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,IAAI,UAAU,CAAA;IACjE,CAAC;IAED,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;QACpB,OAAO,oBAAoB,CAAC,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,CAAC,CAAA;IACrD,CAAC;SAAM,IAAI,KAAK,KAAK,MAAM,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;YAC5D,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,CAAC,UAAU,EAAE,CAAC;YAChB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAA,mCAAgB,EAAC,GAAG,EAAE;YACpB,sCAAsC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QACnD,CAAC,CAAC,CAAA;IACJ,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["// This is the 
same as rimrafPosix, with the following changes:\n//\n// 1. EBUSY, ENFILE, EMFILE trigger retries and/or exponential backoff\n// 2. All non-directories are removed first and then all directories are\n// removed in a second sweep.\n// 3. If we hit ENOTEMPTY in the second sweep, fall back to move-remove on\n// the that folder.\n//\n// Note: \"move then remove\" is 2-10 times slower, and just as unreliable.\n\nimport { Dirent, Stats } from 'fs'\nimport { parse, resolve } from 'path'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { fixEPERM, fixEPERMSync } from './fix-eperm.js'\nimport { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\nimport { retryBusy, retryBusySync } from './retry-busy.js'\nimport { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'\nconst { unlink, rmdir, lstat } = promises\n\nconst rimrafWindowsFile = retryBusy(fixEPERM(unlink))\nconst rimrafWindowsFileSync = retryBusySync(fixEPERMSync(unlinkSync))\nconst rimrafWindowsDirRetry = retryBusy(fixEPERM(rmdir))\nconst rimrafWindowsDirRetrySync = retryBusySync(fixEPERMSync(rmdirSync))\n\nconst rimrafWindowsDirMoveRemoveFallback = async (\n path: string,\n opt: RimrafAsyncOptions,\n): Promise => {\n /* c8 ignore start */\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n /* c8 ignore stop */\n // already filtered, remove from options so we don't call unnecessarily\n const { filter, ...options } = opt\n try {\n return await rimrafWindowsDirRetry(path, options)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOTEMPTY') {\n return await rimrafMoveRemove(path, options)\n }\n throw er\n }\n}\n\nconst rimrafWindowsDirMoveRemoveFallbackSync = (\n path: string,\n opt: RimrafSyncOptions,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n // already filtered, remove from options so we don't call unnecessarily\n const { filter, ...options } = opt\n try {\n return rimrafWindowsDirRetrySync(path, options)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOTEMPTY') {\n return rimrafMoveRemoveSync(path, options)\n }\n throw er\n }\n}\n\nconst START = Symbol('start')\nconst CHILD = Symbol('child')\nconst FINISH = Symbol('finish')\n\nexport const rimrafWindows = async (path: string, opt: RimrafAsyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafWindowsDir(path, opt, await lstat(path), START)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nexport const rimrafWindowsSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafWindowsDirSync(path, opt, lstatSync(path), START)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafWindowsDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n state = START,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n // is a file\n await ignoreENOENT(rimrafWindowsFile(path, opt))\n return true\n }\n\n const s = state === START ? CHILD : state\n const removedAll = (\n await Promise.all(\n entries.map(ent =>\n rimrafWindowsDir(resolve(path, ent.name), opt, ent, s),\n ),\n )\n ).reduce((a, b) => a && b, true)\n\n if (state === START) {\n return rimrafWindowsDir(path, opt, ent, FINISH)\n } else if (state === FINISH) {\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (!removedAll) {\n return false\n }\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(rimrafWindowsDirMoveRemoveFallback(path, opt))\n }\n return true\n}\n\nconst rimrafWindowsDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n state = START,\n): boolean => {\n const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n // is a file\n ignoreENOENTSync(() => rimrafWindowsFileSync(path, opt))\n return true\n }\n\n let removedAll = true\n for (const ent of entries) {\n const s = state === START ? 
CHILD : state\n const p = resolve(path, ent.name)\n removedAll = rimrafWindowsDirSync(p, opt, ent, s) && removedAll\n }\n\n if (state === START) {\n return rimrafWindowsDirSync(path, opt, ent, FINISH)\n } else if (state === FINISH) {\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (!removedAll) {\n return false\n }\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => {\n rimrafWindowsDirMoveRemoveFallbackSync(path, opt)\n })\n }\n return true\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts new file mode 100644 index 00000000000000..e191fd90da93d3 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafOptions } from './index.js'; +export declare const useNative: (opt?: RimrafAsyncOptions) => boolean; +export declare const useNativeSync: (opt?: RimrafOptions) => boolean; +//# sourceMappingURL=use-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts.map new file mode 100644 index 00000000000000..b182beb1707a7d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.d.ts","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,MAAM,YAAY,CAAA;AAa9D,eAAO,MAAM,SAAS,EAAE,CAAC,GAAG,CAAC,EAAE,kBAAkB,KAAK,OAGf,CAAA;AACvC,eAAO,MAAM,aAAa,EAAE,CAAC,GAAG,CAAC,EAAE,aAAa,KAAK,OAGd,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js new file mode 100644 index 00000000000000..1f668768d96bcb --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js @@ -0,0 +1,22 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.useNativeSync = exports.useNative = void 0; +const platform_js_1 = __importDefault(require("./platform.js")); +const version = process.env.__TESTING_RIMRAF_NODE_VERSION__ || process.version; +const versArr = version.replace(/^v/, '').split('.'); +/* c8 ignore start */ +const [major = 0, minor = 0] = versArr.map(v => parseInt(v, 10)); +/* c8 ignore stop */ +const hasNative = major > 14 || (major === 14 && minor >= 14); +// we do NOT use native by default on Windows, because Node's native +// rm implementation is less advanced. Change this code if that changes. +exports.useNative = !hasNative || platform_js_1.default === 'win32' ? + () => false + : opt => !opt?.signal && !opt?.filter; +exports.useNativeSync = !hasNative || platform_js_1.default === 'win32' ? 
+ () => false + : opt => !opt?.signal && !opt?.filter; +//# sourceMappingURL=use-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js.map new file mode 100644 index 00000000000000..a89b8db7e68352 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.js","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":";;;;;;AACA,gEAAoC;AAEpC,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,OAAO,CAAC,OAAO,CAAA;AAC9E,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;AAEpD,qBAAqB;AACrB,MAAM,CAAC,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,CAAC,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAA;AAChE,oBAAoB;AACpB,MAAM,SAAS,GAAG,KAAK,GAAG,EAAE,IAAI,CAAC,KAAK,KAAK,EAAE,IAAI,KAAK,IAAI,EAAE,CAAC,CAAA;AAE7D,oEAAoE;AACpE,yEAAyE;AAC5D,QAAA,SAAS,GACpB,CAAC,SAAS,IAAI,qBAAQ,KAAK,OAAO,CAAC,CAAC;IAClC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,EAAE,MAAM,IAAI,CAAC,GAAG,EAAE,MAAM,CAAA;AAC1B,QAAA,aAAa,GACxB,CAAC,SAAS,IAAI,qBAAQ,KAAK,OAAO,CAAC,CAAC;IAClC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,EAAE,MAAM,IAAI,CAAC,GAAG,EAAE,MAAM,CAAA","sourcesContent":["import { RimrafAsyncOptions, RimrafOptions } from './index.js'\nimport platform from './platform.js'\n\nconst version = process.env.__TESTING_RIMRAF_NODE_VERSION__ || process.version\nconst versArr = version.replace(/^v/, '').split('.')\n\n/* c8 ignore start */\nconst [major = 0, minor = 0] = versArr.map(v => parseInt(v, 10))\n/* c8 ignore stop */\nconst hasNative = major > 14 || (major === 14 && minor >= 14)\n\n// we do NOT use native by default on Windows, because Node's native\n// rm implementation is less advanced. 
Change this code if that changes.\nexport const useNative: (opt?: RimrafAsyncOptions) => boolean =\n !hasNative || platform === 'win32' ?\n () => false\n : opt => !opt?.signal && !opt?.filter\nexport const useNativeSync: (opt?: RimrafOptions) => boolean =\n !hasNative || platform === 'win32' ?\n () => false\n : opt => !opt?.signal && !opt?.filter\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts b/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts new file mode 100644 index 00000000000000..5600d7c766e6d6 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts @@ -0,0 +1,8 @@ +#!/usr/bin/env node +export declare const help: string; +declare const main: { + (...args: string[]): Promise<1 | 0>; + help: string; +}; +export default main; +//# sourceMappingURL=bin.d.mts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts.map b/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts.map new file mode 100644 index 00000000000000..a5f1ec2cb6c8e8 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts.map @@ -0,0 +1 @@ +{"version":3,"file":"bin.d.mts","sourceRoot":"","sources":["../../src/bin.mts"],"names":[],"mappings":";AAcA,eAAO,MAAM,IAAI,QAkChB,CAAA;AA8ED,QAAA,MAAM,IAAI;cAAmB,MAAM,EAAE;;CAoIpC,CAAA;AAGD,eAAe,IAAI,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/bin.mjs b/deps/npm/node_modules/rimraf/dist/esm/bin.mjs new file mode 100755 index 00000000000000..4aea35e9c43256 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/bin.mjs @@ -0,0 +1,256 @@ +#!/usr/bin/env node +import { readFile } from 'fs/promises'; +import { rimraf } from './index.js'; +const pj = fileURLToPath(new URL('../package.json', import.meta.url)); +const pjDist = fileURLToPath(new URL('../../package.json', import.meta.url)); +const { version } = JSON.parse(await readFile(pjDist, 'utf8').catch(() => readFile(pj, 'utf8'))); +const runHelpForUsage = () => console.error('run `rimraf --help` for usage information'); +export const help = `rimraf version ${version} + +Usage: rimraf <path> [<path> ...] +Deletes all files and folders at "path", recursively. + +Options: + -- Treat all subsequent arguments as paths + -h --help Display this usage info + --preserve-root Do not remove '/' recursively (default) + --no-preserve-root Do not treat '/' specially + -G --no-glob Treat arguments as literal paths, not globs (default) + -g --glob Treat arguments as glob patterns + -v --verbose Be verbose when deleting files, showing them as + they are removed. 
Not compatible with --impl=native + -V --no-verbose Be silent when deleting files, showing nothing as + they are removed (default) + -i --interactive Ask for confirmation before deleting anything + Not compatible with --impl=native + -I --no-interactive Do not ask for confirmation before deleting + + --impl=<type> Specify the implementation to use: + rimraf: choose the best option (default) + native: the built-in implementation in Node.js + manual: the platform-specific JS implementation + posix: the Posix JS implementation + windows: the Windows JS implementation (falls back to + move-remove on ENOTEMPTY) + move-remove: a slow reliable Windows fallback + +Implementation-specific options: + --tmp=<path> Temp file folder for 'move-remove' implementation + --max-retries=<n> maxRetries for 'native' and 'windows' implementations + --retry-delay=<n> retryDelay for 'native' implementation, default 100 + --backoff=<n> Exponential backoff factor for retries (default: 1.2) +`; +import { parse, relative, resolve } from 'path'; +const cwd = process.cwd(); +import { createInterface } from 'readline'; +import { fileURLToPath } from 'url'; +const prompt = async (rl, q) => new Promise(res => rl.question(q, res)); +const interactiveRimraf = async (impl, paths, opt) => { + const existingFilter = opt.filter || (() => true); + let allRemaining = false; + let noneRemaining = false; + const queue = []; + let processing = false; + const processQueue = async () => { + if (processing) + return; + processing = true; + let next; + while ((next = queue.shift())) { + await next(); + } + processing = false; + }; + const oneAtATime = (fn) => async (s, e) => { + const p = new Promise(res => { + queue.push(async () => { + const result = await fn(s, e); + res(result); + return result; + }); + }); + processQueue(); + return p; + }; + const rl = createInterface({ + input: process.stdin, + output: process.stdout, + }); + opt.filter = oneAtATime(async (path, ent) => { + if (noneRemaining) { + return false; + } + while (!allRemaining) { + const a = (await prompt(rl, `rm? 
${relative(cwd, path)}\n[(Yes)/No/All/Quit] > `)).trim(); + if (/^n/i.test(a)) { + return false; + } + else if (/^a/i.test(a)) { + allRemaining = true; + break; + } + else if (/^q/i.test(a)) { + noneRemaining = true; + return false; + } + else if (a === '' || /^y/i.test(a)) { + break; + } + else { + continue; + } + } + return existingFilter(path, ent); + }); + await impl(paths, opt); + rl.close(); +}; +const main = async (...args) => { + const verboseFilter = (s) => { + console.log(relative(cwd, s)); + return true; + }; + if (process.env.__RIMRAF_TESTING_BIN_FAIL__ === '1') { + throw new Error('simulated rimraf failure'); + } + const opt = {}; + const paths = []; + let dashdash = false; + let impl = rimraf; + let interactive = false; + for (const arg of args) { + if (dashdash) { + paths.push(arg); + continue; + } + if (arg === '--') { + dashdash = true; + continue; + } + else if (arg === '-rf' || arg === '-fr') { + // this never did anything, but people put it there I guess + continue; + } + else if (arg === '-h' || arg === '--help') { + console.log(help); + return 0; + } + else if (arg === '--interactive' || arg === '-i') { + interactive = true; + continue; + } + else if (arg === '--no-interactive' || arg === '-I') { + interactive = false; + continue; + } + else if (arg === '--verbose' || arg === '-v') { + opt.filter = verboseFilter; + continue; + } + else if (arg === '--no-verbose' || arg === '-V') { + opt.filter = undefined; + continue; + } + else if (arg === '-g' || arg === '--glob') { + opt.glob = true; + continue; + } + else if (arg === '-G' || arg === '--no-glob') { + opt.glob = false; + continue; + } + else if (arg === '--preserve-root') { + opt.preserveRoot = true; + continue; + } + else if (arg === '--no-preserve-root') { + opt.preserveRoot = false; + continue; + } + else if (/^--tmp=/.test(arg)) { + const val = arg.substring('--tmp='.length); + opt.tmp = val; + continue; + } + else if (/^--max-retries=/.test(arg)) { + const val = +arg.substring('--max-retries='.length); + opt.maxRetries = val; + continue; + } + else if (/^--retry-delay=/.test(arg)) { + const val = +arg.substring('--retry-delay='.length); + opt.retryDelay = val; + continue; + } + else if (/^--backoff=/.test(arg)) { + const val = +arg.substring('--backoff='.length); + opt.backoff = val; + continue; + } + else if (/^--impl=/.test(arg)) { + const val = arg.substring('--impl='.length); + switch (val) { + case 'rimraf': + impl = rimraf; + continue; + case 'native': + case 'manual': + case 'posix': + case 'windows': + impl = rimraf[val]; + continue; + case 'move-remove': + impl = rimraf.moveRemove; + continue; + default: + console.error(`unknown implementation: ${val}`); + runHelpForUsage(); + return 1; + } + } + else if (/^-/.test(arg)) { + console.error(`unknown option: ${arg}`); + runHelpForUsage(); + return 1; + } + else { + paths.push(arg); + } + } + if (opt.preserveRoot !== false) { + for (const path of paths.map(p => resolve(p))) { + if (path === parse(path).root) { + console.error(`rimraf: it is dangerous to operate recursively on '/'`); + console.error('use --no-preserve-root to override this failsafe'); + return 1; + } + } + } + if (!paths.length) { + console.error('rimraf: must provide a path to remove'); + runHelpForUsage(); + return 1; + } + if (impl === rimraf.native && (interactive || opt.filter)) { + console.error('native implementation does not support -v or -i'); + runHelpForUsage(); + return 1; + } + if (interactive) { + await interactiveRimraf(impl, paths, opt); + } + else { + await impl(paths, 
opt); + } + return 0; +}; +main.help = help; +export default main; +if (process.env.__TESTING_RIMRAF_BIN__ !== '1') { + const args = process.argv.slice(2); + main(...args).then(code => process.exit(code), er => { + console.error(er); + process.exit(1); + }); +} +//# sourceMappingURL=bin.mjs.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/bin.mjs.map b/deps/npm/node_modules/rimraf/dist/esm/bin.mjs.map new file mode 100644 index 00000000000000..163fc96df5b6a2 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/bin.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"bin.mjs","sourceRoot":"","sources":["../../src/bin.mts"],"names":[],"mappings":";AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAA;AAEtC,OAAO,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAEnC,MAAM,EAAE,GAAG,aAAa,CAAC,IAAI,GAAG,CAAC,iBAAiB,EAAE,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;AACrE,MAAM,MAAM,GAAG,aAAa,CAAC,IAAI,GAAG,CAAC,oBAAoB,EAAE,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;AAC5E,MAAM,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,KAAK,CAC5B,MAAM,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC,CAAC,CAC1C,CAAA;AAExB,MAAM,eAAe,GAAG,GAAG,EAAE,CAC3B,OAAO,CAAC,KAAK,CAAC,2CAA2C,CAAC,CAAA;AAE5D,MAAM,CAAC,MAAM,IAAI,GAAG,kBAAkB,OAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkC5C,CAAA;AAED,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAC/C,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAA;AAGzB,OAAO,EAAE,eAAe,EAAa,MAAM,UAAU,CAAA;AACrD,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAA;AAEnC,MAAM,MAAM,GAAG,KAAK,EAAE,EAAa,EAAE,CAAS,EAAE,EAAE,CAChD,IAAI,OAAO,CAAS,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;AAEjD,MAAM,iBAAiB,GAAG,KAAK,EAC7B,IAA6E,EAC7E,KAAe,EACf,GAAuB,EACvB,EAAE;IACF,MAAM,cAAc,GAAG,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,CAAA;IACjD,IAAI,YAAY,GAAG,KAAK,CAAA;IACxB,IAAI,aAAa,GAAG,KAAK,CAAA;IACzB,MAAM,KAAK,GAA+B,EAAE,CAAA;IAC5C,IAAI,UAAU,GAAG,KAAK,CAAA;IACtB,MAAM,YAAY,GAAG,KAAK,IAAI,EAAE;QAC9B,IAAI,UAAU;YAAE,OAAM;QACtB,UAAU,GAAG,IAAI,CAAA;QACjB,IAAI,IAA0C,CAAA;QAC9C,OAAO,CAAC,IAAI,GAAG,KAAK,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC;YAC9B,MAAM,IAAI,EAAE,CAAA;QACd,CAAC;QACD,UAAU,GAAG,KAAK,CAAA;IACpB,CAAC,CAAA;IACD,MAAM,UAAU,GACd,CAAC,EAAsD,EAAE,EAAE,CAC3D,KAAK,EAAE,CAAS,EAAE,CAAiB,EAAoB,EAAE;QACvD,MAAM,CAAC,GAAG,IAAI,OAAO,CAAU,GAAG,CAAC,EAAE;YACnC,KAAK,CAAC,IAAI,CAAC,KAAK,IAAI,EAAE;gBACpB,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;gBAC7B,GAAG,CAAC,MAAM,CAAC,CAAA;gBACX,OAAO,MAAM,CAAA;YACf,CAAC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;QACF,YAAY,EAAE,CAAA;QACd,OAAO,CAAC,CAAA;IACV,CAAC,CAAA;IACH,MAAM,EAAE,GAAG,eAAe,CAAC;QACzB,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,MAAM,EAAE,OAAO,CAAC,MAAM;KACvB,CAAC,CAAA;IACF,GAAG,CAAC,MAAM,GAAG,UAAU,CACrB,KAAK,EAAE,IAAY,EAAE,GAAmB,EAAoB,EAAE;QAC5D,IAAI,aAAa,EAAE,CAAC;YAClB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,OAAO,CAAC,YAAY,EAAE,CAAC;YACrB,MAAM,CAAC,GAAG,CACR,MAAM,MAAM,CAAC,EAAE,EAAE,OAAO,QAAQ,CAAC,GAAG,EAAE,IAAI,CAAC,0BAA0B,CAAC,CACvE,CAAC,IAAI,EAAE,CAAA;YACR,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;gBAClB,OAAO,KAAK,CAAA;YACd,CAAC;iBAAM,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;gBACzB,YAAY,GAAG,IAAI,CAAA;gBACnB,MAAK;YACP,CAAC;iBAAM,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;gBACzB,aAAa,GAAG,IAAI,CAAA;gBACpB,OAAO,KAAK,CAAA;YACd,CAAC;iBAAM,IAAI,CAAC,KAAK,EAAE,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;gBACrC,MAAK;YACP,CAAC;iBAAM,CAAC;gBACN,SAAQ;YACV,CAAC;QACH,CAAC;QACD,OAAO,cAAc,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IAClC,CAAC,CACF,CAAA;IACD,MAAM,IAAI,CAAC,KAAK,EAAE,GAAG,CAAC,CAAA;IACtB,EAAE,CAAC,KAAK,EAAE,CAAA;AACZ,CAAC,CAAA;AAED,MAAM,IAAI,GAAG,KAAK,EAAE,GAAG,IAAc,EAAE,EAAE;I
ACvC,MAAM,aAAa,GAAG,CAAC,CAAS,EAAE,EAAE;QAClC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAA;QAC7B,OAAO,IAAI,CAAA;IACb,CAAC,CAAA;IAED,IAAI,OAAO,CAAC,GAAG,CAAC,2BAA2B,KAAK,GAAG,EAAE,CAAC;QACpD,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAA;IAC7C,CAAC;IAED,MAAM,GAAG,GAAuB,EAAE,CAAA;IAClC,MAAM,KAAK,GAAa,EAAE,CAAA;IAC1B,IAAI,QAAQ,GAAG,KAAK,CAAA;IACpB,IAAI,IAAI,GAGgB,MAAM,CAAA;IAE9B,IAAI,WAAW,GAAG,KAAK,CAAA;IAEvB,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE,CAAC;QACvB,IAAI,QAAQ,EAAE,CAAC;YACb,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;YACf,SAAQ;QACV,CAAC;QACD,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YACjB,QAAQ,GAAG,IAAI,CAAA;YACf,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,KAAK,IAAI,GAAG,KAAK,KAAK,EAAE,CAAC;YAC1C,2DAA2D;YAC3D,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,QAAQ,EAAE,CAAC;YAC5C,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;YACjB,OAAO,CAAC,CAAA;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,eAAe,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YACnD,WAAW,GAAG,IAAI,CAAA;YAClB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,kBAAkB,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YACtD,WAAW,GAAG,KAAK,CAAA;YACnB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,WAAW,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YAC/C,GAAG,CAAC,MAAM,GAAG,aAAa,CAAA;YAC1B,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,cAAc,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YAClD,GAAG,CAAC,MAAM,GAAG,SAAS,CAAA;YACtB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,QAAQ,EAAE,CAAC;YAC5C,GAAG,CAAC,IAAI,GAAG,IAAI,CAAA;YACf,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,WAAW,EAAE,CAAC;YAC/C,GAAG,CAAC,IAAI,GAAG,KAAK,CAAA;YAChB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,iBAAiB,EAAE,CAAC;YACrC,GAAG,CAAC,YAAY,GAAG,IAAI,CAAA;YACvB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,oBAAoB,EAAE,CAAC;YACxC,GAAG,CAAC,YAAY,GAAG,KAAK,CAAA;YACxB,SAAQ;QACV,CAAC;aAAM,IAAI,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YAC/B,MAAM,GAAG,GAAG,GAAG,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAA;YAC1C,GAAG,CAAC,GAAG,GAAG,GAAG,CAAA;YACb,SAAQ;QACV,CAAC;aAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YACvC,MAAM,GAAG,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAA;YACnD,GAAG,CAAC,UAAU,GAAG,GAAG,CAAA;YACpB,SAAQ;QACV,CAAC;aAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YACvC,MAAM,GAAG,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAA;YACnD,GAAG,CAAC,UAAU,GAAG,GAAG,CAAA;YACpB,SAAQ;QACV,CAAC;aAAM,IAAI,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YACnC,MAAM,GAAG,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,YAAY,CAAC,MAAM,CAAC,CAAA;YAC/C,GAAG,CAAC,OAAO,GAAG,GAAG,CAAA;YACjB,SAAQ;QACV,CAAC;aAAM,IAAI,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YAChC,MAAM,GAAG,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAA;YAC3C,QAAQ,GAAG,EAAE,CAAC;gBACZ,KAAK,QAAQ;oBACX,IAAI,GAAG,MAAM,CAAA;oBACb,SAAQ;gBACV,KAAK,QAAQ,CAAC;gBACd,KAAK,QAAQ,CAAC;gBACd,KAAK,OAAO,CAAC;gBACb,KAAK,SAAS;oBACZ,IAAI,GAAG,MAAM,CAAC,GAAG,CAAC,CAAA;oBAClB,SAAQ;gBACV,KAAK,aAAa;oBAChB,IAAI,GAAG,MAAM,CAAC,UAAU,CAAA;oBACxB,SAAQ;gBACV;oBACE,OAAO,CAAC,KAAK,CAAC,2BAA2B,GAAG,EAAE,CAAC,CAAA;oBAC/C,eAAe,EAAE,CAAA;oBACjB,OAAO,CAAC,CAAA;YACZ,CAAC;QACH,CAAC;aAAM,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YAC1B,OAAO,CAAC,KAAK,CAAC,mBAAmB,GAAG,EAAE,CAAC,CAAA;YACvC,eAAe,EAAE,CAAA;YACjB,OAAO,CAAC,CAAA;QACV,CAAC;aAAM,CAAC;YACN,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;QACjB,CAAC;IACH,CAAC;IAED,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,EAAE,CAAC;QAC/B,KAAK,MAAM,IAAI,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;YAC9C,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;gBAC9B,OAAO,CAAC,KAAK,CAAC,uDAAuD,CAAC,CAAA;gBACtE,OAAO,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAA;gBACjE,OAAO,CAAC,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC;IAED,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC;QAClB,OAAO,CAAC,KAAK,CAAC,uCAAuC,CAAC,C
AAA;QACtD,eAAe,EAAE,CAAA;QACjB,OAAO,CAAC,CAAA;IACV,CAAC;IAED,IAAI,IAAI,KAAK,MAAM,CAAC,MAAM,IAAI,CAAC,WAAW,IAAI,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC;QAC1D,OAAO,CAAC,KAAK,CAAC,iDAAiD,CAAC,CAAA;QAChE,eAAe,EAAE,CAAA;QACjB,OAAO,CAAC,CAAA;IACV,CAAC;IAED,IAAI,WAAW,EAAE,CAAC;QAChB,MAAM,iBAAiB,CAAC,IAAI,EAAE,KAAK,EAAE,GAAG,CAAC,CAAA;IAC3C,CAAC;SAAM,CAAC;QACN,MAAM,IAAI,CAAC,KAAK,EAAE,GAAG,CAAC,CAAA;IACxB,CAAC;IAED,OAAO,CAAC,CAAA;AACV,CAAC,CAAA;AACD,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;AAEhB,eAAe,IAAI,CAAA;AAEnB,IAAI,OAAO,CAAC,GAAG,CAAC,sBAAsB,KAAK,GAAG,EAAE,CAAC;IAC/C,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;IAClC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC,IAAI,CAChB,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,EAC1B,EAAE,CAAC,EAAE;QACH,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAA;QACjB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;IACjB,CAAC,CACF,CAAA;AACH,CAAC","sourcesContent":["#!/usr/bin/env node\nimport { readFile } from 'fs/promises'\nimport type { RimrafAsyncOptions } from './index.js'\nimport { rimraf } from './index.js'\n\nconst pj = fileURLToPath(new URL('../package.json', import.meta.url))\nconst pjDist = fileURLToPath(new URL('../../package.json', import.meta.url))\nconst { version } = JSON.parse(\n await readFile(pjDist, 'utf8').catch(() => readFile(pj, 'utf8')),\n) as { version: string }\n\nconst runHelpForUsage = () =>\n console.error('run `rimraf --help` for usage information')\n\nexport const help = `rimraf version ${version}\n\nUsage: rimraf [ ...]\nDeletes all files and folders at \"path\", recursively.\n\nOptions:\n -- Treat all subsequent arguments as paths\n -h --help Display this usage info\n --preserve-root Do not remove '/' recursively (default)\n --no-preserve-root Do not treat '/' specially\n -G --no-glob Treat arguments as literal paths, not globs (default)\n -g --glob Treat arguments as glob patterns\n -v --verbose Be verbose when deleting files, showing them as\n they are removed. 
Not compatible with --impl=native\n -V --no-verbose Be silent when deleting files, showing nothing as\n they are removed (default)\n -i --interactive Ask for confirmation before deleting anything\n Not compatible with --impl=native\n -I --no-interactive Do not ask for confirmation before deleting\n\n --impl= Specify the implementation to use:\n rimraf: choose the best option (default)\n native: the built-in implementation in Node.js\n manual: the platform-specific JS implementation\n posix: the Posix JS implementation\n windows: the Windows JS implementation (falls back to\n move-remove on ENOTEMPTY)\n move-remove: a slow reliable Windows fallback\n\nImplementation-specific options:\n --tmp= Temp file folder for 'move-remove' implementation\n --max-retries= maxRetries for 'native' and 'windows' implementations\n --retry-delay= retryDelay for 'native' implementation, default 100\n --backoff= Exponential backoff factor for retries (default: 1.2)\n`\n\nimport { parse, relative, resolve } from 'path'\nconst cwd = process.cwd()\n\nimport { Dirent, Stats } from 'fs'\nimport { createInterface, Interface } from 'readline'\nimport { fileURLToPath } from 'url'\n\nconst prompt = async (rl: Interface, q: string) =>\n new Promise(res => rl.question(q, res))\n\nconst interactiveRimraf = async (\n impl: (path: string | string[], opt?: RimrafAsyncOptions) => Promise,\n paths: string[],\n opt: RimrafAsyncOptions,\n) => {\n const existingFilter = opt.filter || (() => true)\n let allRemaining = false\n let noneRemaining = false\n const queue: (() => Promise)[] = []\n let processing = false\n const processQueue = async () => {\n if (processing) return\n processing = true\n let next: (() => Promise) | undefined\n while ((next = queue.shift())) {\n await next()\n }\n processing = false\n }\n const oneAtATime =\n (fn: (s: string, e: Dirent | Stats) => Promise) =>\n async (s: string, e: Dirent | Stats): Promise => {\n const p = new Promise(res => {\n queue.push(async () => {\n const result = await fn(s, e)\n res(result)\n return result\n })\n })\n processQueue()\n return p\n }\n const rl = createInterface({\n input: process.stdin,\n output: process.stdout,\n })\n opt.filter = oneAtATime(\n async (path: string, ent: Dirent | Stats): Promise => {\n if (noneRemaining) {\n return false\n }\n while (!allRemaining) {\n const a = (\n await prompt(rl, `rm? 
${relative(cwd, path)}\\n[(Yes)/No/All/Quit] > `)\n ).trim()\n if (/^n/i.test(a)) {\n return false\n } else if (/^a/i.test(a)) {\n allRemaining = true\n break\n } else if (/^q/i.test(a)) {\n noneRemaining = true\n return false\n } else if (a === '' || /^y/i.test(a)) {\n break\n } else {\n continue\n }\n }\n return existingFilter(path, ent)\n },\n )\n await impl(paths, opt)\n rl.close()\n}\n\nconst main = async (...args: string[]) => {\n const verboseFilter = (s: string) => {\n console.log(relative(cwd, s))\n return true\n }\n\n if (process.env.__RIMRAF_TESTING_BIN_FAIL__ === '1') {\n throw new Error('simulated rimraf failure')\n }\n\n const opt: RimrafAsyncOptions = {}\n const paths: string[] = []\n let dashdash = false\n let impl: (\n path: string | string[],\n opt?: RimrafAsyncOptions,\n ) => Promise = rimraf\n\n let interactive = false\n\n for (const arg of args) {\n if (dashdash) {\n paths.push(arg)\n continue\n }\n if (arg === '--') {\n dashdash = true\n continue\n } else if (arg === '-rf' || arg === '-fr') {\n // this never did anything, but people put it there I guess\n continue\n } else if (arg === '-h' || arg === '--help') {\n console.log(help)\n return 0\n } else if (arg === '--interactive' || arg === '-i') {\n interactive = true\n continue\n } else if (arg === '--no-interactive' || arg === '-I') {\n interactive = false\n continue\n } else if (arg === '--verbose' || arg === '-v') {\n opt.filter = verboseFilter\n continue\n } else if (arg === '--no-verbose' || arg === '-V') {\n opt.filter = undefined\n continue\n } else if (arg === '-g' || arg === '--glob') {\n opt.glob = true\n continue\n } else if (arg === '-G' || arg === '--no-glob') {\n opt.glob = false\n continue\n } else if (arg === '--preserve-root') {\n opt.preserveRoot = true\n continue\n } else if (arg === '--no-preserve-root') {\n opt.preserveRoot = false\n continue\n } else if (/^--tmp=/.test(arg)) {\n const val = arg.substring('--tmp='.length)\n opt.tmp = val\n continue\n } else if (/^--max-retries=/.test(arg)) {\n const val = +arg.substring('--max-retries='.length)\n opt.maxRetries = val\n continue\n } else if (/^--retry-delay=/.test(arg)) {\n const val = +arg.substring('--retry-delay='.length)\n opt.retryDelay = val\n continue\n } else if (/^--backoff=/.test(arg)) {\n const val = +arg.substring('--backoff='.length)\n opt.backoff = val\n continue\n } else if (/^--impl=/.test(arg)) {\n const val = arg.substring('--impl='.length)\n switch (val) {\n case 'rimraf':\n impl = rimraf\n continue\n case 'native':\n case 'manual':\n case 'posix':\n case 'windows':\n impl = rimraf[val]\n continue\n case 'move-remove':\n impl = rimraf.moveRemove\n continue\n default:\n console.error(`unknown implementation: ${val}`)\n runHelpForUsage()\n return 1\n }\n } else if (/^-/.test(arg)) {\n console.error(`unknown option: ${arg}`)\n runHelpForUsage()\n return 1\n } else {\n paths.push(arg)\n }\n }\n\n if (opt.preserveRoot !== false) {\n for (const path of paths.map(p => resolve(p))) {\n if (path === parse(path).root) {\n console.error(`rimraf: it is dangerous to operate recursively on '/'`)\n console.error('use --no-preserve-root to override this failsafe')\n return 1\n }\n }\n }\n\n if (!paths.length) {\n console.error('rimraf: must provide a path to remove')\n runHelpForUsage()\n return 1\n }\n\n if (impl === rimraf.native && (interactive || opt.filter)) {\n console.error('native implementation does not support -v or -i')\n runHelpForUsage()\n return 1\n }\n\n if (interactive) {\n await interactiveRimraf(impl, paths, opt)\n } else {\n 
await impl(paths, opt)\n }\n\n return 0\n}\nmain.help = help\n\nexport default main\n\nif (process.env.__TESTING_RIMRAF_BIN__ !== '1') {\n const args = process.argv.slice(2)\n main(...args).then(\n code => process.exit(code),\n er => {\n console.error(er)\n process.exit(1)\n },\n )\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts new file mode 100644 index 00000000000000..a68e925b249a8d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts @@ -0,0 +1,3 @@ +export declare const defaultTmp: (path: string) => Promise<string>; +export declare const defaultTmpSync: (path: string) => string; +//# sourceMappingURL=default-tmp.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts.map new file mode 100644 index 00000000000000..d0b35f2786233b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"default-tmp.d.ts","sourceRoot":"","sources":["../../src/default-tmp.ts"],"names":[],"mappings":"AAiEA,eAAO,MAAM,UAAU,SAnCc,MAAM,oBAoCe,CAAA;AAC1D,eAAO,MAAM,cAAc,SArBQ,MAAM,WAsByB,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js new file mode 100644 index 00000000000000..fb0846af5c3acf --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js @@ -0,0 +1,55 @@ +// The default temporary folder location for use in the windows algorithm. +// It's TEMPting to use dirname(path), since that's guaranteed to be on the +// same device. However, this means that: +// rimraf(path).then(() => rimraf(dirname(path))) +// will often fail with EBUSY, because the parent dir contains +// marked-for-deletion directory entries (which do not show up in readdir). +// The approach here is to use os.tmpdir() if it's on the same drive letter, +// or resolve(path, '\\temp') if it exists, or the root of the drive if not. +// On Posix (not that you'd be likely to use the windows algorithm there), +// it uses os.tmpdir() always. +import { tmpdir } from 'os'; +import { parse, resolve } from 'path'; +import { promises, statSync } from './fs.js'; +import platform from './platform.js'; +const { stat } = promises; +const isDirSync = (path) => { + try { + return statSync(path).isDirectory(); + } + catch (er) { + return false; + } +}; +const isDir = (path) => stat(path).then(st => st.isDirectory(), () => false); +const win32DefaultTmp = async (path) => { + const { root } = parse(path); + const tmp = tmpdir(); + const { root: tmpRoot } = parse(tmp); + if (root.toLowerCase() === tmpRoot.toLowerCase()) { + return tmp; + } + const driveTmp = resolve(root, '/temp'); + if (await isDir(driveTmp)) { + return driveTmp; + } + return root; +}; +const win32DefaultTmpSync = (path) => { + const { root } = parse(path); + const tmp = tmpdir(); + const { root: tmpRoot } = parse(tmp); + if (root.toLowerCase() === tmpRoot.toLowerCase()) { + return tmp; + } + const driveTmp = resolve(root, '/temp'); + if (isDirSync(driveTmp)) { + return driveTmp; + } + return root; +}; +const posixDefaultTmp = async () => tmpdir(); +const posixDefaultTmpSync = () => tmpdir(); +export const defaultTmp = platform === 'win32' ? win32DefaultTmp : posixDefaultTmp; +export const defaultTmpSync = platform === 'win32' ? 
win32DefaultTmpSync : posixDefaultTmpSync; +//# sourceMappingURL=default-tmp.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js.map b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js.map new file mode 100644 index 00000000000000..ea6af37802aa83 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js.map @@ -0,0 +1 @@ +{"version":3,"file":"default-tmp.js","sourceRoot":"","sources":["../../src/default-tmp.ts"],"names":[],"mappings":"AAAA,0EAA0E;AAC1E,2EAA2E;AAC3E,0CAA0C;AAC1C,iDAAiD;AACjD,8DAA8D;AAC9D,2EAA2E;AAC3E,4EAA4E;AAC5E,4EAA4E;AAC5E,0EAA0E;AAC1E,8BAA8B;AAC9B,OAAO,EAAE,MAAM,EAAE,MAAM,IAAI,CAAA;AAC3B,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AACrC,OAAO,EAAE,QAAQ,EAAE,QAAQ,EAAE,MAAM,SAAS,CAAA;AAC5C,OAAO,QAAQ,MAAM,eAAe,CAAA;AACpC,MAAM,EAAE,IAAI,EAAE,GAAG,QAAQ,CAAA;AAEzB,MAAM,SAAS,GAAG,CAAC,IAAY,EAAE,EAAE;IACjC,IAAI,CAAC;QACH,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,CAAA;IACrC,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,OAAO,KAAK,CAAA;IACd,CAAC;AACH,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,IAAY,EAAE,EAAE,CAC7B,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CACb,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,WAAW,EAAE,EACtB,GAAG,EAAE,CAAC,KAAK,CACZ,CAAA;AAEH,MAAM,eAAe,GAAG,KAAK,EAAE,IAAY,EAAE,EAAE;IAC7C,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,GAAG,GAAG,MAAM,EAAE,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,KAAK,CAAC,GAAG,CAAC,CAAA;IACpC,IAAI,IAAI,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QACjD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACvC,IAAI,MAAM,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC1B,OAAO,QAAQ,CAAA;IACjB,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,CAAC,IAAY,EAAE,EAAE;IAC3C,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,GAAG,GAAG,MAAM,EAAE,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,KAAK,CAAC,GAAG,CAAC,CAAA;IACpC,IAAI,IAAI,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QACjD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACvC,IAAI,SAAS,CAAC,QAAQ,CAAC,EAAE,CAAC;QACxB,OAAO,QAAQ,CAAA;IACjB,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE,CAAC,MAAM,EAAE,CAAA;AAC5C,MAAM,mBAAmB,GAAG,GAAG,EAAE,CAAC,MAAM,EAAE,CAAA;AAE1C,MAAM,CAAC,MAAM,UAAU,GACrB,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,eAAe,CAAA;AAC1D,MAAM,CAAC,MAAM,cAAc,GACzB,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAA","sourcesContent":["// The default temporary folder location for use in the windows algorithm.\n// It's TEMPting to use dirname(path), since that's guaranteed to be on the\n// same device. 
However, this means that:\n// rimraf(path).then(() => rimraf(dirname(path)))\n// will often fail with EBUSY, because the parent dir contains\n// marked-for-deletion directory entries (which do not show up in readdir).\n// The approach here is to use os.tmpdir() if it's on the same drive letter,\n// or resolve(path, '\\\\temp') if it exists, or the root of the drive if not.\n// On Posix (not that you'd be likely to use the windows algorithm there),\n// it uses os.tmpdir() always.\nimport { tmpdir } from 'os'\nimport { parse, resolve } from 'path'\nimport { promises, statSync } from './fs.js'\nimport platform from './platform.js'\nconst { stat } = promises\n\nconst isDirSync = (path: string) => {\n try {\n return statSync(path).isDirectory()\n } catch (er) {\n return false\n }\n}\n\nconst isDir = (path: string) =>\n stat(path).then(\n st => st.isDirectory(),\n () => false,\n )\n\nconst win32DefaultTmp = async (path: string) => {\n const { root } = parse(path)\n const tmp = tmpdir()\n const { root: tmpRoot } = parse(tmp)\n if (root.toLowerCase() === tmpRoot.toLowerCase()) {\n return tmp\n }\n\n const driveTmp = resolve(root, '/temp')\n if (await isDir(driveTmp)) {\n return driveTmp\n }\n\n return root\n}\n\nconst win32DefaultTmpSync = (path: string) => {\n const { root } = parse(path)\n const tmp = tmpdir()\n const { root: tmpRoot } = parse(tmp)\n if (root.toLowerCase() === tmpRoot.toLowerCase()) {\n return tmp\n }\n\n const driveTmp = resolve(root, '/temp')\n if (isDirSync(driveTmp)) {\n return driveTmp\n }\n\n return root\n}\n\nconst posixDefaultTmp = async () => tmpdir()\nconst posixDefaultTmpSync = () => tmpdir()\n\nexport const defaultTmp =\n platform === 'win32' ? win32DefaultTmp : posixDefaultTmp\nexport const defaultTmpSync =\n platform === 'win32' ? 
win32DefaultTmpSync : posixDefaultTmpSync\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts new file mode 100644 index 00000000000000..20e76a82c4942e --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts @@ -0,0 +1,3 @@ +export declare const fixEPERM: (fn: (path: string) => Promise<any>) => (path: string) => Promise<any>; +export declare const fixEPERMSync: (fn: (path: string) => any) => (path: string) => any; +//# sourceMappingURL=fix-eperm.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts.map new file mode 100644 index 00000000000000..ac17d6f4e060bb --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"fix-eperm.d.ts","sourceRoot":"","sources":["../../src/fix-eperm.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,QAAQ,OACd,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,GAAG,CAAC,YAAkB,MAAM,iBAsB1D,CAAA;AAEH,eAAO,MAAM,YAAY,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,GAAG,YAAY,MAAM,QAsBvE,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js new file mode 100644 index 00000000000000..633c0e119df1f7 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js @@ -0,0 +1,53 @@ +import { chmodSync, promises } from './fs.js'; +const { chmod } = promises; +export const fixEPERM = (fn) => async (path) => { + try { + return await fn(path); + } + catch (er) { + const fer = er; + if (fer?.code === 'ENOENT') { + return; + } + if (fer?.code === 'EPERM') { + try { + await chmod(path, 0o666); + } + catch (er2) { + const fer2 = er2; + if (fer2?.code === 'ENOENT') { + return; + } + throw er; + } + return await fn(path); + } + throw er; + } +}; +export const fixEPERMSync = (fn) => (path) => { + try { + return fn(path); + } + catch (er) { + const fer = er; + if (fer?.code === 'ENOENT') { + return; + } + if (fer?.code === 'EPERM') { + try { + chmodSync(path, 0o666); + } + catch (er2) { + const fer2 = er2; + if (fer2?.code === 'ENOENT') { + return; + } + throw er; + } + return fn(path); + } + throw er; + } +}; +//# sourceMappingURL=fix-eperm.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js.map b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js.map new file mode 100644 index 00000000000000..a6aa032b4891e8 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js.map @@ -0,0 +1 @@ 
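
The fix-eperm hunks above wrap a removal call so that EPERM (Windows' usual answer for a read-only entry) triggers a chmod to 0o666 followed by exactly one retry, while ENOENT at any step counts as success. A minimal standalone sketch of the same pattern, assuming an ESM module with top-level await; the withEPERMFix name and demo.txt path are illustrative only, not part of the patch:

import { chmod, unlink, writeFile } from 'fs/promises'

// Same shape as the patch's fixEPERM: swallow ENOENT, retry once after
// loosening the file mode on EPERM, rethrow everything else.
const withEPERMFix = fn => async path => {
  try {
    return await fn(path)
  } catch (er) {
    if (er?.code === 'ENOENT') return      // already gone: treat as success
    if (er?.code !== 'EPERM') throw er     // only EPERM earns the retry
    try {
      await chmod(path, 0o666)             // clear the read-only bit
    } catch (er2) {
      if (er2?.code === 'ENOENT') return   // vanished between the two calls
      throw er                             // chmod failed: surface the original error
    }
    return fn(path)                        // second and final attempt
  }
}

// Usage: on Windows, a plain unlink() of a read-only file reports EPERM;
// the wrapper makes it succeed. (On POSIX the first attempt already succeeds.)
await writeFile('demo.txt', 'x', { mode: 0o444 })
await withEPERMFix(unlink)('demo.txt')
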
+{"version":3,"file":"fix-eperm.js","sourceRoot":"","sources":["../../src/fix-eperm.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE,MAAM,SAAS,CAAA;AAC7C,MAAM,EAAE,KAAK,EAAE,GAAG,QAAQ,CAAA;AAE1B,MAAM,CAAC,MAAM,QAAQ,GACnB,CAAC,EAAkC,EAAE,EAAE,CAAC,KAAK,EAAE,IAAY,EAAE,EAAE;IAC7D,IAAI,CAAC;QACH,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;IACvB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC3B,OAAM;QACR,CAAC;QACD,IAAI,GAAG,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YAC1B,IAAI,CAAC;gBACH,MAAM,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YAC1B,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,MAAM,IAAI,GAAG,GAA4B,CAAA;gBACzC,IAAI,IAAI,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBAC5B,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;YACD,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;QACvB,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAEH,MAAM,CAAC,MAAM,YAAY,GAAG,CAAC,EAAyB,EAAE,EAAE,CAAC,CAAC,IAAY,EAAE,EAAE;IAC1E,IAAI,CAAC;QACH,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;IACjB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC3B,OAAM;QACR,CAAC;QACD,IAAI,GAAG,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YAC1B,IAAI,CAAC;gBACH,SAAS,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YACxB,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,MAAM,IAAI,GAAG,GAA4B,CAAA;gBACzC,IAAI,IAAI,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBAC5B,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;YACD,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;QACjB,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA","sourcesContent":["import { chmodSync, promises } from './fs.js'\nconst { chmod } = promises\n\nexport const fixEPERM =\n (fn: (path: string) => Promise<any>) => async (path: string) => {\n try {\n return await fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOENT') {\n return\n }\n if (fer?.code === 'EPERM') {\n try {\n await chmod(path, 0o666)\n } catch (er2) {\n const fer2 = er2 as NodeJS.ErrnoException\n if (fer2?.code === 'ENOENT') {\n return\n }\n throw er\n }\n return await fn(path)\n }\n throw er\n }\n }\n\nexport const fixEPERMSync = (fn: (path: string) => any) => (path: string) => {\n try {\n return fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOENT') {\n return\n }\n if (fer?.code === 'EPERM') {\n try {\n chmodSync(path, 0o666)\n } catch (er2) {\n const fer2 = er2 as NodeJS.ErrnoException\n if (fer2?.code === 'ENOENT') {\n return\n }\n throw er\n }\n return fn(path)\n }\n throw er\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts b/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts new file mode 100644 index 00000000000000..9e4e95b4e7a411 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts @@ -0,0 +1,17 @@ +import fs, { Dirent } from 'fs'; +export { chmodSync, mkdirSync, renameSync, rmdirSync, rmSync, statSync, lstatSync, unlinkSync, } from 'fs'; +export declare const readdirSync: (path: fs.PathLike) => Dirent[]; +export declare const promises: { + chmod: (path: fs.PathLike, mode: fs.Mode) => Promise<void>; + mkdir: (path: fs.PathLike, options?: fs.Mode | (fs.MakeDirectoryOptions & { + recursive?: boolean | null; + }) | undefined | null) => Promise<string | undefined>; + readdir: (path: fs.PathLike) => Promise<Dirent[]>; + rename: (oldPath: fs.PathLike, newPath: fs.PathLike) => Promise<void>; + rm: (path: fs.PathLike, options: fs.RmOptions) => Promise<void>; + rmdir: (path: fs.PathLike) => Promise<void>; + stat: (path: fs.PathLike) => Promise<fs.Stats>; + lstat: (path: fs.PathLike) => Promise<fs.Stats>; + unlink: (path: fs.PathLike) => Promise<void>; +}; +//# 
sourceMappingURL=fs.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts.map new file mode 100644 index 00000000000000..8c8b1034cbcd27 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"fs.d.ts","sourceRoot":"","sources":["../../src/fs.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,EAAE,EAAE,MAAM,EAAE,MAAM,IAAI,CAAA;AAG/B,OAAO,EACL,SAAS,EACT,SAAS,EACT,UAAU,EACV,SAAS,EACT,MAAM,EACN,QAAQ,EACR,SAAS,EACT,UAAU,GACX,MAAM,IAAI,CAAA;AAGX,eAAO,MAAM,WAAW,SAAU,EAAE,CAAC,QAAQ,KAAG,MAAM,EACf,CAAA;AA+DvC,eAAO,MAAM,QAAQ;kBAxDA,EAAE,CAAC,QAAQ,QAAQ,EAAE,CAAC,IAAI,KAAG,OAAO,CAAC,IAAI,CAAC;kBAMvD,EAAE,CAAC,QAAQ,YAEb,EAAE,CAAC,IAAI,GACP,CAAC,EAAE,CAAC,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,GAAG,IAAI,CAAA;KAAE,CAAC,GAC1D,SAAS,GACT,IAAI,KACP,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;oBAKP,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,MAAM,EAAE,CAAC;sBAO7B,EAAE,CAAC,QAAQ,WAAW,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;eAOxD,EAAE,CAAC,QAAQ,WAAW,EAAE,CAAC,SAAS,KAAG,OAAO,CAAC,IAAI,CAAC;kBAK/C,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;iBAK5B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC;kBAK9B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC;mBAK9B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;CAehD,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fs.js b/deps/npm/node_modules/rimraf/dist/esm/fs.js new file mode 100644 index 00000000000000..f9422ce992a546 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fs.js @@ -0,0 +1,31 @@ +// promisify ourselves, because older nodes don't have fs.promises +import fs from 'fs'; +// sync ones just take the sync version from node +export { chmodSync, mkdirSync, renameSync, rmdirSync, rmSync, statSync, lstatSync, unlinkSync, } from 'fs'; +import { readdirSync as rdSync } from 'fs'; +export const readdirSync = (path) => rdSync(path, { withFileTypes: true }); +// unrolled for better inlining, this seems to get better performance +// than something like: +// const makeCb = (res, rej) => (er, ...d) => er ? rej(er) : res(...d) +// which would be a bit cleaner. +const chmod = (path, mode) => new Promise((res, rej) => fs.chmod(path, mode, (er, ...d) => (er ? rej(er) : res(...d)))); +const mkdir = (path, options) => new Promise((res, rej) => fs.mkdir(path, options, (er, made) => (er ? rej(er) : res(made)))); +const readdir = (path) => new Promise((res, rej) => fs.readdir(path, { withFileTypes: true }, (er, data) => er ? rej(er) : res(data))); +const rename = (oldPath, newPath) => new Promise((res, rej) => fs.rename(oldPath, newPath, (er, ...d) => er ? rej(er) : res(...d))); +const rm = (path, options) => new Promise((res, rej) => fs.rm(path, options, (er, ...d) => (er ? rej(er) : res(...d)))); +const rmdir = (path) => new Promise((res, rej) => fs.rmdir(path, (er, ...d) => (er ? rej(er) : res(...d)))); +const stat = (path) => new Promise((res, rej) => fs.stat(path, (er, data) => (er ? rej(er) : res(data)))); +const lstat = (path) => new Promise((res, rej) => fs.lstat(path, (er, data) => (er ? rej(er) : res(data)))); +const unlink = (path) => new Promise((res, rej) => fs.unlink(path, (er, ...d) => (er ? 
rej(er) : res(...d)))); +export const promises = { + chmod, + mkdir, + readdir, + rename, + rm, + rmdir, + stat, + lstat, + unlink, +}; +//# sourceMappingURL=fs.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fs.js.map b/deps/npm/node_modules/rimraf/dist/esm/fs.js.map new file mode 100644 index 00000000000000..c4c5d0f2cc42b4 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fs.js.map @@ -0,0 +1 @@ +{"version":3,"file":"fs.js","sourceRoot":"","sources":["../../src/fs.ts"],"names":[],"mappings":"AAAA,kEAAkE;AAElE,OAAO,EAAc,MAAM,IAAI,CAAA;AAE/B,iDAAiD;AACjD,OAAO,EACL,SAAS,EACT,SAAS,EACT,UAAU,EACV,SAAS,EACT,MAAM,EACN,QAAQ,EACR,SAAS,EACT,UAAU,GACX,MAAM,IAAI,CAAA;AAEX,OAAO,EAAE,WAAW,IAAI,MAAM,EAAE,MAAM,IAAI,CAAA;AAC1C,MAAM,CAAC,MAAM,WAAW,GAAG,CAAC,IAAiB,EAAY,EAAE,CACzD,MAAM,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAA;AAEvC,qEAAqE;AACrE,uBAAuB;AACvB,sEAAsE;AACtE,gCAAgC;AAEhC,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAE,IAAa,EAAiB,EAAE,CAChE,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACtE,CAAA;AAEH,MAAM,KAAK,GAAG,CACZ,IAAiB,EACjB,OAIQ,EACqB,EAAE,CAC/B,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAClE,CAAA;AAEH,MAAM,OAAO,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACvD,IAAI,OAAO,CAAW,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACjC,EAAE,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CACrD,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CACzB,CACF,CAAA;AAEH,MAAM,MAAM,GAAG,CAAC,OAAoB,EAAE,OAAoB,EAAiB,EAAE,CAC3E,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAC9C,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CACzB,CACF,CAAA;AAEH,MAAM,EAAE,GAAG,CAAC,IAAiB,EAAE,OAAqB,EAAiB,EAAE,CACrE,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACtE,CAAA;AAEH,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAiB,EAAE,CACjD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAChE,CAAA;AAEH,MAAM,IAAI,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACpD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CACxD,CAAA;AAEH,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACrD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CACzD,CAAA;AAEH,MAAM,MAAM,GAAG,CAAC,IAAiB,EAAiB,EAAE,CAClD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACjE,CAAA;AAEH,MAAM,CAAC,MAAM,QAAQ,GAAG;IACtB,KAAK;IACL,KAAK;IACL,OAAO;IACP,MAAM;IACN,EAAE;IACF,KAAK;IACL,I
AAI;IACJ,KAAK;IACL,MAAM;CACP,CAAA","sourcesContent":["// promisify ourselves, because older nodes don't have fs.promises\n\nimport fs, { Dirent } from 'fs'\n\n// sync ones just take the sync version from node\nexport {\n chmodSync,\n mkdirSync,\n renameSync,\n rmdirSync,\n rmSync,\n statSync,\n lstatSync,\n unlinkSync,\n} from 'fs'\n\nimport { readdirSync as rdSync } from 'fs'\nexport const readdirSync = (path: fs.PathLike): Dirent[] =>\n rdSync(path, { withFileTypes: true })\n\n// unrolled for better inlining, this seems to get better performance\n// than something like:\n// const makeCb = (res, rej) => (er, ...d) => er ? rej(er) : res(...d)\n// which would be a bit cleaner.\n\nconst chmod = (path: fs.PathLike, mode: fs.Mode): Promise =>\n new Promise((res, rej) =>\n fs.chmod(path, mode, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst mkdir = (\n path: fs.PathLike,\n options?:\n | fs.Mode\n | (fs.MakeDirectoryOptions & { recursive?: boolean | null })\n | undefined\n | null,\n): Promise =>\n new Promise((res, rej) =>\n fs.mkdir(path, options, (er, made) => (er ? rej(er) : res(made))),\n )\n\nconst readdir = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.readdir(path, { withFileTypes: true }, (er, data) =>\n er ? rej(er) : res(data),\n ),\n )\n\nconst rename = (oldPath: fs.PathLike, newPath: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.rename(oldPath, newPath, (er, ...d: any[]) =>\n er ? rej(er) : res(...d),\n ),\n )\n\nconst rm = (path: fs.PathLike, options: fs.RmOptions): Promise =>\n new Promise((res, rej) =>\n fs.rm(path, options, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst rmdir = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.rmdir(path, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst stat = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.stat(path, (er, data) => (er ? rej(er) : res(data))),\n )\n\nconst lstat = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.lstat(path, (er, data) => (er ? rej(er) : res(data))),\n )\n\nconst unlink = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.unlink(path, (er, ...d: any[]) => (er ? 
rej(er) : res(...d))),\n )\n\nexport const promises = {\n chmod,\n mkdir,\n readdir,\n rename,\n rm,\n rmdir,\n stat,\n lstat,\n unlink,\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts new file mode 100644 index 00000000000000..f158cc27025b16 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts @@ -0,0 +1,3 @@ +export declare const ignoreENOENT: (p: Promise<any>) => Promise<any>; +export declare const ignoreENOENTSync: (fn: () => any) => any; +//# sourceMappingURL=ignore-enoent.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts.map new file mode 100644 index 00000000000000..2cfb3bbac5fab7 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore-enoent.d.ts","sourceRoot":"","sources":["../../src/ignore-enoent.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,YAAY,MAAa,OAAO,CAAC,GAAG,CAAC,iBAK9C,CAAA;AAEJ,eAAO,MAAM,gBAAgB,OAAQ,MAAM,GAAG,QAQ7C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js new file mode 100644 index 00000000000000..753f4811cd384f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js @@ -0,0 +1,16 @@ +export const ignoreENOENT = async (p) => p.catch(er => { + if (er.code !== 'ENOENT') { + throw er; + } +}); +export const ignoreENOENTSync = (fn) => { + try { + return fn(); + } + catch (er) { + if (er?.code !== 'ENOENT') { + throw er; + } + } +}; +//# sourceMappingURL=ignore-enoent.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js.map b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js.map new file mode 100644 index 00000000000000..acffb146233e13 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore-enoent.js","sourceRoot":"","sources":["../../src/ignore-enoent.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,MAAM,YAAY,GAAG,KAAK,EAAE,CAAe,EAAE,EAAE,CACpD,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;IACX,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;QACzB,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAC,CAAA;AAEJ,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAAC,EAAa,EAAE,EAAE;IAChD,IAAI,CAAC;QACH,OAAO,EAAE,EAAE,CAAA;IACb,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YACrD,MAAM,EAAE,CAAA;QACV,CAAC;IACH,CAAC;AACH,CAAC,CAAA","sourcesContent":["export const ignoreENOENT = async (p: Promise<any>) =>\n p.catch(er => {\n if (er.code !== 'ENOENT') {\n throw er\n }\n })\n\nexport const ignoreENOENTSync = (fn: () => any) => {\n try {\n return fn()\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code !== 'ENOENT') {\n throw er\n }\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/index.d.ts b/deps/npm/node_modules/rimraf/dist/esm/index.d.ts new file mode 100644 index 00000000000000..9ec4a124ab613d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/index.d.ts @@ -0,0 +1,50 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './opt-arg.js'; +export { assertRimrafOptions, isRimrafOptions, type RimrafAsyncOptions, type RimrafOptions, type RimrafSyncOptions, } from './opt-arg.js'; +export declare const nativeSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const 
native: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const manualSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const manual: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const windowsSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const windows: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const posixSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const posix: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const moveRemoveSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const moveRemove: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const rimrafSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const rimraf: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + rimraf: (path: string | string[], opt?: RimrafAsyncOptions) => Promise; + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + rimrafSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + manual: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + manualSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + native: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + nativeSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + posix: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + posixSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + windows: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + windowsSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + moveRemove: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + moveRemoveSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/index.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/index.d.ts.map new file mode 100644 index 00000000000000..0dc659ca730252 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/index.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAGL,kBAAkB,EAClB,iBAAiB,EAClB,MAAM,cAAc,CAAA;AASrB,OAAO,EACL,mBAAmB,EACnB,eAAe,EACf,KAAK,kBAAkB,EACvB,KAAK,aAAa,EAClB,KAAK,iBAAiB,GACvB,MAAM,cAAc,CAAA;AAqCrB,eAAO,MAAM,UAAU,SAdd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAcF,CAAA;AACpD,eAAO,MAAM,MAAM,UAjCT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAegB,CAAA;AAE7E,eAAO,MAAM,UAAU,SAjBd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAiBF,CAAA;AACpD,eAAO,MAAM,MAAM,UApCT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAkBgB,CAAA;AAE7E,eAAO,MAAM,WAAW,SApBf,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAoBA,CAAA;AACtD,eAAO,MAAM,OAAO,UAvCV,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAqBmB,CAAA;AAEhF,eAAO,MAAM,SAAS,SAvBb,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAuBJ,CAAA;AAClD,eAAO,MAAM,KAAK,UA1CR,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAwBa,CAAA;AAE1E,eAAO,MAAM,cAAc,SA1BlB,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OA0BM,CAAA;AAC5D,eAAO,MAAM,UAAU,UA7Cb,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CA6B3D,CAAA;AAEF,eAAO,MAAM,UAAU,SA/Bd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAmCrD,CAAA;AACD,eAAO,MAAM,IAAI,SApCR,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAoCxB,CAAA;AAK9B,eAAO,MAAM,MAAM,UA3DT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;mBAFX,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;oBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;oBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;mBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;sBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;qBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;wBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;wBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;2BAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAuD3D,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/index.js b/deps/npm/node_modules/rimraf/dist/esm/index.js new file mode 100644 index 00000000000000..d94d6f81a485c9 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/index.js @@ -0,0 +1,70 @@ +import { glob, globSync } from 'glob'; +import { optArg, optArgSync, } from './opt-arg.js'; +import pathArg from './path-arg.js'; +import { rimrafManual, rimrafManualSync } from './rimraf-manual.js'; +import { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'; +import { rimrafNative, rimrafNativeSync } from './rimraf-native.js'; +import { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'; +import { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'; +import { useNative, useNativeSync } from './use-native.js'; +export { assertRimrafOptions, isRimrafOptions, } from './opt-arg.js'; +const wrap = (fn) => async 
(path, opt) => { + const options = optArg(opt); + if (options.glob) { + path = await glob(path, options.glob); + } + if (Array.isArray(path)) { + return !!(await Promise.all(path.map(p => fn(pathArg(p, options), options)))).reduce((a, b) => a && b, true); + } + else { + return !!(await fn(pathArg(path, options), options)); + } +}; +const wrapSync = (fn) => (path, opt) => { + const options = optArgSync(opt); + if (options.glob) { + path = globSync(path, options.glob); + } + if (Array.isArray(path)) { + return !!path + .map(p => fn(pathArg(p, options), options)) + .reduce((a, b) => a && b, true); + } + else { + return !!fn(pathArg(path, options), options); + } +}; +export const nativeSync = wrapSync(rimrafNativeSync); +export const native = Object.assign(wrap(rimrafNative), { sync: nativeSync }); +export const manualSync = wrapSync(rimrafManualSync); +export const manual = Object.assign(wrap(rimrafManual), { sync: manualSync }); +export const windowsSync = wrapSync(rimrafWindowsSync); +export const windows = Object.assign(wrap(rimrafWindows), { sync: windowsSync }); +export const posixSync = wrapSync(rimrafPosixSync); +export const posix = Object.assign(wrap(rimrafPosix), { sync: posixSync }); +export const moveRemoveSync = wrapSync(rimrafMoveRemoveSync); +export const moveRemove = Object.assign(wrap(rimrafMoveRemove), { + sync: moveRemoveSync, +}); +export const rimrafSync = wrapSync((path, opt) => useNativeSync(opt) ? + rimrafNativeSync(path, opt) + : rimrafManualSync(path, opt)); +export const sync = rimrafSync; +const rimraf_ = wrap((path, opt) => useNative(opt) ? rimrafNative(path, opt) : rimrafManual(path, opt)); +export const rimraf = Object.assign(rimraf_, { + rimraf: rimraf_, + sync: rimrafSync, + rimrafSync: rimrafSync, + manual, + manualSync, + native, + nativeSync, + posix, + posixSync, + windows, + windowsSync, + moveRemove, + moveRemoveSync, +}); +rimraf.rimraf = rimraf; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/index.js.map b/deps/npm/node_modules/rimraf/dist/esm/index.js.map new file mode 100644 index 00000000000000..0c8ca64c74f236 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,MAAM,CAAA;AACrC,OAAO,EACL,MAAM,EACN,UAAU,GAGX,MAAM,cAAc,CAAA;AACrB,OAAO,OAAO,MAAM,eAAe,CAAA;AACnC,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,gBAAgB,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAA;AAChF,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAA;AAChE,OAAO,EAAE,aAAa,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAA;AACtE,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAE1D,OAAO,EACL,mBAAmB,EACnB,eAAe,GAIhB,MAAM,cAAc,CAAA;AAErB,MAAM,IAAI,GACR,CAAC,EAA0D,EAAE,EAAE,CAC/D,KAAK,EACH,IAAuB,EACvB,GAAwB,EACN,EAAE;IACpB,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAA;IAC3B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC;QACjB,IAAI,GAAG,MAAM,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;IACvC,CAAC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC;QACxB,OAAO,CAAC,CAAC,CACP,MAAM,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC,CACnE,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAClC,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC,CAAA;IACtD,CAAC;AACH,CAAC,CAAA;AAEH,MAAM,QAAQ,GACZ,CAAC,EAAgD,EAAE,EAAE,CACrD,CAAC,IAAuB,EAAE,GAAuB,EAAW,EAAE;IAC5D,MAAM,OAAO,GAAG,UAAU,CAAC,GAAG,CAAC,CAAA;IAC/B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC;QACjB,IAAI,GAAG,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;IACrC,CAAC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC;QACxB,OAAO,CAAC,CAAC,IAAI;aACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC;aAC1C,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IACnC,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAA;IAC9C,CAAC;AACH,CAAC,CAAA;AAEH,MAAM,CAAC,MAAM,UAAU,GAAG,QAAQ,CAAC,gBAAgB,CAAC,CAAA;AACpD,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC,CAAA;AAE7E,MAAM,CAAC,MAAM,UAAU,GAAG,QAAQ,CAAC,gBAAgB,CAAC,CAAA;AACpD,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC,CAAA;AAE7E,MAAM,CAAC,MAAM,WAAW,GAAG,QAAQ,CAAC,iBAAiB,CAAC,CAAA;AACtD,MAAM,CAAC,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,CAAC,CAAA;AAEhF,MAAM,CAAC,MAAM,SAAS,GAAG,QAAQ,CAAC,eAAe,CAAC,CAAA;AAClD,MAAM,CAAC,MAAM,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAE1E,MAAM,CAAC,MAAM,cAAc,GAAG,QAAQ,CAAC,oBAAoB,CAAC,CAAA;AAC5D,MAAM,CAAC,MAAM,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,EAAE;IAC9D,IAAI,EAAE,cAAc;CACrB,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,UAAU,GAAG,QAAQ,CAAC,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,CAC/C,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC;IAClB,gBAAgB,CAAC,IAAI,EAAE,GAAG,CAAC;IAC7B,CAAC,CAAC,gBAAgB,CAAC,IAAI,EAAE,GAAG,CAAC,CAC9B,CAAA;AACD,MAAM,CAAC,MAAM,IAAI,GAAG,UAAU,CAAA;AAE9B,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,CACjC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,GAAG,CAAC,CACnE,CAAA;AACD,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE;IAC3C,MAAM,EAAE,OAAO;IACf,IAAI,EAAE,UAAU;IAChB,UAAU,EAAE,UAAU;IACtB,MAAM;IACN,UAAU;IACV,MAAM;IACN,UAAU;IACV,KAAK;IACL,SAAS;IACT,OAAO;IACP,WAAW;IACX,UAAU;IACV,cAAc;CACf,CAAC,CAAA;AACF,MAAM,CAAC,MAAM,GAAG,MAAM,CAAA","sourcesContent":["import { glob, globSync } from 'glob'\nimport {\n optArg,\n 
optArgSync,\n RimrafAsyncOptions,\n RimrafSyncOptions,\n} from './opt-arg.js'\nimport pathArg from './path-arg.js'\nimport { rimrafManual, rimrafManualSync } from './rimraf-manual.js'\nimport { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'\nimport { rimrafNative, rimrafNativeSync } from './rimraf-native.js'\nimport { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'\nimport { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'\nimport { useNative, useNativeSync } from './use-native.js'\n\nexport {\n assertRimrafOptions,\n isRimrafOptions,\n type RimrafAsyncOptions,\n type RimrafOptions,\n type RimrafSyncOptions,\n} from './opt-arg.js'\n\nconst wrap =\n (fn: (p: string, o: RimrafAsyncOptions) => Promise) =>\n async (\n path: string | string[],\n opt?: RimrafAsyncOptions,\n ): Promise => {\n const options = optArg(opt)\n if (options.glob) {\n path = await glob(path, options.glob)\n }\n if (Array.isArray(path)) {\n return !!(\n await Promise.all(path.map(p => fn(pathArg(p, options), options)))\n ).reduce((a, b) => a && b, true)\n } else {\n return !!(await fn(pathArg(path, options), options))\n }\n }\n\nconst wrapSync =\n (fn: (p: string, o: RimrafSyncOptions) => boolean) =>\n (path: string | string[], opt?: RimrafSyncOptions): boolean => {\n const options = optArgSync(opt)\n if (options.glob) {\n path = globSync(path, options.glob)\n }\n if (Array.isArray(path)) {\n return !!path\n .map(p => fn(pathArg(p, options), options))\n .reduce((a, b) => a && b, true)\n } else {\n return !!fn(pathArg(path, options), options)\n }\n }\n\nexport const nativeSync = wrapSync(rimrafNativeSync)\nexport const native = Object.assign(wrap(rimrafNative), { sync: nativeSync })\n\nexport const manualSync = wrapSync(rimrafManualSync)\nexport const manual = Object.assign(wrap(rimrafManual), { sync: manualSync })\n\nexport const windowsSync = wrapSync(rimrafWindowsSync)\nexport const windows = Object.assign(wrap(rimrafWindows), { sync: windowsSync })\n\nexport const posixSync = wrapSync(rimrafPosixSync)\nexport const posix = Object.assign(wrap(rimrafPosix), { sync: posixSync })\n\nexport const moveRemoveSync = wrapSync(rimrafMoveRemoveSync)\nexport const moveRemove = Object.assign(wrap(rimrafMoveRemove), {\n sync: moveRemoveSync,\n})\n\nexport const rimrafSync = wrapSync((path, opt) =>\n useNativeSync(opt) ?\n rimrafNativeSync(path, opt)\n : rimrafManualSync(path, opt),\n)\nexport const sync = rimrafSync\n\nconst rimraf_ = wrap((path, opt) =>\n useNative(opt) ? 
rimrafNative(path, opt) : rimrafManual(path, opt),\n)\nexport const rimraf = Object.assign(rimraf_, {\n rimraf: rimraf_,\n sync: rimrafSync,\n rimrafSync: rimrafSync,\n manual,\n manualSync,\n native,\n nativeSync,\n posix,\n posixSync,\n windows,\n windowsSync,\n moveRemove,\n moveRemoveSync,\n})\nrimraf.rimraf = rimraf\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts new file mode 100644 index 00000000000000..c869d4ae85251b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts @@ -0,0 +1,34 @@ +import { Dirent, Stats } from 'fs'; +import { GlobOptions } from 'glob'; +export declare const isRimrafOptions: (o: any) => o is RimrafOptions; +export declare const assertRimrafOptions: (o: any) => void; +export interface RimrafAsyncOptions { + preserveRoot?: boolean; + tmp?: string; + maxRetries?: number; + retryDelay?: number; + backoff?: number; + maxBackoff?: number; + signal?: AbortSignal; + glob?: boolean | GlobOptions; + filter?: ((path: string, ent: Dirent | Stats) => boolean) | ((path: string, ent: Dirent | Stats) => Promise); +} +export interface RimrafSyncOptions extends RimrafAsyncOptions { + filter?: (path: string, ent: Dirent | Stats) => boolean; +} +export type RimrafOptions = RimrafSyncOptions | RimrafAsyncOptions; +export declare const optArg: (opt?: RimrafAsyncOptions) => (RimrafAsyncOptions & { + glob: GlobOptions & { + withFileTypes: false; + }; +}) | (RimrafAsyncOptions & { + glob: undefined; +}); +export declare const optArgSync: (opt?: RimrafSyncOptions) => (RimrafSyncOptions & { + glob: GlobOptions & { + withFileTypes: false; + }; +}) | (RimrafSyncOptions & { + glob: undefined; +}); +//# sourceMappingURL=opt-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts.map new file mode 100644 index 00000000000000..89e83b205ac628 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"opt-arg.d.ts","sourceRoot":"","sources":["../../src/opt-arg.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,IAAI,CAAA;AAClC,OAAO,EAAE,WAAW,EAAE,MAAM,MAAM,CAAA;AAKlC,eAAO,MAAM,eAAe,MAAO,GAAG,KAAG,CAAC,IAAI,aAUX,CAAA;AAEnC,eAAO,MAAM,mBAAmB,EAAE,CAAC,CAAC,EAAE,GAAG,KAAK,IAM7C,CAAA;AAED,MAAM,WAAW,kBAAkB;IACjC,YAAY,CAAC,EAAE,OAAO,CAAA;IACtB,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,IAAI,CAAC,EAAE,OAAO,GAAG,WAAW,CAAA;IAC5B,MAAM,CAAC,EACH,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAC,GAChD,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC,CAAA;CAC9D;AAED,MAAM,WAAW,iBAAkB,SAAQ,kBAAkB;IAC3D,MAAM,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAA;CACxD;AAED,MAAM,MAAM,aAAa,GAAG,iBAAiB,GAAG,kBAAkB,CAAA;AAqClE,eAAO,MAAM,MAAM,SAAS,kBAAkB;UA/BlC,WAAW,GAAG;QAAE,aAAa,EAAE,KAAK,CAAA;KAAE;;UAEjC,SAAS;EA6B0C,CAAA;AACpE,eAAO,MAAM,UAAU,SAAS,iBAAiB;UAhCrC,WAAW,GAAG;QAAE,aAAa,EAAE,KAAK,CAAA;KAAE;;UAEjC,SAAS;EA8B6C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js new file mode 100644 index 00000000000000..eacfe6c4325e22 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js @@ -0,0 +1,46 @@ +const 
typeOrUndef = (val, t) => typeof val === 'undefined' || typeof val === t; +export const isRimrafOptions = (o) => !!o && + typeof o === 'object' && + typeOrUndef(o.preserveRoot, 'boolean') && + typeOrUndef(o.tmp, 'string') && + typeOrUndef(o.maxRetries, 'number') && + typeOrUndef(o.retryDelay, 'number') && + typeOrUndef(o.backoff, 'number') && + typeOrUndef(o.maxBackoff, 'number') && + (typeOrUndef(o.glob, 'boolean') || (o.glob && typeof o.glob === 'object')) && + typeOrUndef(o.filter, 'function'); +export const assertRimrafOptions = (o) => { + if (!isRimrafOptions(o)) { + throw new Error('invalid rimraf options'); + } +}; +const optArgT = (opt) => { + assertRimrafOptions(opt); + const { glob, ...options } = opt; + if (!glob) { + return options; + } + const globOpt = glob === true ? + opt.signal ? + { signal: opt.signal } + : {} + : opt.signal ? + { + signal: opt.signal, + ...glob, + } + : glob; + return { + ...options, + glob: { + ...globOpt, + // always get absolute paths from glob, to ensure + // that we are referencing the correct thing. + absolute: true, + withFileTypes: false, + }, + }; +}; +export const optArg = (opt = {}) => optArgT(opt); +export const optArgSync = (opt = {}) => optArgT(opt); +//# sourceMappingURL=opt-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js.map b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js.map new file mode 100644 index 00000000000000..82ff94f6e05b8e --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"opt-arg.js","sourceRoot":"","sources":["../../src/opt-arg.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,GAAG,CAAC,GAAQ,EAAE,CAAS,EAAE,EAAE,CAC1C,OAAO,GAAG,KAAK,WAAW,IAAI,OAAO,GAAG,KAAK,CAAC,CAAA;AAEhD,MAAM,CAAC,MAAM,eAAe,GAAG,CAAC,CAAM,EAAsB,EAAE,CAC5D,CAAC,CAAC,CAAC;IACH,OAAO,CAAC,KAAK,QAAQ;IACrB,WAAW,CAAC,CAAC,CAAC,YAAY,EAAE,SAAS,CAAC;IACtC,WAAW,CAAC,CAAC,CAAC,GAAG,EAAE,QAAQ,CAAC;IAC5B,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,WAAW,CAAC,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC;IAChC,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,IAAI,OAAO,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC;IAC1E,WAAW,CAAC,CAAC,CAAC,MAAM,EAAE,UAAU,CAAC,CAAA;AAEnC,MAAM,CAAC,MAAM,mBAAmB,GAAqB,CACnD,CAAM,EACsB,EAAE;IAC9B,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC;QACxB,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAA;IAC3C,CAAC;AACH,CAAC,CAAA;AAsBD,MAAM,OAAO,GAAG,CACd,GAAM,EAKsB,EAAE;IAC9B,mBAAmB,CAAC,GAAG,CAAC,CAAA;IACxB,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAChC,IAAI,CAAC,IAAI,EAAE,CAAC;QACV,OAAO,OAAkC,CAAA;IAC3C,CAAC;IACD,MAAM,OAAO,GACX,IAAI,KAAK,IAAI,CAAC,CAAC;QACb,GAAG,CAAC,MAAM,CAAC,CAAC;YACV,EAAE,MAAM,EAAE,GAAG,CAAC,MAAM,EAAE;YACxB,CAAC,CAAC,EAAE;QACN,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YACZ;gBACE,MAAM,EAAE,GAAG,CAAC,MAAM;gBAClB,GAAG,IAAI;aACR;YACH,CAAC,CAAC,IAAI,CAAA;IACR,OAAO;QACL,GAAG,OAAO;QACV,IAAI,EAAE;YACJ,GAAG,OAAO;YACV,iDAAiD;YACjD,6CAA6C;YAC7C,QAAQ,EAAE,IAAI;YACd,aAAa,EAAE,KAAK;SACrB;KACsD,CAAA;AAC3D,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,MAAM,GAAG,CAAC,MAA0B,EAAE,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA;AACpE,MAAM,CAAC,MAAM,UAAU,GAAG,CAAC,MAAyB,EAAE,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA","sourcesContent":["import { Dirent, Stats } from 'fs'\nimport { GlobOptions } from 'glob'\n\nconst typeOrUndef = (val: any, t: string) =>\n typeof val === 'undefined' || typeof val === t\n\nexport const isRimrafOptions = (o: any): o is RimrafOptions =>\n !!o &&\n typeof o === 'object' &&\n 
typeOrUndef(o.preserveRoot, 'boolean') &&\n typeOrUndef(o.tmp, 'string') &&\n typeOrUndef(o.maxRetries, 'number') &&\n typeOrUndef(o.retryDelay, 'number') &&\n typeOrUndef(o.backoff, 'number') &&\n typeOrUndef(o.maxBackoff, 'number') &&\n (typeOrUndef(o.glob, 'boolean') || (o.glob && typeof o.glob === 'object')) &&\n typeOrUndef(o.filter, 'function')\n\nexport const assertRimrafOptions: (o: any) => void = (\n o: any,\n): asserts o is RimrafOptions => {\n if (!isRimrafOptions(o)) {\n throw new Error('invalid rimraf options')\n }\n}\n\nexport interface RimrafAsyncOptions {\n preserveRoot?: boolean\n tmp?: string\n maxRetries?: number\n retryDelay?: number\n backoff?: number\n maxBackoff?: number\n signal?: AbortSignal\n glob?: boolean | GlobOptions\n filter?:\n | ((path: string, ent: Dirent | Stats) => boolean)\n | ((path: string, ent: Dirent | Stats) => Promise)\n}\n\nexport interface RimrafSyncOptions extends RimrafAsyncOptions {\n filter?: (path: string, ent: Dirent | Stats) => boolean\n}\n\nexport type RimrafOptions = RimrafSyncOptions | RimrafAsyncOptions\n\nconst optArgT = (\n opt: T,\n):\n | (T & {\n glob: GlobOptions & { withFileTypes: false }\n })\n | (T & { glob: undefined }) => {\n assertRimrafOptions(opt)\n const { glob, ...options } = opt\n if (!glob) {\n return options as T & { glob: undefined }\n }\n const globOpt =\n glob === true ?\n opt.signal ?\n { signal: opt.signal }\n : {}\n : opt.signal ?\n {\n signal: opt.signal,\n ...glob,\n }\n : glob\n return {\n ...options,\n glob: {\n ...globOpt,\n // always get absolute paths from glob, to ensure\n // that we are referencing the correct thing.\n absolute: true,\n withFileTypes: false,\n },\n } as T & { glob: GlobOptions & { withFileTypes: false } }\n}\n\nexport const optArg = (opt: RimrafAsyncOptions = {}) => optArgT(opt)\nexport const optArgSync = (opt: RimrafSyncOptions = {}) => optArgT(opt)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/package.json b/deps/npm/node_modules/rimraf/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts b/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts new file mode 100644 index 00000000000000..c0b7e7cb4b15e3 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions } from './index.js'; +declare const pathArg: (path: string, opt?: RimrafAsyncOptions) => string; +export default pathArg; +//# sourceMappingURL=path-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts.map new file mode 100644 index 00000000000000..4fe93c3a8aec47 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.d.ts","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAA;AAG/C,QAAA,MAAM,OAAO,SAAU,MAAM,QAAO,kBAAkB,WAgDrD,CAAA;AAED,eAAe,OAAO,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/path-arg.js b/deps/npm/node_modules/rimraf/dist/esm/path-arg.js new file mode 100644 index 00000000000000..f32cb106756dbc --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/path-arg.js @@ -0,0 +1,47 @@ +import { parse, resolve } from 'path'; +import { inspect } from 
'util'; +import platform from './platform.js'; +const pathArg = (path, opt = {}) => { + const type = typeof path; + if (type !== 'string') { + const ctor = path && type === 'object' && path.constructor; + const received = ctor && ctor.name ? `an instance of ${ctor.name}` + : type === 'object' ? inspect(path) + : `type ${type} ${path}`; + const msg = 'The "path" argument must be of type string. ' + `Received ${received}`; + throw Object.assign(new TypeError(msg), { + path, + code: 'ERR_INVALID_ARG_TYPE', + }); + } + if (/\0/.test(path)) { + // simulate same failure that node raises + const msg = 'path must be a string without null bytes'; + throw Object.assign(new TypeError(msg), { + path, + code: 'ERR_INVALID_ARG_VALUE', + }); + } + path = resolve(path); + const { root } = parse(path); + if (path === root && opt.preserveRoot !== false) { + const msg = 'refusing to remove root directory without preserveRoot:false'; + throw Object.assign(new Error(msg), { + path, + code: 'ERR_PRESERVE_ROOT', + }); + } + if (platform === 'win32') { + const badWinChars = /[*|"<>?:]/; + const { root } = parse(path); + if (badWinChars.test(path.substring(root.length))) { + throw Object.assign(new Error('Illegal characters in path.'), { + path, + code: 'EINVAL', + }); + } + } + return path; +}; +export default pathArg; +//# sourceMappingURL=path-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/path-arg.js.map b/deps/npm/node_modules/rimraf/dist/esm/path-arg.js.map new file mode 100644 index 00000000000000..2f73f5f38929be --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/path-arg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.js","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AACrC,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAE9B,OAAO,QAAQ,MAAM,eAAe,CAAA;AAEpC,MAAM,OAAO,GAAG,CAAC,IAAY,EAAE,MAA0B,EAAE,EAAE,EAAE;IAC7D,MAAM,IAAI,GAAG,OAAO,IAAI,CAAA;IACxB,IAAI,IAAI,KAAK,QAAQ,EAAE,CAAC;QACtB,MAAM,IAAI,GAAG,IAAI,IAAI,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,WAAW,CAAA;QAC1D,MAAM,QAAQ,GACZ,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,kBAAkB,IAAI,CAAC,IAAI,EAAE;YACjD,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC;gBACnC,CAAC,CAAC,QAAQ,IAAI,IAAI,IAAI,EAAE,CAAA;QAC1B,MAAM,GAAG,GACP,8CAA8C,GAAG,YAAY,QAAQ,EAAE,CAAA;QACzE,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE;YACtC,IAAI;YACJ,IAAI,EAAE,sBAAsB;SAC7B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;QACpB,yCAAyC;QACzC,MAAM,GAAG,GAAG,0CAA0C,CAAA;QACtD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE;YACtC,IAAI;YACJ,IAAI,EAAE,uBAAuB;SAC9B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;IAE5B,IAAI,IAAI,KAAK,IAAI,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,EAAE,CAAC;QAChD,MAAM,GAAG,GAAG,8DAA8D,CAAA;QAC1E,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,GAAG,CAAC,EAAE;YAClC,IAAI;YACJ,IAAI,EAAE,mBAAmB;SAC1B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,QAAQ,KAAK,OAAO,EAAE,CAAC;QACzB,MAAM,WAAW,GAAG,WAAW,CAAA;QAC/B,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC;YAClD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,IAAI;gBACJ,IAAI,EAAE,QAAQ;aACf,CAAC,CAAA;QACJ,CAAC;IACH,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,eAAe,OAAO,CAAA","sourcesContent":["import { parse, resolve } from 'path'\nimport { inspect } from 'util'\nimport { RimrafAsyncOptions } from './index.js'\nimport platform from 
'./platform.js'\n\nconst pathArg = (path: string, opt: RimrafAsyncOptions = {}) => {\n const type = typeof path\n if (type !== 'string') {\n const ctor = path && type === 'object' && path.constructor\n const received =\n ctor && ctor.name ? `an instance of ${ctor.name}`\n : type === 'object' ? inspect(path)\n : `type ${type} ${path}`\n const msg =\n 'The \"path\" argument must be of type string. ' + `Received ${received}`\n throw Object.assign(new TypeError(msg), {\n path,\n code: 'ERR_INVALID_ARG_TYPE',\n })\n }\n\n if (/\\0/.test(path)) {\n // simulate same failure that node raises\n const msg = 'path must be a string without null bytes'\n throw Object.assign(new TypeError(msg), {\n path,\n code: 'ERR_INVALID_ARG_VALUE',\n })\n }\n\n path = resolve(path)\n const { root } = parse(path)\n\n if (path === root && opt.preserveRoot !== false) {\n const msg = 'refusing to remove root directory without preserveRoot:false'\n throw Object.assign(new Error(msg), {\n path,\n code: 'ERR_PRESERVE_ROOT',\n })\n }\n\n if (platform === 'win32') {\n const badWinChars = /[*|\"<>?:]/\n const { root } = parse(path)\n if (badWinChars.test(path.substring(root.length))) {\n throw Object.assign(new Error('Illegal characters in path.'), {\n path,\n code: 'EINVAL',\n })\n }\n }\n\n return path\n}\n\nexport default pathArg\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts b/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts new file mode 100644 index 00000000000000..e127a8e529ffd2 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts @@ -0,0 +1,3 @@ +declare const _default: string; +export default _default; +//# sourceMappingURL=platform.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts.map new file mode 100644 index 00000000000000..ef2e6734f8cfbb --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"platform.d.ts","sourceRoot":"","sources":["../../src/platform.ts"],"names":[],"mappings":";AAAA,wBAA0E"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/platform.js b/deps/npm/node_modules/rimraf/dist/esm/platform.js new file mode 100644 index 00000000000000..a2641721b78190 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/platform.js @@ -0,0 +1,2 @@ +export default process.env.__TESTING_RIMRAF_PLATFORM__ || process.platform; +//# sourceMappingURL=platform.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/platform.js.map b/deps/npm/node_modules/rimraf/dist/esm/platform.js.map new file mode 100644 index 00000000000000..c5fdaf9c0e428c --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/platform.js.map @@ -0,0 +1 @@ +{"version":3,"file":"platform.js","sourceRoot":"","sources":["../../src/platform.ts"],"names":[],"mappings":"AAAA,eAAe,OAAO,CAAC,GAAG,CAAC,2BAA2B,IAAI,OAAO,CAAC,QAAQ,CAAA","sourcesContent":["export default process.env.__TESTING_RIMRAF_PLATFORM__ || process.platform\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts new file mode 100644 index 00000000000000..cce73097f1681f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts @@ -0,0 +1,3 @@ +export declare const readdirOrError: (path: string) => Promise; +export declare const readdirOrErrorSync: (path: 
string) => import("fs").Dirent[] | NodeJS.ErrnoException; +//# sourceMappingURL=readdir-or-error.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts.map new file mode 100644 index 00000000000000..8a19f6bdfd0706 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"readdir-or-error.d.ts","sourceRoot":"","sources":["../../src/readdir-or-error.ts"],"names":[],"mappings":"AAIA,eAAO,MAAM,cAAc,SAAU,MAAM,2DACa,CAAA;AACxD,eAAO,MAAM,kBAAkB,SAAU,MAAM,kDAM9C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js new file mode 100644 index 00000000000000..71235135c63009 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js @@ -0,0 +1,14 @@ +// returns an array of entries if readdir() works, +// or the error that readdir() raised if not. +import { promises, readdirSync } from './fs.js'; +const { readdir } = promises; +export const readdirOrError = (path) => readdir(path).catch(er => er); +export const readdirOrErrorSync = (path) => { + try { + return readdirSync(path); + } + catch (er) { + return er; + } +}; +//# sourceMappingURL=readdir-or-error.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js.map b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js.map new file mode 100644 index 00000000000000..1d0c00efde2e76 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js.map @@ -0,0 +1 @@ +{"version":3,"file":"readdir-or-error.js","sourceRoot":"","sources":["../../src/readdir-or-error.ts"],"names":[],"mappings":"AAAA,kDAAkD;AAClD,6CAA6C;AAC7C,OAAO,EAAE,QAAQ,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA;AAC/C,MAAM,EAAE,OAAO,EAAE,GAAG,QAAQ,CAAA;AAC5B,MAAM,CAAC,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAC7C,OAAO,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,EAA2B,CAAC,CAAA;AACxD,MAAM,CAAC,MAAM,kBAAkB,GAAG,CAAC,IAAY,EAAE,EAAE;IACjD,IAAI,CAAC;QACH,OAAO,WAAW,CAAC,IAAI,CAAC,CAAA;IAC1B,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,OAAO,EAA2B,CAAA;IACpC,CAAC;AACH,CAAC,CAAA","sourcesContent":["// returns an array of entries if readdir() works,\n// or the error that readdir() raised if not.\nimport { promises, readdirSync } from './fs.js'\nconst { readdir } = promises\nexport const readdirOrError = (path: string) =>\n readdir(path).catch(er => er as NodeJS.ErrnoException)\nexport const readdirOrErrorSync = (path: string) => {\n try {\n return readdirSync(path)\n } catch (er) {\n return er as NodeJS.ErrnoException\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts new file mode 100644 index 00000000000000..c0af0dd62f0df9 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts @@ -0,0 +1,8 @@ +import { RimrafAsyncOptions, RimrafOptions } from './index.js'; +export declare const MAXBACKOFF = 200; +export declare const RATE = 1.2; +export declare const MAXRETRIES = 10; +export declare const codes: Set; +export declare const retryBusy: (fn: (path: string) => Promise) => (path: string, opt: RimrafAsyncOptions, backoff?: number, total?: number) => Promise; +export declare const retryBusySync: (fn: (path: string) => any) => (path: string, opt: RimrafOptions) => any; +//# sourceMappingURL=retry-busy.d.ts.map \ No newline at 
end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts.map new file mode 100644 index 00000000000000..21960c58914b4b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"retry-busy.d.ts","sourceRoot":"","sources":["../../src/retry-busy.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,MAAM,YAAY,CAAA;AAE9D,eAAO,MAAM,UAAU,MAAM,CAAA;AAC7B,eAAO,MAAM,IAAI,MAAM,CAAA;AACvB,eAAO,MAAM,UAAU,KAAK,CAAA;AAC5B,eAAO,MAAM,KAAK,aAAyC,CAAA;AAE3D,eAAO,MAAM,SAAS,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,GAAG,CAAC,YAElD,MAAM,OACP,kBAAkB,mDAkC1B,CAAA;AAGD,eAAO,MAAM,aAAa,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,GAAG,YAC/B,MAAM,OAAO,aAAa,QAsBjD,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js new file mode 100644 index 00000000000000..17e336a4d583f5 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js @@ -0,0 +1,63 @@ +// note: max backoff is the maximum that any *single* backoff will do +export const MAXBACKOFF = 200; +export const RATE = 1.2; +export const MAXRETRIES = 10; +export const codes = new Set(['EMFILE', 'ENFILE', 'EBUSY']); +export const retryBusy = (fn) => { + const method = async (path, opt, backoff = 1, total = 0) => { + const mbo = opt.maxBackoff || MAXBACKOFF; + const rate = opt.backoff || RATE; + const max = opt.maxRetries || MAXRETRIES; + let retries = 0; + while (true) { + try { + return await fn(path); + } + catch (er) { + const fer = er; + if (fer?.path === path && fer?.code && codes.has(fer.code)) { + backoff = Math.ceil(backoff * rate); + total = backoff + total; + if (total < mbo) { + return new Promise((res, rej) => { + setTimeout(() => { + method(path, opt, backoff, total).then(res, rej); + }, backoff); + }); + } + if (retries < max) { + retries++; + continue; + } + } + throw er; + } + } + }; + return method; +}; +// just retries, no async so no backoff +export const retryBusySync = (fn) => { + const method = (path, opt) => { + const max = opt.maxRetries || MAXRETRIES; + let retries = 0; + while (true) { + try { + return fn(path); + } + catch (er) { + const fer = er; + if (fer?.path === path && + fer?.code && + codes.has(fer.code) && + retries < max) { + retries++; + continue; + } + throw er; + } + } + }; + return method; +}; +//# sourceMappingURL=retry-busy.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js.map b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js.map new file mode 100644 index 00000000000000..f4239eb4142234 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"retry-busy.js","sourceRoot":"","sources":["../../src/retry-busy.ts"],"names":[],"mappings":"AAAA,qEAAqE;AAIrE,MAAM,CAAC,MAAM,UAAU,GAAG,GAAG,CAAA;AAC7B,MAAM,CAAC,MAAM,IAAI,GAAG,GAAG,CAAA;AACvB,MAAM,CAAC,MAAM,UAAU,GAAG,EAAE,CAAA;AAC5B,MAAM,CAAC,MAAM,KAAK,GAAG,IAAI,GAAG,CAAC,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAA;AAE3D,MAAM,CAAC,MAAM,SAAS,GAAG,CAAC,EAAkC,EAAE,EAAE;IAC9D,MAAM,MAAM,GAAG,KAAK,EAClB,IAAY,EACZ,GAAuB,EACvB,OAAO,GAAG,CAAC,EACX,KAAK,GAAG,CAAC,EACT,EAAE;QACF,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,UAAU,CAAA;QACxC,MAAM,IAAI,GAAG,GAAG,CAAC,OAAO,IAAI,IAAI,CAAA;QAChC,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,UAAU,CAAA;QACxC,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,IAAI,EAAE,CAAC;YACZ,IAAI,CAAC;gBACH,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;YACvB,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;gBACvC,IAAI,GAAG,EAAE,IAAI,KAAK,IAAI,IAAI,GAAG,EAAE,IAAI,IAAI,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC;oBAC3D,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,CAAA;oBACnC,KAAK,GAAG,OAAO,GAAG,KAAK,CAAA;oBACvB,IAAI,KAAK,GAAG,GAAG,EAAE,CAAC;wBAChB,OAAO,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;4BAC9B,UAAU,CAAC,GAAG,EAAE;gCACd,MAAM,CAAC,IAAI,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAA;4BAClD,CAAC,EAAE,OAAO,CAAC,CAAA;wBACb,CAAC,CAAC,CAAA;oBACJ,CAAC;oBACD,IAAI,OAAO,GAAG,GAAG,EAAE,CAAC;wBAClB,OAAO,EAAE,CAAA;wBACT,SAAQ;oBACV,CAAC;gBACH,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC,CAAA;IAED,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AAED,uCAAuC;AACvC,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,EAAyB,EAAE,EAAE;IACzD,MAAM,MAAM,GAAG,CAAC,IAAY,EAAE,GAAkB,EAAE,EAAE;QAClD,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,UAAU,CAAA;QACxC,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,IAAI,EAAE,CAAC;YACZ,IAAI,CAAC;gBACH,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;YACjB,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;gBACvC,IACE,GAAG,EAAE,IAAI,KAAK,IAAI;oBAClB,GAAG,EAAE,IAAI;oBACT,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC;oBACnB,OAAO,GAAG,GAAG,EACb,CAAC;oBACD,OAAO,EAAE,CAAA;oBACT,SAAQ;gBACV,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC,CAAA;IACD,OAAO,MAAM,CAAA;AACf,CAAC,CAAA","sourcesContent":["// note: max backoff is the maximum that any *single* backoff will do\n\nimport { RimrafAsyncOptions, RimrafOptions } from './index.js'\n\nexport const MAXBACKOFF = 200\nexport const RATE = 1.2\nexport const MAXRETRIES = 10\nexport const codes = new Set(['EMFILE', 'ENFILE', 'EBUSY'])\n\nexport const retryBusy = (fn: (path: string) => Promise) => {\n const method = async (\n path: string,\n opt: RimrafAsyncOptions,\n backoff = 1,\n total = 0,\n ) => {\n const mbo = opt.maxBackoff || MAXBACKOFF\n const rate = opt.backoff || RATE\n const max = opt.maxRetries || MAXRETRIES\n let retries = 0\n while (true) {\n try {\n return await fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.path === path && fer?.code && codes.has(fer.code)) {\n backoff = Math.ceil(backoff * rate)\n total = backoff + total\n if (total < mbo) {\n return new Promise((res, rej) => {\n setTimeout(() => {\n method(path, opt, backoff, total).then(res, rej)\n }, backoff)\n })\n }\n if (retries < max) {\n retries++\n continue\n }\n }\n throw er\n }\n }\n }\n\n return method\n}\n\n// just retries, no async so no backoff\nexport const retryBusySync = (fn: (path: string) => any) => {\n const method = (path: string, opt: RimrafOptions) => {\n const max = opt.maxRetries || MAXRETRIES\n let retries = 0\n while (true) {\n try {\n return fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (\n 
fer?.path === path &&\n fer?.code &&\n codes.has(fer.code) &&\n retries < max\n ) {\n retries++\n continue\n }\n throw er\n }\n }\n }\n return method\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts new file mode 100644 index 00000000000000..35c5c86844c7fa --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts @@ -0,0 +1,3 @@ +export declare const rimrafManual: (path: string, opt: import("./opt-arg.js").RimrafAsyncOptions) => Promise; +export declare const rimrafManualSync: (path: string, opt: import("./opt-arg.js").RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-manual.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts.map new file mode 100644 index 00000000000000..19bd25149ceb07 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-manual.d.ts","sourceRoot":"","sources":["../../src/rimraf-manual.ts"],"names":[],"mappings":"AAKA,eAAO,MAAM,YAAY,oFAAqD,CAAA;AAC9E,eAAO,MAAM,gBAAgB,0EAC+B,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js new file mode 100644 index 00000000000000..132708ffaa5870 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js @@ -0,0 +1,6 @@ +import platform from './platform.js'; +import { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'; +import { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'; +export const rimrafManual = platform === 'win32' ? rimrafWindows : rimrafPosix; +export const rimrafManualSync = platform === 'win32' ? rimrafWindowsSync : rimrafPosixSync; +//# sourceMappingURL=rimraf-manual.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js.map new file mode 100644 index 00000000000000..212c815dc356d0 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-manual.js","sourceRoot":"","sources":["../../src/rimraf-manual.ts"],"names":[],"mappings":"AAAA,OAAO,QAAQ,MAAM,eAAe,CAAA;AAEpC,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAA;AAChE,OAAO,EAAE,aAAa,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAA;AAEtE,MAAM,CAAC,MAAM,YAAY,GAAG,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,WAAW,CAAA;AAC9E,MAAM,CAAC,MAAM,gBAAgB,GAC3B,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,iBAAiB,CAAC,CAAC,CAAC,eAAe,CAAA","sourcesContent":["import platform from './platform.js'\n\nimport { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'\nimport { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'\n\nexport const rimrafManual = platform === 'win32' ? rimrafWindows : rimrafPosix\nexport const rimrafManualSync =\n platform === 'win32' ? 
rimrafWindowsSync : rimrafPosixSync\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts new file mode 100644 index 00000000000000..5d41d40825e4c7 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafMoveRemove: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafMoveRemoveSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-move-remove.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts.map new file mode 100644 index 00000000000000..4062eaebbb1302 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-move-remove.d.ts","sourceRoot":"","sources":["../../src/rimraf-move-remove.ts"],"names":[],"mappings":"AA6BA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AA4ClE,eAAO,MAAM,gBAAgB,SACrB,MAAM,OACP,kBAAkB,qBAWxB,CAAA;AA4ED,eAAO,MAAM,oBAAoB,SAAU,MAAM,OAAO,iBAAiB,YAUxE,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js new file mode 100644 index 00000000000000..093e40f49f5a2e --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js @@ -0,0 +1,187 @@ +// https://youtu.be/uhRWMGBjlO8?t=537 +// +// 1. readdir +// 2. for each entry +// a. if a non-empty directory, recurse +// b. if an empty directory, move to random hidden file name in $TEMP +// c. unlink/rmdir $TEMP +// +// This works around the fact that unlink/rmdir is non-atomic and takes +// a non-deterministic amount of time to complete. +// +// However, it is HELLA SLOW, like 2-10x slower than a naive recursive rm. 
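The block comment above lays out rimraf's move-then-remove strategy. As a minimal illustrative sketch only (not part of the vendored rimraf sources; `moveThenUnlink` and `tmpDir` are hypothetical names), the core trick of renaming an entry to a throwaway hidden name before the slow removal looks roughly like this:

import { promises as fs } from 'fs'
import { basename, resolve } from 'path'

// rename() is atomic within a single filesystem, so the entry vanishes
// from its original path immediately; the non-atomic unlink() then runs
// against a disposable name in the temp directory.
const moveThenUnlink = async (path: string, tmpDir: string) => {
  // hidden, collision-unlikely name, mirroring rimraf's uniqueFilename()
  const tmpFile = resolve(tmpDir, `.${basename(path)}.${Math.random()}`)
  await fs.rename(path, tmpFile)
  await fs.unlink(tmpFile)
}

The vendored code below is the real implementation: it recurses into directories first, handles EPERM/ENOENT races, and only then applies this rename-and-remove step.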
+import { basename, parse, resolve } from 'path'; +import { defaultTmp, defaultTmpSync } from './default-tmp.js'; +import { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'; +import { chmodSync, lstatSync, promises as fsPromises, renameSync, rmdirSync, unlinkSync, } from './fs.js'; +const { lstat, rename, unlink, rmdir, chmod } = fsPromises; +import { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'; +// crypto.randomBytes is much slower, and Math.random() is enough here +const uniqueFilename = (path) => `.${basename(path)}.${Math.random()}`; +const unlinkFixEPERM = async (path) => unlink(path).catch((er) => { + if (er.code === 'EPERM') { + return chmod(path, 0o666).then(() => unlink(path), er2 => { + if (er2.code === 'ENOENT') { + return; + } + throw er; + }); + } + else if (er.code === 'ENOENT') { + return; + } + throw er; +}); +const unlinkFixEPERMSync = (path) => { + try { + unlinkSync(path); + } + catch (er) { + if (er?.code === 'EPERM') { + try { + return chmodSync(path, 0o666); + } + catch (er2) { + if (er2?.code === 'ENOENT') { + return; + } + throw er; + } + } + else if (er?.code === 'ENOENT') { + return; + } + throw er; + } +}; +export const rimrafMoveRemove = async (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return await rimrafMoveRemoveDir(path, opt, await lstat(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +const rimrafMoveRemoveDir = async (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + if (!opt.tmp) { + return rimrafMoveRemoveDir(path, { ...opt, tmp: await defaultTmp(path) }, ent); + } + if (path === opt.tmp && parse(path).root !== path) { + throw new Error('cannot delete temp directory used for deletion'); + } + const entries = ent.isDirectory() ? await readdirOrError(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await ignoreENOENT(tmpUnlink(path, opt.tmp, unlinkFixEPERM)); + return true; + } + const removedAll = (await Promise.all(entries.map(ent => rimrafMoveRemoveDir(resolve(path, ent.name), opt, ent)))).reduce((a, b) => a && b, true); + if (!removedAll) { + return false; + } + // we don't ever ACTUALLY try to unlink /, because that can never work + // but when preserveRoot is false, we could be operating on it. + // No need to check if preserveRoot is not false. 
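As the comment above notes, the preserveRoot guard only matters once every child has been removed, and the root test itself is just a comparison against path.parse(). A hedged sketch of that check in isolation (assumption: isRoot is a hypothetical helper, not in the rimraf sources):

import { parse, resolve } from 'path'

// true for '/' on POSIX or 'C:\\' on Windows: resolve() normalizes the
// input, and parse().root of a root path is the path itself.
const isRoot = (p: string): boolean => {
  const full = resolve(p)
  return full === parse(full).root
}

// e.g. isRoot('/') === true, isRoot('/tmp') === false on POSIX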
+ if (opt.preserveRoot === false && path === parse(path).root) { + return false; + } + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await ignoreENOENT(tmpUnlink(path, opt.tmp, rmdir)); + return true; +}; +const tmpUnlink = async (path, tmp, rm) => { + const tmpFile = resolve(tmp, uniqueFilename(path)); + await rename(path, tmpFile); + return await rm(tmpFile); +}; +export const rimrafMoveRemoveSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return rimrafMoveRemoveDirSync(path, opt, lstatSync(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +const rimrafMoveRemoveDirSync = (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + if (!opt.tmp) { + return rimrafMoveRemoveDirSync(path, { ...opt, tmp: defaultTmpSync(path) }, ent); + } + const tmp = opt.tmp; + if (path === opt.tmp && parse(path).root !== path) { + throw new Error('cannot delete temp directory used for deletion'); + } + const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, unlinkFixEPERMSync)); + return true; + } + let removedAll = true; + for (const ent of entries) { + const p = resolve(path, ent.name); + removedAll = rimrafMoveRemoveDirSync(p, opt, ent) && removedAll; + } + if (!removedAll) { + return false; + } + if (opt.preserveRoot === false && path === parse(path).root) { + return false; + } + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, rmdirSync)); + return true; +}; +const tmpUnlinkSync = (path, tmp, rmSync) => { + const tmpFile = resolve(tmp, uniqueFilename(path)); + renameSync(path, tmpFile); + return rmSync(tmpFile); +}; +//# sourceMappingURL=rimraf-move-remove.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js.map new file mode 100644 index 00000000000000..b9a1fd57490501 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"rimraf-move-remove.js","sourceRoot":"","sources":["../../src/rimraf-move-remove.ts"],"names":[],"mappings":"AAAA,qCAAqC;AACrC,EAAE;AACF,aAAa;AACb,oBAAoB;AACpB,yCAAyC;AACzC,uEAAuE;AACvE,0BAA0B;AAC1B,EAAE;AACF,uEAAuE;AACvE,kDAAkD;AAClD,EAAE;AACF,0EAA0E;AAE1E,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAC/C,OAAO,EAAE,UAAU,EAAE,cAAc,EAAE,MAAM,kBAAkB,CAAA;AAE7D,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AAEnE,OAAO,EACL,SAAS,EACT,SAAS,EACT,QAAQ,IAAI,UAAU,EACtB,UAAU,EACV,SAAS,EACT,UAAU,GACX,MAAM,SAAS,CAAA;AAChB,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,UAAU,CAAA;AAI1D,OAAO,EAAE,cAAc,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAA;AAE1E,sEAAsE;AACtE,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,IAAI,QAAQ,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE,CAAA;AAE9E,MAAM,cAAc,GAAG,KAAK,EAAE,IAAY,EAAE,EAAE,CAC5C,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,EAA6B,EAAE,EAAE;IACnD,IAAI,EAAE,CAAC,IAAI,KAAK,OAAO,EAAE,CAAC;QACxB,OAAO,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,IAAI,CAC5B,GAAG,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,EAClB,GAAG,CAAC,EAAE;YACJ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC1B,OAAM;YACR,CAAC;YACD,MAAM,EAAE,CAAA;QACV,CAAC,CACF,CAAA;IACH,CAAC;SAAM,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;QAChC,OAAM;IACR,CAAC;IACD,MAAM,EAAE,CAAA;AACV,CAAC,CAAC,CAAA;AAEJ,MAAM,kBAAkB,GAAG,CAAC,IAAY,EAAE,EAAE;IAC1C,IAAI,CAAC;QACH,UAAU,CAAC,IAAI,CAAC,CAAA;IAClB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YACpD,IAAI,CAAC;gBACH,OAAO,SAAS,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YAC/B,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,IAAK,GAA6B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBACtD,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;aAAM,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC5D,OAAM;QACR,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,gBAAgB,GAAG,KAAK,EACnC,IAAY,EACZ,GAAuB,EACvB,EAAE;IACF,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,mBAAmB,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAChE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,KAAK,EAC/B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACD,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QACb,OAAO,mBAAmB,CACxB,IAAI,EACJ,EAAE,GAAG,GAAG,EAAE,GAAG,EAAE,MAAM,UAAU,CAAC,IAAI,CAAC,EAAE,EACvC,GAAG,CACJ,CAAA;IACH,CAAC;IACD,IAAI,IAAI,KAAK,GAAG,CAAC,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,KAAK,IAAI,EAAE,CAAC;QAClD,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;IACnE,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,cAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,YAAY,CAAC,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC,CAAA;QAC5D,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAChB,mBAAmB,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,CACvD,CACF,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,E
AAE,IAAI,CAAC,CAAA;IAChC,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,sEAAsE;IACtE,+DAA+D;IAC/D,iDAAiD;IACjD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;QACjD,OAAO,KAAK,CAAA;IACd,CAAC;IACD,MAAM,YAAY,CAAC,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAA;IACnD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,SAAS,GAAG,KAAK,EACrB,IAAY,EACZ,GAAW,EACX,EAA+B,EAC/B,EAAE;IACF,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,CAAC,CAAA;IAClD,MAAM,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IAC3B,OAAO,MAAM,EAAE,CAAC,OAAO,CAAC,CAAA;AAC1B,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IAC3E,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,uBAAuB,CAAC,IAAI,EAAE,GAAG,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;IAC5D,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,uBAAuB,GAAG,CAC9B,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACV,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QACb,OAAO,uBAAuB,CAC5B,IAAI,EACJ,EAAE,GAAG,GAAG,EAAE,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,EAAE,EACrC,GAAG,CACJ,CAAA;IACH,CAAC;IACD,MAAM,GAAG,GAAW,GAAG,CAAC,GAAG,CAAA;IAE3B,IAAI,IAAI,KAAK,GAAG,CAAC,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,KAAK,IAAI,EAAE,CAAC;QAClD,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;IACnE,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,gBAAgB,CAAC,GAAG,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,GAAG,EAAE,kBAAkB,CAAC,CAAC,CAAA;QACpE,OAAO,IAAI,CAAA;IACb,CAAC;IAED,IAAI,UAAU,GAAG,IAAI,CAAA;IACrB,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,uBAAuB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,IAAI,UAAU,CAAA;IACjE,CAAC;IACD,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;QACzC,OAAO,KAAK,CAAA;IACd,CAAC;IACD,gBAAgB,CAAC,GAAG,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,GAAG,EAAE,SAAS,CAAC,CAAC,CAAA;IAC3D,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,aAAa,GAAG,CACpB,IAAY,EACZ,GAAW,EACX,MAA2B,EAC3B,EAAE;IACF,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,CAAC,CAAA;IAClD,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACzB,OAAO,MAAM,CAAC,OAAO,CAAC,CAAA;AACxB,CAAC,CAAA","sourcesContent":["// https://youtu.be/uhRWMGBjlO8?t=537\n//\n// 1. readdir\n// 2. for each entry\n// a. if a non-empty directory, recurse\n// b. if an empty directory, move to random hidden file name in $TEMP\n// c. 
unlink/rmdir $TEMP\n//\n// This works around the fact that unlink/rmdir is non-atomic and takes\n// a non-deterministic amount of time to complete.\n//\n// However, it is HELLA SLOW, like 2-10x slower than a naive recursive rm.\n\nimport { basename, parse, resolve } from 'path'\nimport { defaultTmp, defaultTmpSync } from './default-tmp.js'\n\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\n\nimport {\n chmodSync,\n lstatSync,\n promises as fsPromises,\n renameSync,\n rmdirSync,\n unlinkSync,\n} from './fs.js'\nconst { lstat, rename, unlink, rmdir, chmod } = fsPromises\n\nimport { Dirent, Stats } from 'fs'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\n\n// crypto.randomBytes is much slower, and Math.random() is enough here\nconst uniqueFilename = (path: string) => `.${basename(path)}.${Math.random()}`\n\nconst unlinkFixEPERM = async (path: string) =>\n unlink(path).catch((er: Error & { code?: string }) => {\n if (er.code === 'EPERM') {\n return chmod(path, 0o666).then(\n () => unlink(path),\n er2 => {\n if (er2.code === 'ENOENT') {\n return\n }\n throw er\n },\n )\n } else if (er.code === 'ENOENT') {\n return\n }\n throw er\n })\n\nconst unlinkFixEPERMSync = (path: string) => {\n try {\n unlinkSync(path)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'EPERM') {\n try {\n return chmodSync(path, 0o666)\n } catch (er2) {\n if ((er2 as NodeJS.ErrnoException)?.code === 'ENOENT') {\n return\n }\n throw er\n }\n } else if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') {\n return\n }\n throw er\n }\n}\n\nexport const rimrafMoveRemove = async (\n path: string,\n opt: RimrafAsyncOptions,\n) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafMoveRemoveDir(path, opt, await lstat(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafMoveRemoveDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n if (!opt.tmp) {\n return rimrafMoveRemoveDir(\n path,\n { ...opt, tmp: await defaultTmp(path) },\n ent,\n )\n }\n if (path === opt.tmp && parse(path).root !== path) {\n throw new Error('cannot delete temp directory used for deletion')\n }\n\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(tmpUnlink(path, opt.tmp, unlinkFixEPERM))\n return true\n }\n\n const removedAll = (\n await Promise.all(\n entries.map(ent =>\n rimrafMoveRemoveDir(resolve(path, ent.name), opt, ent),\n ),\n )\n ).reduce((a, b) => a && b, true)\n if (!removedAll) {\n return false\n }\n\n // we don't ever ACTUALLY try to unlink /, because that can never work\n // but when preserveRoot is false, we could be operating on it.\n // No need to check if preserveRoot is not false.\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(tmpUnlink(path, opt.tmp, rmdir))\n return true\n}\n\nconst tmpUnlink = async (\n path: string,\n tmp: string,\n rm: (p: string) => Promise,\n) => {\n const tmpFile = resolve(tmp, uniqueFilename(path))\n await rename(path, tmpFile)\n return await rm(tmpFile)\n}\n\nexport const rimrafMoveRemoveSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafMoveRemoveDirSync(path, opt, lstatSync(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafMoveRemoveDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n if (!opt.tmp) {\n return rimrafMoveRemoveDirSync(\n path,\n { ...opt, tmp: defaultTmpSync(path) },\n ent,\n )\n }\n const tmp: string = opt.tmp\n\n if (path === opt.tmp && parse(path).root !== path) {\n throw new Error('cannot delete temp directory used for deletion')\n }\n\n const entries = ent.isDirectory() ? 
readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, unlinkFixEPERMSync))\n return true\n }\n\n let removedAll = true\n for (const ent of entries) {\n const p = resolve(path, ent.name)\n removedAll = rimrafMoveRemoveDirSync(p, opt, ent) && removedAll\n }\n if (!removedAll) {\n return false\n }\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, rmdirSync))\n return true\n}\n\nconst tmpUnlinkSync = (\n path: string,\n tmp: string,\n rmSync: (p: string) => void,\n) => {\n const tmpFile = resolve(tmp, uniqueFilename(path))\n renameSync(path, tmpFile)\n return rmSync(tmpFile)\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts new file mode 100644 index 00000000000000..cc84bf7ffd34d0 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafNative: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafNativeSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts.map new file mode 100644 index 00000000000000..bea6b79965192f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-native.d.ts","sourceRoot":"","sources":["../../src/rimraf-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AAIlE,eAAO,MAAM,YAAY,SACjB,MAAM,OACP,kBAAkB,KACtB,OAAO,CAAC,OAAO,CAOjB,CAAA;AAED,eAAO,MAAM,gBAAgB,SACrB,MAAM,OACP,iBAAiB,KACrB,OAOF,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js new file mode 100644 index 00000000000000..719161fc9e85ca --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js @@ -0,0 +1,19 @@ +import { promises, rmSync } from './fs.js'; +const { rm } = promises; +export const rimrafNative = async (path, opt) => { + await rm(path, { + ...opt, + force: true, + recursive: true, + }); + return true; +}; +export const rimrafNativeSync = (path, opt) => { + rmSync(path, { + ...opt, + force: true, + recursive: true, + }); + return true; +}; +//# sourceMappingURL=rimraf-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js.map new file mode 100644 index 00000000000000..fde373ba4ea13f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"rimraf-native.js","sourceRoot":"","sources":["../../src/rimraf-native.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,MAAM,SAAS,CAAA;AAC1C,MAAM,EAAE,EAAE,EAAE,GAAG,QAAQ,CAAA;AAEvB,MAAM,CAAC,MAAM,YAAY,GAAG,KAAK,EAC/B,IAAY,EACZ,GAAuB,EACL,EAAE;IACpB,MAAM,EAAE,CAAC,IAAI,EAAE;QACb,GAAG,GAAG;QACN,KAAK,EAAE,IAAI;QACX,SAAS,EAAE,IAAI;KAChB,CAAC,CAAA;IACF,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,GAAsB,EACb,EAAE;IACX,MAAM,CAAC,IAAI,EAAE;QACX,GAAG,GAAG;QACN,KAAK,EAAE,IAAI;QACX,SAAS,EAAE,IAAI;KAChB,CAAC,CAAA;IACF,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { promises, rmSync } from './fs.js'\nconst { rm } = promises\n\nexport const rimrafNative = async (\n path: string,\n opt: RimrafAsyncOptions,\n): Promise => {\n await rm(path, {\n ...opt,\n force: true,\n recursive: true,\n })\n return true\n}\n\nexport const rimrafNativeSync = (\n path: string,\n opt: RimrafSyncOptions,\n): boolean => {\n rmSync(path, {\n ...opt,\n force: true,\n recursive: true,\n })\n return true\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts new file mode 100644 index 00000000000000..8e532efe9aba21 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafPosix: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafPosixSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-posix.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts.map new file mode 100644 index 00000000000000..3f9b8084ed470b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-posix.d.ts","sourceRoot":"","sources":["../../src/rimraf-posix.ts"],"names":[],"mappings":"AAcA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AAGlE,eAAO,MAAM,WAAW,SAAgB,MAAM,OAAO,kBAAkB,qBAUtE,CAAA;AAED,eAAO,MAAM,eAAe,SAAU,MAAM,OAAO,iBAAiB,YAUnE,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js new file mode 100644 index 00000000000000..356a477765a665 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js @@ -0,0 +1,118 @@ +// the simple recursive removal, where unlink and rmdir are atomic +// Note that this approach does NOT work on Windows! +// We stat first and only unlink if the Dirent isn't a directory, +// because sunos will let root unlink a directory, and some +// SUPER weird breakage happens as a result. 
+import { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'; +const { lstat, rmdir, unlink } = promises; +import { parse, resolve } from 'path'; +import { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'; +import { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'; +export const rimrafPosix = async (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return await rimrafPosixDir(path, opt, await lstat(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +export const rimrafPosixSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return rimrafPosixDirSync(path, opt, lstatSync(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +const rimrafPosixDir = async (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + const entries = ent.isDirectory() ? await readdirOrError(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await ignoreENOENT(unlink(path)); + return true; + } + const removedAll = (await Promise.all(entries.map(ent => rimrafPosixDir(resolve(path, ent.name), opt, ent)))).reduce((a, b) => a && b, true); + if (!removedAll) { + return false; + } + // we don't ever ACTUALLY try to unlink /, because that can never work + // but when preserveRoot is false, we could be operating on it. + // No need to check if preserveRoot is not false. + if (opt.preserveRoot === false && path === parse(path).root) { + return false; + } + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await ignoreENOENT(rmdir(path)); + return true; +}; +const rimrafPosixDirSync = (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. 
+ /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + ignoreENOENTSync(() => unlinkSync(path)); + return true; + } + let removedAll = true; + for (const ent of entries) { + const p = resolve(path, ent.name); + removedAll = rimrafPosixDirSync(p, opt, ent) && removedAll; + } + if (opt.preserveRoot === false && path === parse(path).root) { + return false; + } + if (!removedAll) { + return false; + } + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + ignoreENOENTSync(() => rmdirSync(path)); + return true; +}; +//# sourceMappingURL=rimraf-posix.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js.map new file mode 100644 index 00000000000000..e301324d3f2649 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-posix.js","sourceRoot":"","sources":["../../src/rimraf-posix.ts"],"names":[],"mappings":"AAAA,kEAAkE;AAClE,oDAAoD;AACpD,iEAAiE;AACjE,2DAA2D;AAC3D,4CAA4C;AAE5C,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE,SAAS,EAAE,UAAU,EAAE,MAAM,SAAS,CAAA;AACpE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,QAAQ,CAAA;AAEzC,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAErC,OAAO,EAAE,cAAc,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAA;AAI1E,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AAEnE,MAAM,CAAC,MAAM,WAAW,GAAG,KAAK,EAAE,IAAY,EAAE,GAAuB,EAAE,EAAE;IACzE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAC3D,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,eAAe,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IACtE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,kBAAkB,CAAC,IAAI,EAAE,GAAG,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;IACvD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,cAAc,GAAG,KAAK,EAC1B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACD,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,cAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,YAAY,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAA;QAChC,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,CACtE,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAEhC,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,sEAAsE;IACtE,+DAA+D;IAC/D,iDAAiD;IACjD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,I
AAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;QACjD,OAAO,KAAK,CAAA;IACd,CAAC;IAED,MAAM,YAAY,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAC/B,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,kBAAkB,GAAG,CACzB,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACV,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,gBAAgB,CAAC,GAAG,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAA;QACxC,OAAO,IAAI,CAAA;IACb,CAAC;IACD,IAAI,UAAU,GAAY,IAAI,CAAA;IAC9B,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,kBAAkB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,IAAI,UAAU,CAAA;IAC5D,CAAC;IACD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;QACzC,OAAO,KAAK,CAAA;IACd,CAAC;IAED,gBAAgB,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;IACvC,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["// the simple recursive removal, where unlink and rmdir are atomic\n// Note that this approach does NOT work on Windows!\n// We stat first and only unlink if the Dirent isn't a directory,\n// because sunos will let root unlink a directory, and some\n// SUPER weird breakage happens as a result.\n\nimport { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'\nconst { lstat, rmdir, unlink } = promises\n\nimport { parse, resolve } from 'path'\n\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\n\nimport { Dirent, Stats } from 'fs'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\n\nexport const rimrafPosix = async (path: string, opt: RimrafAsyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafPosixDir(path, opt, await lstat(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nexport const rimrafPosixSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafPosixDirSync(path, opt, lstatSync(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafPosixDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(unlink(path))\n return true\n }\n\n const removedAll = (\n await Promise.all(\n entries.map(ent => rimrafPosixDir(resolve(path, ent.name), opt, ent)),\n )\n ).reduce((a, b) => a && b, true)\n\n if (!removedAll) {\n return false\n }\n\n // we don't ever ACTUALLY try to unlink /, because that can never work\n // but when preserveRoot is false, we could be operating on it.\n // No need to check if preserveRoot is not false.\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n\n await ignoreENOENT(rmdir(path))\n return true\n}\n\nconst rimrafPosixDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => unlinkSync(path))\n return true\n }\n let removedAll: boolean = true\n for (const ent of entries) {\n const p = resolve(path, ent.name)\n removedAll = rimrafPosixDirSync(p, opt, ent) && removedAll\n }\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n\n if (!removedAll) {\n return false\n }\n\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n\n ignoreENOENTSync(() => rmdirSync(path))\n return true\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts new file mode 100644 index 00000000000000..555689073ffe75 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafWindows: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafWindowsSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-windows.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts.map new file mode 100644 index 00000000000000..56f00d9f2e3d13 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-windows.d.ts","sourceRoot":"","sources":["../../src/rimraf-windows.ts"],"names":[],"mappings":"AAYA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AA2DlE,eAAO,MAAM,aAAa,SAAgB,MAAM,OAAO,kBAAkB,qBAUxE,CAAA;AAED,eAAO,MAAM,iBAAiB,SAAU,MAAM,OAAO,iBAAiB,YAUrE,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js 
b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js new file mode 100644 index 00000000000000..bd2fa80657848d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js @@ -0,0 +1,177 @@ +// This is the same as rimrafPosix, with the following changes: +// +// 1. EBUSY, ENFILE, EMFILE trigger retries and/or exponential backoff +// 2. All non-directories are removed first and then all directories are +// removed in a second sweep. +// 3. If we hit ENOTEMPTY in the second sweep, fall back to move-remove on +// that folder. +// +// Note: "move then remove" is 2-10 times slower, and just as unreliable. +import { parse, resolve } from 'path'; +import { fixEPERM, fixEPERMSync } from './fix-eperm.js'; +import { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'; +import { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'; +import { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'; +import { retryBusy, retryBusySync } from './retry-busy.js'; +import { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'; +const { unlink, rmdir, lstat } = promises; +const rimrafWindowsFile = retryBusy(fixEPERM(unlink)); +const rimrafWindowsFileSync = retryBusySync(fixEPERMSync(unlinkSync)); +const rimrafWindowsDirRetry = retryBusy(fixEPERM(rmdir)); +const rimrafWindowsDirRetrySync = retryBusySync(fixEPERMSync(rmdirSync)); +const rimrafWindowsDirMoveRemoveFallback = async (path, opt) => { + /* c8 ignore start */ + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + /* c8 ignore stop */ + // already filtered, remove from options so we don't call unnecessarily + const { filter, ...options } = opt; + try { + return await rimrafWindowsDirRetry(path, options); + } + catch (er) { + if (er?.code === 'ENOTEMPTY') { + return await rimrafMoveRemove(path, options); + } + throw er; + } +}; +const rimrafWindowsDirMoveRemoveFallbackSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + // already filtered, remove from options so we don't call unnecessarily + const { filter, ...options } = opt; + try { + return rimrafWindowsDirRetrySync(path, options); + } + catch (er) { + const fer = er; + if (fer?.code === 'ENOTEMPTY') { + return rimrafMoveRemoveSync(path, options); + } + throw er; + } +}; +const START = Symbol('start'); +const CHILD = Symbol('child'); +const FINISH = Symbol('finish'); +export const rimrafWindows = async (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return await rimrafWindowsDir(path, opt, await lstat(path), START); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +export const rimrafWindowsSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return rimrafWindowsDirSync(path, opt, lstatSync(path), START); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +const rimrafWindowsDir = async (path, opt, ent, state = START) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + const entries = ent.isDirectory() ? await readdirOrError(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment.
+ /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + // is a file + await ignoreENOENT(rimrafWindowsFile(path, opt)); + return true; + } + const s = state === START ? CHILD : state; + const removedAll = (await Promise.all(entries.map(ent => rimrafWindowsDir(resolve(path, ent.name), opt, ent, s)))).reduce((a, b) => a && b, true); + if (state === START) { + return rimrafWindowsDir(path, opt, ent, FINISH); + } + else if (state === FINISH) { + if (opt.preserveRoot === false && path === parse(path).root) { + return false; + } + if (!removedAll) { + return false; + } + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await ignoreENOENT(rimrafWindowsDirMoveRemoveFallback(path, opt)); + } + return true; +}; +const rimrafWindowsDirSync = (path, opt, ent, state = START) => { + const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + // is a file + ignoreENOENTSync(() => rimrafWindowsFileSync(path, opt)); + return true; + } + let removedAll = true; + for (const ent of entries) { + const s = state === START ? CHILD : state; + const p = resolve(path, ent.name); + removedAll = rimrafWindowsDirSync(p, opt, ent, s) && removedAll; + } + if (state === START) { + return rimrafWindowsDirSync(path, opt, ent, FINISH); + } + else if (state === FINISH) { + if (opt.preserveRoot === false && path === parse(path).root) { + return false; + } + if (!removedAll) { + return false; + } + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + ignoreENOENTSync(() => { + rimrafWindowsDirMoveRemoveFallbackSync(path, opt); + }); + } + return true; +}; +//# sourceMappingURL=rimraf-windows.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js.map new file mode 100644 index 00000000000000..4b136c1e7f1f78 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js.map @@ -0,0 +1 @@ 
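The rimrafWindowsFile and rimrafWindowsDirRetry helpers above come from composing retryBusy over fixEPERM; neither module appears in this hunk, so the following is only a hedged sketch of what such a retry wrapper can look like. The retry codes match the header comment; the helper name, retry count, and backoff constants are illustrative, not rimraf's actual defaults:

const RETRY_CODES = new Set(['EBUSY', 'EMFILE', 'ENFILE'])

// wrap an fs call so transient busy/file-table errors retry with exponential backoff
const retryBusySketch = <T>(fn: (path: string) => Promise<T>) =>
  async (path: string, maxRetries = 10): Promise<T> => {
    let backoff = 100
    for (let tries = 0; ; tries++) {
      try {
        return await fn(path)
      } catch (er) {
        const code = (er as NodeJS.ErrnoException).code
        if (!code || !RETRY_CODES.has(code) || tries >= maxRetries) {
          throw er
        }
        await new Promise(r => setTimeout(r, backoff))
        backoff *= 2
      }
    }
  }

Used as in the composition above, e.g. retryBusySketch(someEPERMFixedRmdir) yields an rmdir that tolerates a directory briefly held open by a scanner or indexer.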
+{"version":3,"file":"rimraf-windows.js","sourceRoot":"","sources":["../../src/rimraf-windows.ts"],"names":[],"mappings":"AAAA,+DAA+D;AAC/D,EAAE;AACF,sEAAsE;AACtE,wEAAwE;AACxE,gCAAgC;AAChC,0EAA0E;AAC1E,sBAAsB;AACtB,EAAE;AACF,yEAAyE;AAGzE,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAErC,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAA;AACvD,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE,SAAS,EAAE,UAAU,EAAE,MAAM,SAAS,CAAA;AACpE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,cAAc,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAA;AAC1E,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC1D,OAAO,EAAE,gBAAgB,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAA;AAChF,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,QAAQ,CAAA;AAEzC,MAAM,iBAAiB,GAAG,SAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAA;AACrD,MAAM,qBAAqB,GAAG,aAAa,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAA;AACrE,MAAM,qBAAqB,GAAG,SAAS,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAA;AACxD,MAAM,yBAAyB,GAAG,aAAa,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC,CAAA;AAExE,MAAM,kCAAkC,GAAG,KAAK,EAC9C,IAAY,EACZ,GAAuB,EACL,EAAE;IACpB,qBAAqB;IACrB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,oBAAoB;IACpB,uEAAuE;IACvE,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAClC,IAAI,CAAC;QACH,OAAO,MAAM,qBAAqB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACnD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,WAAW,EAAE,CAAC;YACxD,OAAO,MAAM,gBAAgB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;QAC9C,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,sCAAsC,GAAG,CAC7C,IAAY,EACZ,GAAsB,EACb,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,uEAAuE;IACvE,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAClC,IAAI,CAAC;QACH,OAAO,yBAAyB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACjD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,WAAW,EAAE,CAAC;YAC9B,OAAO,oBAAoB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;QAC5C,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,CAAA;AAC7B,MAAM,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,CAAA;AAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAA;AAE/B,MAAM,CAAC,MAAM,aAAa,GAAG,KAAK,EAAE,IAAY,EAAE,GAAuB,EAAE,EAAE;IAC3E,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,gBAAgB,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;IACpE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,iBAAiB,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IACxE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,oBAAoB,CAAC,IAAI,EAAE,GAAG,EAAE,SAAS,CAAC,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;IAChE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,gBAAgB,GAAG,KAAK,EAC5B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACnB,KAAK,GAAG,KAAK,EACK,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,cAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI
,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,YAAY;QACZ,MAAM,YAAY,CAAC,iBAAiB,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QAChD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,CAAC,GAAG,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAA;IACzC,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAChB,gBAAgB,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,CACvD,CACF,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAEhC,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;QACpB,OAAO,gBAAgB,CAAC,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,CAAC,CAAA;IACjD,CAAC;SAAM,IAAI,KAAK,KAAK,MAAM,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;YAC5D,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,CAAC,UAAU,EAAE,CAAC;YAChB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,YAAY,CAAC,kCAAkC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IACnE,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,oBAAoB,GAAG,CAC3B,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACnB,KAAK,GAAG,KAAK,EACJ,EAAE;IACX,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,YAAY;QACZ,gBAAgB,CAAC,GAAG,EAAE,CAAC,qBAAqB,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,IAAI,UAAU,GAAG,IAAI,CAAA;IACrB,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAA;QACzC,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,oBAAoB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,IAAI,UAAU,CAAA;IACjE,CAAC;IAED,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;QACpB,OAAO,oBAAoB,CAAC,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,CAAC,CAAA;IACrD,CAAC;SAAM,IAAI,KAAK,KAAK,MAAM,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;YAC5D,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,CAAC,UAAU,EAAE,CAAC;YAChB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,gBAAgB,CAAC,GAAG,EAAE;YACpB,sCAAsC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QACnD,CAAC,CAAC,CAAA;IACJ,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["// This is the same as rimrafPosix, with the following changes:\n//\n// 1. EBUSY, ENFILE, EMFILE trigger retries and/or exponential backoff\n// 2. All non-directories are removed first and then all directories are\n// removed in a second sweep.\n// 3. 
If we hit ENOTEMPTY in the second sweep, fall back to move-remove on\n// the that folder.\n//\n// Note: \"move then remove\" is 2-10 times slower, and just as unreliable.\n\nimport { Dirent, Stats } from 'fs'\nimport { parse, resolve } from 'path'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { fixEPERM, fixEPERMSync } from './fix-eperm.js'\nimport { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\nimport { retryBusy, retryBusySync } from './retry-busy.js'\nimport { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'\nconst { unlink, rmdir, lstat } = promises\n\nconst rimrafWindowsFile = retryBusy(fixEPERM(unlink))\nconst rimrafWindowsFileSync = retryBusySync(fixEPERMSync(unlinkSync))\nconst rimrafWindowsDirRetry = retryBusy(fixEPERM(rmdir))\nconst rimrafWindowsDirRetrySync = retryBusySync(fixEPERMSync(rmdirSync))\n\nconst rimrafWindowsDirMoveRemoveFallback = async (\n path: string,\n opt: RimrafAsyncOptions,\n): Promise => {\n /* c8 ignore start */\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n /* c8 ignore stop */\n // already filtered, remove from options so we don't call unnecessarily\n const { filter, ...options } = opt\n try {\n return await rimrafWindowsDirRetry(path, options)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOTEMPTY') {\n return await rimrafMoveRemove(path, options)\n }\n throw er\n }\n}\n\nconst rimrafWindowsDirMoveRemoveFallbackSync = (\n path: string,\n opt: RimrafSyncOptions,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n // already filtered, remove from options so we don't call unnecessarily\n const { filter, ...options } = opt\n try {\n return rimrafWindowsDirRetrySync(path, options)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOTEMPTY') {\n return rimrafMoveRemoveSync(path, options)\n }\n throw er\n }\n}\n\nconst START = Symbol('start')\nconst CHILD = Symbol('child')\nconst FINISH = Symbol('finish')\n\nexport const rimrafWindows = async (path: string, opt: RimrafAsyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafWindowsDir(path, opt, await lstat(path), START)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nexport const rimrafWindowsSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafWindowsDirSync(path, opt, lstatSync(path), START)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafWindowsDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n state = START,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n // is a file\n await ignoreENOENT(rimrafWindowsFile(path, opt))\n return true\n }\n\n const s = state === START ? CHILD : state\n const removedAll = (\n await Promise.all(\n entries.map(ent =>\n rimrafWindowsDir(resolve(path, ent.name), opt, ent, s),\n ),\n )\n ).reduce((a, b) => a && b, true)\n\n if (state === START) {\n return rimrafWindowsDir(path, opt, ent, FINISH)\n } else if (state === FINISH) {\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (!removedAll) {\n return false\n }\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(rimrafWindowsDirMoveRemoveFallback(path, opt))\n }\n return true\n}\n\nconst rimrafWindowsDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n state = START,\n): boolean => {\n const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n // is a file\n ignoreENOENTSync(() => rimrafWindowsFileSync(path, opt))\n return true\n }\n\n let removedAll = true\n for (const ent of entries) {\n const s = state === START ? 
CHILD : state\n const p = resolve(path, ent.name)\n removedAll = rimrafWindowsDirSync(p, opt, ent, s) && removedAll\n }\n\n if (state === START) {\n return rimrafWindowsDirSync(path, opt, ent, FINISH)\n } else if (state === FINISH) {\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (!removedAll) {\n return false\n }\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => {\n rimrafWindowsDirMoveRemoveFallbackSync(path, opt)\n })\n }\n return true\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts b/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts new file mode 100644 index 00000000000000..e191fd90da93d3 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafOptions } from './index.js'; +export declare const useNative: (opt?: RimrafAsyncOptions) => boolean; +export declare const useNativeSync: (opt?: RimrafOptions) => boolean; +//# sourceMappingURL=use-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts.map new file mode 100644 index 00000000000000..b182beb1707a7d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.d.ts","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,MAAM,YAAY,CAAA;AAa9D,eAAO,MAAM,SAAS,EAAE,CAAC,GAAG,CAAC,EAAE,kBAAkB,KAAK,OAGf,CAAA;AACvC,eAAO,MAAM,aAAa,EAAE,CAAC,GAAG,CAAC,EAAE,aAAa,KAAK,OAGd,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/use-native.js b/deps/npm/node_modules/rimraf/dist/esm/use-native.js new file mode 100644 index 00000000000000..bf1ea5a14c5aa8 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/use-native.js @@ -0,0 +1,16 @@ +import platform from './platform.js'; +const version = process.env.__TESTING_RIMRAF_NODE_VERSION__ || process.version; +const versArr = version.replace(/^v/, '').split('.'); +/* c8 ignore start */ +const [major = 0, minor = 0] = versArr.map(v => parseInt(v, 10)); +/* c8 ignore stop */ +const hasNative = major > 14 || (major === 14 && minor >= 14); +// we do NOT use native by default on Windows, because Node's native +// rm implementation is less advanced. Change this code if that changes. +export const useNative = !hasNative || platform === 'win32' ? + () => false + : opt => !opt?.signal && !opt?.filter; +export const useNativeSync = !hasNative || platform === 'win32' ? 
+ () => false + : opt => !opt?.signal && !opt?.filter; +//# sourceMappingURL=use-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/use-native.js.map b/deps/npm/node_modules/rimraf/dist/esm/use-native.js.map new file mode 100644 index 00000000000000..32da371e4181b6 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/use-native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.js","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AACA,OAAO,QAAQ,MAAM,eAAe,CAAA;AAEpC,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,OAAO,CAAC,OAAO,CAAA;AAC9E,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;AAEpD,qBAAqB;AACrB,MAAM,CAAC,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,CAAC,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAA;AAChE,oBAAoB;AACpB,MAAM,SAAS,GAAG,KAAK,GAAG,EAAE,IAAI,CAAC,KAAK,KAAK,EAAE,IAAI,KAAK,IAAI,EAAE,CAAC,CAAA;AAE7D,oEAAoE;AACpE,yEAAyE;AACzE,MAAM,CAAC,MAAM,SAAS,GACpB,CAAC,SAAS,IAAI,QAAQ,KAAK,OAAO,CAAC,CAAC;IAClC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,EAAE,MAAM,IAAI,CAAC,GAAG,EAAE,MAAM,CAAA;AACvC,MAAM,CAAC,MAAM,aAAa,GACxB,CAAC,SAAS,IAAI,QAAQ,KAAK,OAAO,CAAC,CAAC;IAClC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,EAAE,MAAM,IAAI,CAAC,GAAG,EAAE,MAAM,CAAA","sourcesContent":["import { RimrafAsyncOptions, RimrafOptions } from './index.js'\nimport platform from './platform.js'\n\nconst version = process.env.__TESTING_RIMRAF_NODE_VERSION__ || process.version\nconst versArr = version.replace(/^v/, '').split('.')\n\n/* c8 ignore start */\nconst [major = 0, minor = 0] = versArr.map(v => parseInt(v, 10))\n/* c8 ignore stop */\nconst hasNative = major > 14 || (major === 14 && minor >= 14)\n\n// we do NOT use native by default on Windows, because Node's native\n// rm implementation is less advanced. Change this code if that changes.\nexport const useNative: (opt?: RimrafAsyncOptions) => boolean =\n !hasNative || platform === 'win32' ?\n () => false\n : opt => !opt?.signal && !opt?.filter\nexport const useNativeSync: (opt?: RimrafOptions) => boolean =\n !hasNative || platform === 'win32' ?\n () => false\n : opt => !opt?.signal && !opt?.filter\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/package.json b/deps/npm/node_modules/rimraf/package.json new file mode 100644 index 00000000000000..212180c8e3fcc4 --- /dev/null +++ b/deps/npm/node_modules/rimraf/package.json @@ -0,0 +1,89 @@ +{ + "name": "rimraf", + "version": "5.0.10", + "publishConfig": { + "tag": "v5-legacy" + }, + "type": "module", + "tshy": { + "main": true, + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "bin": "./dist/esm/bin.mjs", + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "files": [ + "dist" + ], + "description": "A deep deletion module for node (like `rm -rf`)", + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": "git://github.com/isaacs/rimraf.git", + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tshy", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "format": "prettier --write . --log-level warn", + "benchmark": "node benchmark/index.js", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "prettier": { + "experimentalTernaries": true, + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/node": "^20.12.11", + "mkdirp": "^3.0.1", + "prettier": "^3.2.5", + "tap": "^19.0.1", + "tshy": "^1.14.0", + "typedoc": "^0.25.13", + "typescript": "^5.4.5" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "dependencies": { + "glob": "^10.3.7" + }, + "keywords": [ + "rm", + "rm -rf", + "rm -fr", + "remove", + "directory", + "cli", + "rmdir", + "recursive" + ], + "module": "./dist/esm/index.js" +} diff --git a/deps/npm/node_modules/ssri/package.json b/deps/npm/node_modules/ssri/package.json index 28395414e4643c..83306cd044ec3c 100644 --- a/deps/npm/node_modules/ssri/package.json +++ b/deps/npm/node_modules/ssri/package.json @@ -1,6 +1,6 @@ { "name": "ssri", - "version": "10.0.6", + "version": "12.0.0", "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.", "main": "lib/index.js", "files": [ @@ -13,11 +13,12 @@ "posttest": "npm run lint", "test": "tap", "coverage": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap" + "lintfix": "npm run eslint -- --fix", + "snap": "tap", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "check-coverage": true, @@ -50,16 +51,16 @@ "minipass": "^7.0.3" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true" } } diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js new file mode 100644 index 00000000000000..c541b93001517e --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js @@ -0,0 +1,206 @@ +'use strict' + +const net = require('net') +const tls = require('tls') +const { once } = require('events') +const timers = require('timers/promises') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js') +const Errors = require('./errors.js') +const { Agent: AgentBase } = require('agent-base') + +module.exports = class Agent extends AgentBase { + #options + #timeouts + #proxy + #noProxy + #ProxyAgent + + constructor (options = {}) { + const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options) + + super(normalizedOptions) + + this.#options = normalizedOptions + this.#timeouts = timeouts + + if (proxy) { + this.#proxy = new URL(proxy) + this.#noProxy = noProxy + this.#ProxyAgent = getProxyAgent(proxy) + } + } + + get proxy () { + return this.#proxy ? { url: this.#proxy } : {} + } + + #getProxy (options) { + if (!this.#proxy) { + return + } + + const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, { + proxy: this.#proxy, + noProxy: this.#noProxy, + }) + + if (!proxy) { + return + } + + const cacheKey = cacheOptions({ + ...options, + ...this.#options, + timeouts: this.#timeouts, + proxy, + }) + + if (proxyCache.has(cacheKey)) { + return proxyCache.get(cacheKey) + } + + let ProxyAgent = this.#ProxyAgent + if (Array.isArray(ProxyAgent)) { + ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0] + } + + const proxyAgent = new ProxyAgent(proxy, { + ...this.#options, + socketOptions: { family: this.#options.family }, + }) + proxyCache.set(cacheKey, proxyAgent) + + return proxyAgent + } + + // takes an array of promises and races them against the connection timeout + // which will throw the necessary error if it is hit. This will return the + // result of the promise race. + async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) { + if (timeout) { + const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal }) + .then(() => { + throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`) + }).catch((err) => { + if (err.name === 'AbortError') { + return + } + throw err + }) + promises.push(connectionTimeout) + } + + let result + try { + result = await Promise.race(promises) + ac.abort() + } catch (err) { + ac.abort() + throw err + } + return result + } + + async connect (request, options) { + // if the connection does not have its own lookup function + // set, then use the one from our options + options.lookup ??= this.#options.lookup + + let socket + let timeout = this.#timeouts.connection + const isSecureEndpoint = this.isSecureEndpoint(options) + + const proxy = this.#getProxy(options) + if (proxy) { + // some of the proxies will wait for the socket to fully connect before + // returning so we have to await this while also racing it against the + // connection timeout. 
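// The #timeoutConnection helper above races an arbitrary set of promises
// against a timer and throws ConnectionTimeoutError when the timer wins. A
// standalone sketch of that race pattern, using the same timers/promises +
// AbortController idea (the name raceWithTimeout is illustrative):
import { setTimeout as delay } from 'timers/promises'

const raceWithTimeout = async <T>(work: Promise<T>, ms: number): Promise<T> => {
  const ac = new AbortController()
  const timer = delay(ms, null, { signal: ac.signal })
    .then(() => {
      throw new Error(`timed out after ${ms}ms`)
    })
    .catch((err: Error) => {
      // the winner aborts the timer; swallow the resulting AbortError
      if (err.name !== 'AbortError') {
        throw err
      }
    })
  try {
    // the timer branch resolves void when aborted, so cast back to T
    return (await Promise.race([work, timer])) as T
  } finally {
    ac.abort()
  }
}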
+ const start = Date.now() + socket = await this.#timeoutConnection({ + options, + timeout, + promises: [proxy.connect(request, options)], + }) + // see how much time proxy.connect took and subtract it from + // the timeout + if (timeout) { + timeout = timeout - (Date.now() - start) + } + } else { + socket = (isSecureEndpoint ? tls : net).connect(options) + } + + socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs) + socket.setNoDelay(this.keepAlive) + + const abortController = new AbortController() + const { signal } = abortController + + const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting'] + ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal }) + : Promise.resolve() + + await this.#timeoutConnection({ + options, + timeout, + promises: [ + connectPromise, + once(socket, 'error', { signal }).then((err) => { + throw err[0] + }), + ], + }, abortController) + + if (this.#timeouts.idle) { + socket.setTimeout(this.#timeouts.idle, () => { + socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`)) + }) + } + + return socket + } + + addRequest (request, options) { + const proxy = this.#getProxy(options) + // it would be better to call proxy.addRequest here but this causes the + // http-proxy-agent to call its super.addRequest which causes the request + // to be added to the agent twice. since we only support 3 agents + // currently (see the required agents in proxy.js) we have manually + // checked that the only public methods we need to call are called in the + // next block. this could change in the future and presumably we would get + // failing tests until we have properly called the necessary methods on + // each of our proxy agents + if (proxy?.setRequestProps) { + proxy.setRequestProps(request, options) + } + + request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close') + + if (this.#timeouts.response) { + let responseTimeout + request.once('finish', () => { + responseTimeout = setTimeout(() => { + request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy)) + }, this.#timeouts.response) + }) + request.once('response', () => { + clearTimeout(responseTimeout) + }) + } + + if (this.#timeouts.transfer) { + let transferTimeout + request.once('response', (res) => { + transferTimeout = setTimeout(() => { + res.destroy(new Errors.TransferTimeoutError(request, this.#proxy)) + }, this.#timeouts.transfer) + res.once('close', () => { + clearTimeout(transferTimeout) + }) + }) + } + + return super.addRequest(request, options) + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js new file mode 100644 index 00000000000000..3c6946c566d736 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js @@ -0,0 +1,53 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const dns = require('dns') + +// this is a factory so that each request can have its own opts (i.e. ttl) +// while still sharing the cache across all requests +const cache = new LRUCache({ max: 50 }) + +const getOptions = ({ + family = 0, + hints = dns.ADDRCONFIG, + all = false, + verbatim = undefined, + ttl = 5 * 60 * 1000, + lookup = dns.lookup, +}) => ({ + // hints and lookup are returned since both are top level properties to (net|tls).connect + hints, + lookup: (hostname, ...args) => { + const callback = args.pop() // callback is always last arg + const lookupOptions = args[0] ??
{} + + const options = { + family, + hints, + all, + verbatim, + ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions), + } + + const key = JSON.stringify({ hostname, ...options }) + + if (cache.has(key)) { + const cached = cache.get(key) + return process.nextTick(callback, null, ...cached) + } + + lookup(hostname, options, (err, ...result) => { + if (err) { + return callback(err) + } + + cache.set(key, result, { ttl }) + return callback(null, ...result) + }) + }, +}) + +module.exports = { + cache, + getOptions, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js new file mode 100644 index 00000000000000..70475aec8eb357 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js @@ -0,0 +1,61 @@ +'use strict' + +class InvalidProxyProtocolError extends Error { + constructor (url) { + super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``) + this.code = 'EINVALIDPROXY' + this.proxy = url + } +} + +class ConnectionTimeoutError extends Error { + constructor (host) { + super(`Timeout connecting to host \`${host}\``) + this.code = 'ECONNECTIONTIMEOUT' + this.host = host + } +} + +class IdleTimeoutError extends Error { + constructor (host) { + super(`Idle timeout reached for host \`${host}\``) + this.code = 'EIDLETIMEOUT' + this.host = host + } +} + +class ResponseTimeoutError extends Error { + constructor (request, proxy) { + let msg = 'Response timeout ' + if (proxy) { + msg += `from proxy \`${proxy.host}\` ` + } + msg += `connecting to host \`${request.host}\`` + super(msg) + this.code = 'ERESPONSETIMEOUT' + this.proxy = proxy + this.request = request + } +} + +class TransferTimeoutError extends Error { + constructor (request, proxy) { + let msg = 'Transfer timeout ' + if (proxy) { + msg += `from proxy \`${proxy.host}\` ` + } + msg += `for \`${request.host}\`` + super(msg) + this.code = 'ETRANSFERTIMEOUT' + this.proxy = proxy + this.request = request + } +} + +module.exports = { + InvalidProxyProtocolError, + ConnectionTimeoutError, + IdleTimeoutError, + ResponseTimeoutError, + TransferTimeoutError, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js new file mode 100644 index 00000000000000..b33d6eaef07a21 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js @@ -0,0 +1,56 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, proxyCache } = require('./proxy.js') +const dns = require('./dns.js') +const Agent = require('./agents.js') + +const agentCache = new LRUCache({ max: 20 }) + +const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => { + // false has meaning so this can't be a simple truthiness check + if (agent != null) { + return agent + } + + url = new URL(url) + + const proxyForUrl = getProxy(url, { proxy, noProxy }) + const normalizedOptions = { + ...normalizeOptions(options), + proxy: proxyForUrl, + } + + const cacheKey = cacheOptions({ + ...normalizedOptions, + secureEndpoint: url.protocol === 'https:', + }) + + if (agentCache.has(cacheKey)) { + return agentCache.get(cacheKey) + } + + const newAgent = new Agent(normalizedOptions) + agentCache.set(cacheKey, newAgent) + + return newAgent +} + +module.exports = { + getAgent, + Agent, + // these are exported 
for backwards compatibility + HttpAgent: Agent, + HttpsAgent: Agent, + cache: { + proxy: proxyCache, + agent: agentCache, + dns: dns.cache, + clear: () => { + proxyCache.clear() + agentCache.clear() + dns.cache.clear() + }, + }, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js new file mode 100644 index 00000000000000..0bf53f725f0846 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js @@ -0,0 +1,86 @@ +'use strict' + +const dns = require('./dns') + +const normalizeOptions = (opts) => { + const family = parseInt(opts.family ?? '0', 10) + const keepAlive = opts.keepAlive ?? true + + const normalized = { + // nodejs http agent options. these are all the defaults + // but kept here to increase the likelihood of cache hits + // https://nodejs.org/api/http.html#new-agentoptions + keepAliveMsecs: keepAlive ? 1000 : undefined, + maxSockets: opts.maxSockets ?? 15, + maxTotalSockets: Infinity, + maxFreeSockets: keepAlive ? 256 : undefined, + scheduling: 'fifo', + // then spread the rest of the options + ...opts, + // we already set these to their defaults that we want + family, + keepAlive, + // our custom timeout options + timeouts: { + // the standard timeout option is mapped to our idle timeout + // and then deleted below + idle: opts.timeout ?? 0, + connection: 0, + response: 0, + transfer: 0, + ...opts.timeouts, + }, + // get the dns options that go at the top level of socket connection + ...dns.getOptions({ family, ...opts.dns }), + } + + // remove timeout since we already used it to set our own idle timeout + delete normalized.timeout + + return normalized +} + +const createKey = (obj) => { + let key = '' + const sorted = Object.entries(obj).sort((a, b) => a[0].localeCompare(b[0])) + for (let [k, v] of sorted) { + if (v == null) { + v = 'null' + } else if (v instanceof URL) { + v = v.toString() + } else if (typeof v === 'object') { + v = createKey(v) + } + key += `${k}:${v}:` + } + return key +} + +const cacheOptions = ({ secureEndpoint, ...options }) => createKey({ + secureEndpoint: !!secureEndpoint, + // socket connect options + family: options.family, + hints: options.hints, + localAddress: options.localAddress, + // tls specific connect options + strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false, + ca: secureEndpoint ? options.ca : null, + cert: secureEndpoint ? options.cert : null, + key: secureEndpoint ?
options.key : null, + // http agent options + keepAlive: options.keepAlive, + keepAliveMsecs: options.keepAliveMsecs, + maxSockets: options.maxSockets, + maxTotalSockets: options.maxTotalSockets, + maxFreeSockets: options.maxFreeSockets, + scheduling: options.scheduling, + // timeout options + timeouts: options.timeouts, + // proxy + proxy: options.proxy, +}) + +module.exports = { + normalizeOptions, + cacheOptions, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js new file mode 100644 index 00000000000000..6272e929e57bcf --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js @@ -0,0 +1,88 @@ +'use strict' + +const { HttpProxyAgent } = require('http-proxy-agent') +const { HttpsProxyAgent } = require('https-proxy-agent') +const { SocksProxyAgent } = require('socks-proxy-agent') +const { LRUCache } = require('lru-cache') +const { InvalidProxyProtocolError } = require('./errors.js') + +const PROXY_CACHE = new LRUCache({ max: 20 }) + +const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols) + +const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy']) + +const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => { + key = key.toLowerCase() + if (PROXY_ENV_KEYS.has(key)) { + acc[key] = value + } + return acc +}, {}) + +const getProxyAgent = (url) => { + url = new URL(url) + + const protocol = url.protocol.slice(0, -1) + if (SOCKS_PROTOCOLS.has(protocol)) { + return SocksProxyAgent + } + if (protocol === 'https' || protocol === 'http') { + return [HttpProxyAgent, HttpsProxyAgent] + } + + throw new InvalidProxyProtocolError(url) +} + +const isNoProxy = (url, noProxy) => { + if (typeof noProxy === 'string') { + noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean) + } + + if (!noProxy || !noProxy.length) { + return false + } + + const hostSegments = url.hostname.split('.').reverse() + + return noProxy.some((no) => { + const noSegments = no.split('.').filter(Boolean).reverse() + if (!noSegments.length) { + return false + } + + for (let i = 0; i < noSegments.length; i++) { + if (hostSegments[i] !== noSegments[i]) { + return false + } + } + + return true + }) +} + +const getProxy = (url, { proxy, noProxy }) => { + url = new URL(url) + + if (!proxy) { + proxy = url.protocol === 'https:' + ? 
PROXY_ENV.https_proxy + : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy + } + + if (!noProxy) { + noProxy = PROXY_ENV.no_proxy + } + + if (!proxy || isNoProxy(url, noProxy)) { + return null + } + + return new URL(proxy) +} + +module.exports = { + getProxyAgent, + getProxy, + proxyCache: PROXY_CACHE, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/package.json b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/package.json new file mode 100644 index 00000000000000..ef5b4e3228cc46 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/package.json @@ -0,0 +1,60 @@ +{ + "name": "@npmcli/agent", + "version": "2.2.2", + "description": "the http/https agent used by the npm cli", + "main": "lib/index.js", + "scripts": { + "gencerts": "bash scripts/create-cert.sh", + "test": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/agent/issues" + }, + "homepage": "https://github.com/npm/agent#readme", + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.3", + "publish": "true" + }, + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.3", + "minipass-fetch": "^3.0.3", + "nock": "^13.2.7", + "semver": "^7.5.4", + "simple-socks": "^3.1.0", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/agent.git" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md new file mode 100644 index 00000000000000..5fc208ff122e08 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
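[Editor's note: the following usage sketch is illustrative only and is not part of the upgrade diff. It assumes the package is resolvable as `@npmcli/agent`; the API surface (`getAgent`, the `cache` export, the `timeouts` and `noProxy` options) is taken from the files above. `getAgent` normalizes options, resolves a proxy from explicit options or the `*_proxy` environment variables, and reuses any Agent whose normalized options flatten to the same cache key.]

const { getAgent, cache } = require('@npmcli/agent')

// first call: options are normalized, a proxy is resolved, and a new Agent
// is stored under the flattened options key produced by cacheOptions()
const agent = getAgent('https://registry.npmjs.org', {
  family: 4,
  timeouts: { connection: 5000, idle: 60000 },
  noProxy: 'localhost,.internal.example', // suffix-matched against hostnames
})

// identical url + options flatten to the identical cache key, so the same
// Agent instance (and its socket pool) comes back rather than a new one
const again = getAgent('https://registry.npmjs.org', {
  family: 4,
  timeouts: { connection: 5000, idle: 60000 },
  noProxy: 'localhost,.internal.example',
})
console.log(agent === again) // true

// empties the proxy, agent, and dns LRU caches exported above
cache.clear()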
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js new file mode 100644 index 00000000000000..cb5982f79077ac --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js @@ -0,0 +1,20 @@ +// given an input that may or may not be an object, return an object that has +// a copy of every defined property listed in 'copy'. if the input is not an +// object, assign it to the property named by 'wrap' +const getOptions = (input, { copy, wrap }) => { + const result = {} + + if (input && typeof input === 'object') { + for (const prop of copy) { + if (input[prop] !== undefined) { + result[prop] = input[prop] + } + } + } else { + result[wrap] = input + } + + return result +} + +module.exports = getOptions diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js new file mode 100644 index 00000000000000..4d13bc037359d7 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js @@ -0,0 +1,9 @@ +const semver = require('semver') + +const satisfies = (range) => { + return semver.satisfies(process.version, range, { includePrerelease: true }) +} + +module.exports = { + satisfies, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE new file mode 100644 index 00000000000000..93546dfb7655bf --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE @@ -0,0 +1,15 @@ +(The MIT License) + +Copyright (c) 2011-2017 JP Richardson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files +(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, + merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS +OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js new file mode 100644 index 00000000000000..1cd1e05d0c533d --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js @@ -0,0 +1,129 @@ +'use strict' +const { inspect } = require('util') + +// adapted from node's internal/errors +// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js + +// close copy of node's internal SystemError class. 
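+// illustrative note (editor's sketch, not upstream code): the E() factory at
+// the bottom of this file wraps SystemError into one subclass per code, so
+//   const err = new (module.exports.ERR_FS_CP_EEXIST)(
+//     { message: 'x exists', path: 'x', syscall: 'cp', errno: EEXIST })
+// yields err.code === 'ERR_FS_CP_EEXIST', err.name === 'SystemError', and
+// the context object reachable through .info, .errno, and .syscall.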
+class SystemError { + constructor (code, prefix, context) { + // XXX context.code is undefined in all constructors used in cp/polyfill + // that may be a bug copied from node, maybe the constructor should use + // `code` not `errno`? nodejs/node#41104 + let message = `${prefix}: ${context.syscall} returned ` + + `${context.code} (${context.message})` + + if (context.path !== undefined) { + message += ` ${context.path}` + } + if (context.dest !== undefined) { + message += ` => ${context.dest}` + } + + this.code = code + Object.defineProperties(this, { + name: { + value: 'SystemError', + enumerable: false, + writable: true, + configurable: true, + }, + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + info: { + value: context, + enumerable: true, + configurable: true, + writable: false, + }, + errno: { + get () { + return context.errno + }, + set (value) { + context.errno = value + }, + enumerable: true, + configurable: true, + }, + syscall: { + get () { + return context.syscall + }, + set (value) { + context.syscall = value + }, + enumerable: true, + configurable: true, + }, + }) + + if (context.path !== undefined) { + Object.defineProperty(this, 'path', { + get () { + return context.path + }, + set (value) { + context.path = value + }, + enumerable: true, + configurable: true, + }) + } + + if (context.dest !== undefined) { + Object.defineProperty(this, 'dest', { + get () { + return context.dest + }, + set (value) { + context.dest = value + }, + enumerable: true, + configurable: true, + }) + } + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } + + [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { + return inspect(this, { + ...ctx, + getters: true, + customInspect: false, + }) + } +} + +function E (code, message) { + module.exports[code] = class NodeError extends SystemError { + constructor (ctx) { + super(code, message, ctx) + } + } +} + +E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') +E('ERR_FS_CP_EEXIST', 'Target already exists') +E('ERR_FS_CP_EINVAL', 'Invalid src or dest') +E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') +E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') +E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') +E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') +E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') +E('ERR_FS_EISDIR', 'Path is a directory') + +module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { + constructor (name, expected, actual) { + super() + this.code = 'ERR_INVALID_ARG_TYPE' + this.message = `The ${name} argument must be ${expected}. 
Received ${typeof actual}` + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js new file mode 100644 index 00000000000000..972ce7aa12abef --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js @@ -0,0 +1,22 @@ +const fs = require('fs/promises') +const getOptions = require('../common/get-options.js') +const node = require('../common/node.js') +const polyfill = require('./polyfill.js') + +// node 16.7.0 added fs.cp +const useNative = node.satisfies('>=16.7.0') + +const cp = async (src, dest, opts) => { + const options = getOptions(opts, { + copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], + }) + + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + return useNative + ? fs.cp(src, dest, options) + : polyfill(src, dest, options) +} + +module.exports = cp diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js new file mode 100644 index 00000000000000..80eb10de971918 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js @@ -0,0 +1,428 @@ +// this file is a modified version of the code in node 17.2.0 +// which is, in turn, a modified version of the fs-extra module on npm +// node core changes: +// - Use of the assert module has been replaced with core's error system. +// - All code related to the glob dependency has been removed. +// - Bring your own custom fs module is not currently supported. +// - Some basic code cleanup. +// changes here: +// - remove all callback related code +// - drop sync support +// - change assertions back to non-internal methods (see options.js) +// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows +'use strict' + +const { + ERR_FS_CP_DIR_TO_NON_DIR, + ERR_FS_CP_EEXIST, + ERR_FS_CP_EINVAL, + ERR_FS_CP_FIFO_PIPE, + ERR_FS_CP_NON_DIR_TO_DIR, + ERR_FS_CP_SOCKET, + ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, + ERR_FS_CP_UNKNOWN, + ERR_FS_EISDIR, + ERR_INVALID_ARG_TYPE, +} = require('./errors.js') +const { + constants: { + errno: { + EEXIST, + EISDIR, + EINVAL, + ENOTDIR, + }, + }, +} = require('os') +const { + chmod, + copyFile, + lstat, + mkdir, + readdir, + readlink, + stat, + symlink, + unlink, + utimes, +} = require('fs/promises') +const { + dirname, + isAbsolute, + join, + parse, + resolve, + sep, + toNamespacedPath, +} = require('path') +const { fileURLToPath } = require('url') + +const defaultOptions = { + dereference: false, + errorOnExist: false, + filter: undefined, + force: true, + preserveTimestamps: false, + recursive: false, +} + +async function cp (src, dest, opts) { + if (opts != null && typeof opts !== 'object') { + throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) + } + return cpFn( + toNamespacedPath(getValidatedPath(src)), + toNamespacedPath(getValidatedPath(dest)), + { ...defaultOptions, ...opts }) +} + +function getValidatedPath (fileURLOrPath) { + const path = fileURLOrPath != null && fileURLOrPath.href + && fileURLOrPath.origin + ? 
fileURLToPath(fileURLOrPath) + : fileURLOrPath + return path +} + +async function cpFn (src, dest, opts) { + // Warn about using preserveTimestamps on 32-bit node + // istanbul ignore next + if (opts.preserveTimestamps && process.arch === 'ia32') { + const warning = 'Using the preserveTimestamps option in 32-bit ' + + 'node is not recommended' + process.emitWarning(warning, 'TimestampPrecisionWarning') + } + const stats = await checkPaths(src, dest, opts) + const { srcStat, destStat } = stats + await checkParentPaths(src, srcStat, dest) + if (opts.filter) { + return handleFilter(checkParentDir, destStat, src, dest, opts) + } + return checkParentDir(destStat, src, dest, opts) +} + +async function checkPaths (src, dest, opts) { + const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) + if (destStat) { + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: 'src and dest cannot be the same', + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + if (srcStat.isDirectory() && !destStat.isDirectory()) { + throw new ERR_FS_CP_DIR_TO_NON_DIR({ + message: `cannot overwrite directory ${src} ` + + `with non-directory ${dest}`, + path: dest, + syscall: 'cp', + errno: EISDIR, + }) + } + if (!srcStat.isDirectory() && destStat.isDirectory()) { + throw new ERR_FS_CP_NON_DIR_TO_DIR({ + message: `cannot overwrite non-directory ${src} ` + + `with directory ${dest}`, + path: dest, + syscall: 'cp', + errno: ENOTDIR, + }) + } + } + + if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return { srcStat, destStat } +} + +function areIdentical (srcStat, destStat) { + return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && + destStat.dev === srcStat.dev +} + +function getStats (src, dest, opts) { + const statFunc = opts.dereference ? + (file) => stat(file, { bigint: true }) : + (file) => lstat(file, { bigint: true }) + return Promise.all([ + statFunc(src), + statFunc(dest).catch((err) => { + // istanbul ignore next: unsure how to cover. + if (err.code === 'ENOENT') { + return null + } + // istanbul ignore next: unsure how to cover. + throw err + }), + ]) +} + +async function checkParentDir (destStat, src, dest, opts) { + const destParent = dirname(dest) + const dirExists = await pathExists(destParent) + if (dirExists) { + return getStatsForCopy(destStat, src, dest, opts) + } + await mkdir(destParent, { recursive: true }) + return getStatsForCopy(destStat, src, dest, opts) +} + +function pathExists (dest) { + return stat(dest).then( + () => true, + // istanbul ignore next: not sure when this would occur + (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) +} + +// Recursively check if dest parent is a subdirectory of src. +// It works for all file types including symlinks since it +// checks the src and dest inodes. It starts from the deepest +// parent and stops once it reaches the src parent or the root path. 
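+// illustrative trace (editor's note, not upstream code):
+//   checkParentPaths('/a/b', srcStat, '/a/b/c/d/e') stats '/a/b/c/d', then
+//   '/a/b/c', then '/a/b'; that last stat matches srcStat's { dev, ino },
+//   so the copy is rejected with ERR_FS_CP_EINVAL. A dest outside src stops
+//   at the shared parent or the filesystem root and returns silently.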
+async function checkParentPaths (src, srcStat, dest) { + const srcParent = resolve(dirname(src)) + const destParent = resolve(dirname(dest)) + if (destParent === srcParent || destParent === parse(destParent).root) { + return + } + let destStat + try { + destStat = await stat(destParent, { bigint: true }) + } catch (err) { + // istanbul ignore else: not sure when this would occur + if (err.code === 'ENOENT') { + return + } + // istanbul ignore next: not sure when this would occur + throw err + } + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return checkParentPaths(src, srcStat, destParent) +} + +const normalizePathToArray = (path) => + resolve(path).split(sep).filter(Boolean) + +// Return true if dest is a subdir of src, otherwise false. +// It only checks the path strings. +function isSrcSubdir (src, dest) { + const srcArr = normalizePathToArray(src) + const destArr = normalizePathToArray(dest) + return srcArr.every((cur, i) => destArr[i] === cur) +} + +async function handleFilter (onInclude, destStat, src, dest, opts, cb) { + const include = await opts.filter(src, dest) + if (include) { + return onInclude(destStat, src, dest, opts, cb) + } +} + +function startCopy (destStat, src, dest, opts) { + if (opts.filter) { + return handleFilter(getStatsForCopy, destStat, src, dest, opts) + } + return getStatsForCopy(destStat, src, dest, opts) +} + +async function getStatsForCopy (destStat, src, dest, opts) { + const statFn = opts.dereference ? stat : lstat + const srcStat = await statFn(src) + // istanbul ignore else: can't portably test FIFO + if (srcStat.isDirectory() && opts.recursive) { + return onDir(srcStat, destStat, src, dest, opts) + } else if (srcStat.isDirectory()) { + throw new ERR_FS_EISDIR({ + message: `${src} is a directory (not copied)`, + path: src, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) { + return onFile(srcStat, destStat, src, dest, opts) + } else if (srcStat.isSymbolicLink()) { + return onLink(destStat, src, dest) + } else if (srcStat.isSocket()) { + throw new ERR_FS_CP_SOCKET({ + message: `cannot copy a socket file: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFIFO()) { + throw new ERR_FS_CP_FIFO_PIPE({ + message: `cannot copy a FIFO pipe: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // istanbul ignore next: should be unreachable + throw new ERR_FS_CP_UNKNOWN({ + message: `cannot copy an unknown file type: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) +} + +function onFile (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return _copyFile(srcStat, src, dest, opts) + } + return mayCopyFile(srcStat, src, dest, opts) +} + +async function mayCopyFile (srcStat, src, dest, opts) { + if (opts.force) { + await unlink(dest) + return _copyFile(srcStat, src, dest, opts) + } else if (opts.errorOnExist) { + throw new ERR_FS_CP_EEXIST({ + message: `${dest} already exists`, + path: dest, + syscall: 'cp', + errno: EEXIST, + }) + } +} + +async function _copyFile (srcStat, src, dest, opts) { + await copyFile(src, dest) + if (opts.preserveTimestamps) { + return handleTimestampsAndMode(srcStat.mode, src, dest) + } + return setDestMode(dest, srcStat.mode) +} + +async function handleTimestampsAndMode (srcMode, src, dest) { + // Make sure the file is writable before 
setting the timestamp + // otherwise open fails with EPERM when invoked with 'r+' + // (through utimes call) + if (fileIsNotWritable(srcMode)) { + await makeFileWritable(dest, srcMode) + return setDestTimestampsAndMode(srcMode, src, dest) + } + return setDestTimestampsAndMode(srcMode, src, dest) +} + +function fileIsNotWritable (srcMode) { + return (srcMode & 0o200) === 0 +} + +function makeFileWritable (dest, srcMode) { + return setDestMode(dest, srcMode | 0o200) +} + +async function setDestTimestampsAndMode (srcMode, src, dest) { + await setDestTimestamps(src, dest) + return setDestMode(dest, srcMode) +} + +function setDestMode (dest, srcMode) { + return chmod(dest, srcMode) +} + +async function setDestTimestamps (src, dest) { + // The initial srcStat.atime cannot be trusted + // because it is modified by the read(2) system call + // (See https://nodejs.org/api/fs.html#fs_stat_time_values) + const updatedSrcStat = await stat(src) + return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) +} + +function onDir (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return mkDirAndCopy(srcStat.mode, src, dest, opts) + } + return copyDir(src, dest, opts) +} + +async function mkDirAndCopy (srcMode, src, dest, opts) { + await mkdir(dest) + await copyDir(src, dest, opts) + return setDestMode(dest, srcMode) +} + +async function copyDir (src, dest, opts) { + const dir = await readdir(src) + for (let i = 0; i < dir.length; i++) { + const item = dir[i] + const srcItem = join(src, item) + const destItem = join(dest, item) + const { destStat } = await checkPaths(srcItem, destItem, opts) + await startCopy(destStat, srcItem, destItem, opts) + } +} + +async function onLink (destStat, src, dest) { + let resolvedSrc = await readlink(src) + if (!isAbsolute(resolvedSrc)) { + resolvedSrc = resolve(dirname(src), resolvedSrc) + } + if (!destStat) { + return symlink(resolvedSrc, dest) + } + let resolvedDest + try { + resolvedDest = await readlink(dest) + } catch (err) { + // Dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. If dest already exists, + // fs throws error anyway, so no need to guard against it here. + // istanbul ignore next: can only test on windows + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { + return symlink(resolvedSrc, dest) + } + // istanbul ignore next: should not be possible + throw err + } + if (!isAbsolute(resolvedDest)) { + resolvedDest = resolve(dirname(dest), resolvedDest) + } + if (isSrcSubdir(resolvedSrc, resolvedDest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + + `${resolvedDest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // Do not copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. 
+ const srcStat = await stat(src) + if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { + throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ + message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return copyLink(resolvedSrc, dest) +} + +async function copyLink (resolvedSrc, dest) { + await unlink(dest) + return symlink(resolvedSrc, dest) +} + +module.exports = cp diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js new file mode 100644 index 00000000000000..81c746304cc428 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js @@ -0,0 +1,13 @@ +'use strict' + +const cp = require('./cp/index.js') +const withTempDir = require('./with-temp-dir.js') +const readdirScoped = require('./readdir-scoped.js') +const moveFile = require('./move-file.js') + +module.exports = { + cp, + withTempDir, + readdirScoped, + moveFile, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js new file mode 100644 index 00000000000000..d56e06d384659a --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js @@ -0,0 +1,78 @@ +const { dirname, join, resolve, relative, isAbsolute } = require('path') +const fs = require('fs/promises') + +const pathExists = async path => { + try { + await fs.access(path) + return true + } catch (er) { + return er.code !== 'ENOENT' + } +} + +const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { + if (!source || !destination) { + throw new TypeError('`source` and `destination` file required') + } + + options = { + overwrite: true, + ...options, + } + + if (!options.overwrite && await pathExists(destination)) { + throw new Error(`The destination file exists: ${destination}`) + } + + await fs.mkdir(dirname(destination), { recursive: true }) + + try { + await fs.rename(source, destination) + } catch (error) { + if (error.code === 'EXDEV' || error.code === 'EPERM') { + const sourceStat = await fs.lstat(source) + if (sourceStat.isDirectory()) { + const files = await fs.readdir(source) + await Promise.all(files.map((file) => + moveFile(join(source, file), join(destination, file), options, false, symlinks) + )) + } else if (sourceStat.isSymbolicLink()) { + symlinks.push({ source, destination }) + } else { + await fs.copyFile(source, destination) + } + } else { + throw error + } + } + + if (root) { + await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { + let target = await fs.readlink(symSource) + // junction symlinks in windows will be absolute paths, so we need to + // make sure they point to the symlink destination + if (isAbsolute(target)) { + target = resolve(symDestination, relative(symSource, target)) + } + // try to determine what the actual file is so we can create the correct + // type of symlink in windows + let targetStat = 'file' + try { + targetStat = await fs.stat(resolve(dirname(symSource), target)) + if (targetStat.isDirectory()) { + targetStat = 'junction' + } + } catch { + // targetStat remains 'file' + } + await fs.symlink( + target, + symDestination, + targetStat + ) + })) + await fs.rm(source, { recursive: true, force: true }) + } +} + +module.exports = moveFile diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js 
b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js new file mode 100644 index 00000000000000..cd601dfbe7486b --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js @@ -0,0 +1,20 @@ +const { readdir } = require('fs/promises') +const { join } = require('path') + +const readdirScoped = async (dir) => { + const results = [] + + for (const item of await readdir(dir)) { + if (item.startsWith('@')) { + for (const scopedItem of await readdir(join(dir, item))) { + results.push(join(item, scopedItem)) + } + } else { + results.push(item) + } + } + + return results +} + +module.exports = readdirScoped diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js new file mode 100644 index 00000000000000..0738ac4f29e1be --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js @@ -0,0 +1,39 @@ +const { join, sep } = require('path') + +const getOptions = require('./common/get-options.js') +const { mkdir, mkdtemp, rm } = require('fs/promises') + +// create a temp directory, ensure its permissions match its parent, then call +// the supplied function passing it the path to the directory. clean up after +// the function finishes, whether it throws or not +const withTempDir = async (root, fn, opts) => { + const options = getOptions(opts, { + copy: ['tmpPrefix'], + }) + // create the directory + await mkdir(root, { recursive: true }) + + const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || '')) + let err + let result + + try { + result = await fn(target) + } catch (_err) { + err = _err + } + + try { + await rm(target, { force: true, recursive: true }) + } catch { + // ignore errors + } + + if (err) { + throw err + } + + return result +} + +module.exports = withTempDir diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/package.json b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/package.json new file mode 100644 index 00000000000000..5261a11b78000e --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/package.json @@ -0,0 +1,52 @@ +{ + "name": "@npmcli/fs", + "version": "3.1.1", + "description": "filesystem utilities for the npm cli", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "snap": "tap", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/fs.git" + }, + "keywords": [ + "npm", + "oss" + ], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.1" + }, + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.22.0" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/LICENSE.md b/deps/npm/node_modules/tuf-js/node_modules/cacache/LICENSE.md new file mode 100644 index 00000000000000..8d28acf866d932 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/path.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/path.js new file mode 100644 index 00000000000000..ad5a76a4f73f26 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/read.js new file mode 100644 index 00000000000000..5f6192c3cec566 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/read.js @@ -0,0 +1,165 @@ +'use strict' + +const fs = require('fs/promises') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +async function read (cache, integrity, opts = {}) { + const { size } = opts + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = size ? 
{ size } : await fs.stat(cpath) + return { stat, cpath, sri } + }) + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + const data = await fs.readFile(cpath, { encoding: null }) + + if (stat.size !== data.length) { + throw sizeError(stat.size, data.length) + } + + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = size ? { size } : await fs.stat(cpath) + return { stat, cpath, sri } + }) + + return readPipeline(cpath, stat.size, sri, stream) + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.copy = copy + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath) => { + return fs.copyFile(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +async function hasContent (cache, integrity) { + if (!integrity) { + return false + } + + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.stat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } +} + +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. 
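+  // illustrative (editor's note, not upstream code): for an integrity string
+  // carrying two digests, e.g. 'sha512-AAA... sha512-BBB...', every digest's
+  // content path is probed in parallel; the first non-error result wins, a
+  // collective ENOENT is thrown when no digest has data on disk, and any
+  // other error is rethrown.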
+ const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js new file mode 100644 index 00000000000000..ce58d679e4cb25 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,18 @@ +'use strict' + +const fs = require('fs/promises') +const contentPath = require('./path') +const { hasContent } = require('./read') + +module.exports = rm + +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) + return true + } else { + return false + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/write.js new file mode 100644 index 00000000000000..e7187abca8788a --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/write.js @@ -0,0 +1,206 @@ +'use strict' + +const events = require('events') + +const contentPath = require('./path') +const fs = require('fs/promises') +const { moveFile } = require('@npmcli/fs') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const fsm = require('fs-minipass') + +module.exports = write + +// Cache of move operations in process so we don't duplicate +const moveOperations = new Map() + +async function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + + if (typeof size === 'number' && data.length !== size) { + throw sizeError(size, data.length) + } + + const sri = ssri.fromData(data, algorithms ? 
{ algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + throw checksumError(integrity, sri) + } + + for (const algo in sri) { + const tmp = await makeTmp(cache, opts) + const hash = sri[algo].toString() + try { + await fs.writeFile(tmp.target, data, { flag: 'wx' }) + await moveToDestination(tmp, cache, hash, opts) + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } + } + return { integrity: sri, size: data.length } +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + this.handleContentP.catch(error => this.emit('error', error)) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. + return Promise.reject(e).catch(cb) + } + // eslint-disable-next-line promise/catch-or-return + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + // eslint-disable-next-line promise/always-return + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +async function handleContent (inputStream, cache, opts) { + const tmp = await makeTmp(cache, opts) + try { + const res = await pipeToTmp(inputStream, cache, tmp.target, opts) + await moveToDestination( + tmp, + cache, + res.integrity, + opts + ) + return res + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } +} + +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } +} + +async function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) + return { + target: tmpTarget, + moved: false, + } +} + +async function moveToDestination (tmp, cache, sri) { + const destination = 
contentPath(cache, sri) + const destDir = path.dirname(destination) + if (moveOperations.has(destination)) { + return moveOperations.get(destination) + } + moveOperations.set( + destination, + fs.mkdir(destDir, { recursive: true }) + .then(async () => { + await moveFile(tmp.target, destination, { overwrite: false }) + tmp.moved = true + return tmp.moved + }) + .catch(err => { + if (!err.message.startsWith('The destination file exists')) { + throw Object.assign(err, { code: 'EEXIST' }) + } + }).finally(() => { + moveOperations.delete(destination) + }) + + ) + return moveOperations.get(destination) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js new file mode 100644 index 00000000000000..89c28f2f257d48 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,336 @@ +'use strict' + +const crypto = require('crypto') +const { + appendFile, + mkdir, + readFile, + readdir, + rm, + writeFile, +} = require('fs/promises') +const { Minipass } = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const contentPath = require('./content/path') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const { moveFile } = require('@npmcli/fs') + +const pMap = require('p-map') +const lsStreamConcurrency = 5 + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. + // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. 
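+    // illustrative (editor's note, not upstream code): with bucket entries
+    // [putA, putB, delete(null), putC], the backwards scan visits putC first
+    // and keeps it, then hits the null-integrity delete and breaks, so putA
+    // and putB never reach newEntries.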
+ if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await mkdir(path.dirname(target), { recursive: true }) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rm(tmp.target, { recursive: true, force: true }) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await mkdir(path.dirname(bucket), { recursive: true }) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + } + + // write the file atomically + const tmp = await setup() + try { + await write(tmp) + } finally { + await teardown(tmp) + } + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +async function insert (cache, key, integrity, opts = {}) { + const { metadata, size, time } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: time || Date.now(), + size, + metadata, + } + try { + await mkdir(path.dirname(bucket), { recursive: true }) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. 
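+    // illustrative (editor's note, not upstream code): a written bucket line
+    // looks like '<sha1 hex of the JSON>\t<the JSON itself>'; readers
+    // recompute the sha1 over the JSON text and silently drop any line whose
+    // hash prefix no longer matches (see _bucketEntries below).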
+ await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +async function find (cache, key) { + const bucket = bucketPath(cache, key) + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rm(bucket, { recursive: true, force: true }) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await pMap(buckets, async (bucket) => { + const bucketPath = path.join(indexDir, bucket) + const subbuckets = await readdirOrEmpty(bucketPath) + await pMap(subbuckets, async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await pMap(subbucketEntries, async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess? + const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + }, + { concurrency: lsStreamConcurrency }) + }, + { concurrency: lsStreamConcurrency }) + }, + { concurrency: lsStreamConcurrency }) + stream.end() + return stream + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.ls = ls + +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) +} + +module.exports.bucketEntries = bucketEntries + +async function bucketEntries (bucket, filter) { + const data = await readFile(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! 
+ // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (_) { + // eslint-ignore-next-line no-empty-block + } + // coverage disabled here, no need to test with an entry that parses to something falsey + // istanbul ignore else + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/get.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/get.js new file mode 100644 index 00000000000000..80ec206c7ecaaa --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/get.js @@ -0,0 +1,170 @@ +'use strict' + +const Collect = require('minipass-collect') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +async function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} +module.exports = getData + +async function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return memoized + } + + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res +} +module.exports.byDigest = getDataByDigest + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream (cache, key, opts = {}) { + 
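+  // illustrative summary (editor's note, not upstream code): a memoized hit
+  // short-circuits to an in-memory Minipass; otherwise the index entry is
+  // looked up asynchronously and its content piped through, while the
+  // 'newListener' hook replays 'metadata', 'integrity', and 'size' events
+  // for late subscribers.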
const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + return stream + }).catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} + +module.exports.copy = copy + +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key +} + +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/index.js new file mode 100644 index 00000000000000..c9b0da5f3a271b --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/index.js @@ -0,0 +1,42 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.stream = get.stream +module.exports.get.stream.byDigest = get.stream.byDigest 
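+// A hedged usage sketch of the API assembled in this file (illustrative, not
+// part of the vendored source). The cache path and key are placeholders, and
+// the calls assume an async context:
+//
+//   const cacache = require('cacache')
+//   const sri = await cacache.put('/tmp/my-cache', 'my-key', 'hello')
+//   const { data, metadata } = await cacache.get('/tmp/my-cache', 'my-key')
+//   const byContent = await cacache.get.byDigest('/tmp/my-cache', sri)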
+module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/memoization.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/memoization.js new file mode 100644 index 00000000000000..2ecc60912e4563 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/memoization.js @@ -0,0 +1,72 @@ +'use strict' + +const { LRUCache } = require('lru-cache') + +const MEMOIZED = new LRUCache({ + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.clear() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/put.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/put.js new file mode 100644 index 00000000000000..9fc932d5f6dec5 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/put.js @@ -0,0 +1,80 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +async function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return res.integrity +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { 
memoize } = opts + opts = putOpts(opts) + let integrity + let size + let error + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + .on('error', (err) => { + error = err + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + async flush () { + if (!error) { + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) + } + }, + })) + + return pipeline +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/rm.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/rm.js new file mode 100644 index 00000000000000..a94760c7cf2430 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const { rm } = require('fs/promises') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +async function all (cache) { + memo.clearMemoized() + const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) + return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js new file mode 100644 index 00000000000000..8500c1c16a429f --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js @@ -0,0 +1,7 @@ +'use strict' + +const { glob } = require('glob') +const path = require('path') + +const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) +module.exports = (path, options) => glob(globify(path), options) diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 00000000000000..445599b5038088 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 00000000000000..0bf5302136ebeb --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js @@ -0,0 
+1,26 @@ +'use strict' + +const { withTempDir } = require('@npmcli/fs') +const fs = require('fs/promises') +const path = require('path') + +module.exports.mkdir = mktmpdir + +async function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return withTempDir(path.join(cache, 'tmp'), cb, opts) +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/verify.js new file mode 100644 index 00000000000000..d7423da1295b68 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/verify.js @@ -0,0 +1,257 @@ +'use strict' + +const { + mkdir, + readFile, + rm, + stat, + truncate, + writeFile, +} = require('fs/promises') +const pMap = require('p-map') +const contentPath = require('./content/path') +const fsm = require('fs-minipass') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const path = require('path') +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +async function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats +} + +async function markStartTime () { + return { startTime: new Date() } +} + +async function markEndTime () { + return { endTime: new Date() } +} + +async function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + await mkdir(cache, { recursive: true }) + return null +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rm it. 
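+// A minimal sketch of the mark step under the same assumptions (illustrative
+// only; the real loop below also honors opts.filter before marking):
+//
+//   const live = new Set()
+//   for await (const entry of index.lsStream(cache)) {
+//     const integrity = ssri.parse(entry.integrity)
+//     for (const algo in integrity) {
+//       live.add(integrity[algo].toString())
+//     }
+//   }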
+// +async function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + // integrity is stringified, re-parse it so we can get each hash + const integrity = ssri.parse(entry.integrity) + for (const algo in integrity) { + liveContent.add(integrity[algo].toString()) + } + }) + await new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }) + const contentDir = contentPath.contentDir(cache) + const files = await glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await stat(f) + await rm(f, { recursive: true, force: true }) + stats.reclaimedSize += s.size + } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { + throw err + } + + await rm(filepath, { recursive: true, force: true }) + contentInfo.valid = false + } + return contentInfo +} + +async function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function rebuildBucket (cache, bucket, stats) { + await truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
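+  // That is: entries are re-inserted one at a time with an awaited, serial
+  // for..of rather than in parallel via Promise.all, so writes to this
+  // bucket file land in a deterministic order.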
+ for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + time: entry.time, + }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) +} + +async function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + return writeFile(verifile, `${Date.now()}`) +} + +module.exports.lastRun = lastRun + +async function lastRun (cache) { + const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/package.json b/deps/npm/node_modules/tuf-js/node_modules/cacache/package.json new file mode 100644 index 00000000000000..6e6219158ed759 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/package.json @@ -0,0 +1,82 @@ +{ + "name": "cacache", + "version": "18.0.4", + "cache-version": { + "content": "2", + "index": "5" + }, + "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "coverage": "tap", + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/cacache.git" + }, + "keywords": [ + "cache", + "caching", + "content-addressable", + "sri", + "sri hash", + "subresource integrity", + "cache", + "storage", + "store", + "file store", + "filesystem", + "disk cache", + "disk storage" + ], + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", + "minipass-collect": "^2.0.1", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "windowsCI": false, + "version": "4.22.0", + "publish": "true" + }, + "author": "GitHub Inc.", + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE new file mode 100644 index 00000000000000..1808eb2844231c --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE @@ -0,0 +1,16 @@ +ISC License + +Copyright 2017-2022 (c) npm, Inc. 
+ +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js new file mode 100644 index 00000000000000..bfcfacbcc95e18 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js @@ -0,0 +1,471 @@ +const { Request, Response } = require('minipass-fetch') +const { Minipass } = require('minipass') +const MinipassFlush = require('minipass-flush') +const cacache = require('cacache') +const url = require('url') + +const CachingMinipassPipeline = require('../pipeline.js') +const CachePolicy = require('./policy.js') +const cacheKey = require('./key.js') +const remote = require('../remote.js') + +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) + +// allow list for request headers that will be written to the cache index +// note: we will also store any request headers +// that are named in a response's vary header +const KEEP_REQUEST_HEADERS = [ + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept', + 'cache-control', +] + +// allow list for response headers that will be written to the cache index +// note: we must not store the real response's age header, or when we load +// a cache policy based on the metadata it will think the cached response +// is always stale +const KEEP_RESPONSE_HEADERS = [ + 'cache-control', + 'content-encoding', + 'content-language', + 'content-type', + 'date', + 'etag', + 'expires', + 'last-modified', + 'link', + 'location', + 'pragma', + 'vary', +] + +// return an object containing all metadata to be written to the index +const getMetadata = (request, response, options) => { + const metadata = { + time: Date.now(), + url: request.url, + reqHeaders: {}, + resHeaders: {}, + + // options on which we must match the request and vary the response + options: { + compress: options.compress != null ? 
options.compress : request.compress, + }, + } + + // only save the status if it's not a 200 or 304 + if (response.status !== 200 && response.status !== 304) { + metadata.status = response.status + } + + for (const name of KEEP_REQUEST_HEADERS) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + + // if the request's host header differs from the host in the url + // we need to keep it, otherwise it's just noise and we ignore it + const host = request.headers.get('host') + const parsedUrl = new url.URL(request.url) + if (host && parsedUrl.host !== host) { + metadata.reqHeaders.host = host + } + + // if the response has a vary header, make sure + // we store the relevant request headers too + if (response.headers.has('vary')) { + const vary = response.headers.get('vary') + // a vary of "*" means every header causes a different response. + // in that scenario, we do not include any additional headers + // as the freshness check will always fail anyway and we don't + // want to bloat the cache indexes + if (vary !== '*') { + // copy any other request headers that will vary the response + const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) + for (const name of varyHeaders) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + } + } + + for (const name of KEEP_RESPONSE_HEADERS) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + for (const name of options.cacheAdditionalHeaders) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + return metadata +} + +// symbols used to hide objects that may be lazily evaluated in a getter +const _request = Symbol('request') +const _response = Symbol('response') +const _policy = Symbol('policy') + +class CacheEntry { + constructor ({ entry, request, response, options }) { + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. 
we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else { + this.key = cacheKey(request) + } + + this.options = options + + // these properties are behind getters that lazily evaluate + this[_request] = request + this[_response] = response + this[_policy] = null + } + + // returns a CacheEntry instance that satisfies the given request + // or undefined if no existing entry satisfies + static async find (request, options) { + try { + // compacts the index and returns an array of unique entries + var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { + const entryA = new CacheEntry({ entry: A, options }) + const entryB = new CacheEntry({ entry: B, options }) + return entryA.policy.satisfies(entryB.request) + }, { + validateEntry: (entry) => { + // clean out entries with a buggy content-encoding value + if (entry.metadata && + entry.metadata.resHeaders && + entry.metadata.resHeaders['content-encoding'] === null) { + return false + } + + // if an integrity is null, it needs to have a status specified + if (entry.integrity === null) { + return !!(entry.metadata && entry.metadata.status) + } + + return true + }, + }) + } catch (err) { + // if the compact request fails, ignore the error and return + return + } + + // a cache mode of 'reload' means to behave as though we have no cache + // on the way to the network. return undefined to allow cacheFetch to + // create a brand new request no matter what. + if (options.cache === 'reload') { + return + } + + // find the specific entry that satisfies the request + let match + for (const entry of matches) { + const _entry = new CacheEntry({ + entry, + options, + }) + + if (_entry.policy.satisfies(request)) { + match = _entry + break + } + } + + return match + } + + // if the user made a PUT/POST/PATCH then we invalidate our + // cache for the same url by deleting the index entirely + static async invalidate (request, options) { + const key = cacheKey(request) + try { + await cacache.rm.entry(options.cachePath, key, { removeFully: true }) + } catch (err) { + // ignore errors + } + } + + get request () { + if (!this[_request]) { + this[_request] = new Request(this.entry.metadata.url, { + method: 'GET', + headers: this.entry.metadata.reqHeaders, + ...this.entry.metadata.options, + }) + } + + return this[_request] + } + + get response () { + if (!this[_response]) { + this[_response] = new Response(null, { + url: this.entry.metadata.url, + counter: this.options.counter, + status: this.entry.metadata.status || 200, + headers: { + ...this.entry.metadata.resHeaders, + 'content-length': this.entry.size, + }, + }) + } + + return this[_response] + } + + get policy () { + if (!this[_policy]) { + this[_policy] = new CachePolicy({ + entry: this.entry, + request: this.request, + response: this.response, + options: this.options, + }) + } + + return this[_policy] + } + + // wraps the response in a pipeline that stores the data + // in the cache while the user consumes it + async store (status) { + // if we got a status other than 200, 301, or 308, + // or the CachePolicy forbid storage, append the + // cache status header and return it untouched + if ( + this.request.method !== 'GET' || + ![200, 301, 308].includes(this.response.status) || + !this.policy.storable() + ) { + this.response.headers.set('x-local-cache-status', 'skip') + return this.response + } + + const size = 
this.response.headers.get('content-length') + const cacheOpts = { + algorithms: this.options.algorithms, + metadata: getMetadata(this.request, this.response, this.options), + size, + integrity: this.options.integrity, + integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, + } + + let body = null + // we only set a body if the status is a 200, redirects are + // stored as metadata only + if (this.response.status === 200) { + let cacheWriteResolve, cacheWriteReject + const cacheWritePromise = new Promise((resolve, reject) => { + cacheWriteResolve = resolve + cacheWriteReject = reject + }).catch((err) => { + body.emit('error', err) + }) + + body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ + flush () { + return cacheWritePromise + }, + })) + // this is always true since if we aren't reusing the one from the remote fetch, we + // are using the one from cacache + body.hasIntegrityEmitter = true + + const onResume = () => { + const tee = new Minipass() + const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) + // re-emit the integrity and size events on our new response body so they can be reused + cacheStream.on('integrity', i => body.emit('integrity', i)) + cacheStream.on('size', s => body.emit('size', s)) + // stick a flag on here so downstream users will know if they can expect integrity events + tee.pipe(cacheStream) + // TODO if the cache write fails, log a warning but return the response anyway + // eslint-disable-next-line promise/catch-or-return + cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) + body.unshift(tee) + body.unshift(this.response.body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + } else { + await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + } + + // note: we do not set the x-local-cache-hash header because we do not know + // the hash value until after the write to the cache completes, which doesn't + // happen until after the response has been sent and it's too late to write + // the header anyway + this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + this.response.headers.set('x-local-cache-mode', 'stream') + this.response.headers.set('x-local-cache-status', status) + this.response.headers.set('x-local-cache-time', new Date().toISOString()) + const newResponse = new Response(body, { + url: this.response.url, + status: this.response.status, + headers: this.response.headers, + counter: this.options.counter, + }) + return newResponse + } + + // use the cached data to create a response and return it + async respond (method, options, status) { + let response + if (method === 'HEAD' || [301, 308].includes(this.response.status)) { + // if the request is a HEAD, or the response is a redirect, + // then the metadata in the entry already includes everything + // we need to build a response + response = this.response + } else { + // we're responding with a full cached response, so create a body + // that reads from cacache and attach it to a new Response + const body = new Minipass() + const headers = { ...this.policy.responseHeaders() } + + const onResume = () => { + const cacheStream = cacache.get.stream.byDigest( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if 
(err.code === 'EINTEGRITY') { + await cacache.rm.content( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + } + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { + await CacheEntry.invalidate(this.request, this.options) + } + body.emit('error', err) + cacheStream.resume() + }) + // emit the integrity and size events based on our metadata so we're consistent + body.emit('integrity', this.entry.integrity) + body.emit('size', Number(headers['content-length'])) + cacheStream.pipe(body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + response = new Response(body, { + url: this.entry.metadata.url, + counter: options.counter, + status: 200, + headers, + }) + } + + response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) + response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + response.headers.set('x-local-cache-mode', 'stream') + response.headers.set('x-local-cache-status', status) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) + return response + } + + // use the provided request along with this cache entry to + // revalidate the stored response. returns a response, either + // from the cache or from the update + async revalidate (request, options) { + const revalidateRequest = new Request(request, { + headers: this.policy.revalidationHeaders(request), + }) + + try { + // NOTE: be sure to remove the headers property from the + // user supplied options, since we have already defined + // them on the new request object. if they're still in the + // options then those will overwrite the ones from the policy + var response = await remote(revalidateRequest, { + ...options, + headers: undefined, + }) + } catch (err) { + // if the network fetch fails, return the stale + // cached response unless it has a cache-control + // of 'must-revalidate' + if (!this.policy.mustRevalidate) { + return this.respond(request.method, options, 'stale') + } + + throw err + } + + if (this.policy.revalidated(revalidateRequest, response)) { + // we got a 304, write a new index to the cache and respond from cache + const metadata = getMetadata(request, response, options) + // 304 responses do not include headers that are specific to the response data + // since they do not include a body, so we copy values for headers that were + // in the old cache entry to the new one, if the new metadata does not already + // include that header + for (const name of KEEP_RESPONSE_HEADERS) { + if ( + !hasOwnProperty(metadata.resHeaders, name) && + hasOwnProperty(this.entry.metadata.resHeaders, name) + ) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + } + + for (const name of options.cacheAdditionalHeaders) { + const inMeta = hasOwnProperty(metadata.resHeaders, name) + const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) + const inPolicy = hasOwnProperty(this.policy.response.headers, name) + + // if the header is in the existing entry, but it is not in the metadata + // then we need to write it to the metadata as this will refresh the on-disk cache + if (!inMeta && inEntry) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + // if the header is in the metadata, but not in the policy, then we need to set + // it in the policy so that it's included in the immediate response. 
future + // responses will load a new cache entry, so we don't need to change that + if (!inPolicy && inMeta) { + this.policy.response.headers[name] = metadata.resHeaders[name] + } + } + + try { + await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { + size: this.entry.size, + metadata, + }) + } catch (err) { + // if updating the cache index fails, we ignore it and + // respond anyway + } + return this.respond(request.method, options, 'revalidated') + } + + // if we got a modified response, create a new entry based on it + const newEntry = new CacheEntry({ + request, + response, + options, + }) + + // respond with the new entry while writing it to the cache + return newEntry.store('updated') + } +} + +module.exports = CacheEntry diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js new file mode 100644 index 00000000000000..67a66573bebe66 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js @@ -0,0 +1,11 @@ +class NotCachedError extends Error { + constructor (url) { + /* eslint-disable-next-line max-len */ + super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) + this.code = 'ENOTCACHED' + } +} + +module.exports = { + NotCachedError, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js new file mode 100644 index 00000000000000..0de49d23fb9336 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js @@ -0,0 +1,49 @@ +const { NotCachedError } = require('./errors.js') +const CacheEntry = require('./entry.js') +const remote = require('../remote.js') + +// do whatever is necessary to get a Response and return it +const cacheFetch = async (request, options) => { + // try to find a cached entry that satisfies this request + const entry = await CacheEntry.find(request, options) + if (!entry) { + // no cached result, if the cache mode is 'only-if-cached' that's a failure + if (options.cache === 'only-if-cached') { + throw new NotCachedError(request.url) + } + + // otherwise, we make a request, store it and return it + const response = await remote(request, options) + const newEntry = new CacheEntry({ request, response, options }) + return newEntry.store('miss') + } + + // we have a cached response that satisfies this request, however if the cache + // mode is 'no-cache' then we send the revalidation request no matter what + if (options.cache === 'no-cache') { + return entry.revalidate(request, options) + } + + // if the cached entry is not stale, or if the cache mode is 'force-cache' or + // 'only-if-cached' we can respond with the cached entry. set the status + // based on the result of needsRevalidation and respond + const _needsRevalidation = entry.policy.needsRevalidation(request) + if (options.cache === 'force-cache' || + options.cache === 'only-if-cached' || + !_needsRevalidation) { + return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') + } + + // if we got here, the cache entry is stale so revalidate it + return entry.revalidate(request, options) +} + +cacheFetch.invalidate = async (request, options) => { + if (!options.cachePath) { + return + } + + return CacheEntry.invalidate(request, options) +} + +module.exports = cacheFetch diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js new file mode 100644 index 00000000000000..f7684d562b7fae --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js @@ -0,0 +1,17 @@ +const { URL, format } = require('url') + +// options passed to url.format() when generating a key +const formatOptions = { + auth: false, + fragment: false, + search: true, + unicode: false, +} + +// returns a string to be used as the cache key for the Request +const cacheKey = (request) => { + const parsed = new URL(request.url) + return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` +} + +module.exports = cacheKey diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js new file mode 100644 index 00000000000000..ada3c8600dae92 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js @@ -0,0 +1,161 @@ +const CacheSemantics = require('http-cache-semantics') +const Negotiator = require('negotiator') +const ssri = require('ssri') + +// options passed to http-cache-semantics constructor +const policyOptions = { + shared: false, + ignoreCargoCult: true, +} + +// a fake empty response, used when only testing the +// request for storability +const emptyResponse = { status: 200, headers: {} } + +// returns a plain object representation of the Request +const requestObject = (request) => { + const _obj = { + method: request.method, + url: request.url, + headers: {}, + compress: request.compress, + } + + request.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +// returns a plain object representation of the Response +const responseObject = (response) => { + const _obj = { + status: response.status, + headers: {}, + } + + response.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +class CachePolicy { + constructor ({ entry, request, response, options }) { + this.entry = entry + this.request = requestObject(request) + this.response = responseObject(response) + this.options = options + this.policy = new CacheSemantics(this.request, this.response, policyOptions) + + if (this.entry) { + // if we have an entry, copy the timestamp to the _responseTime + // this is necessary because the CacheSemantics constructor forces + // the value to Date.now() which means a policy created from a + // cache entry is likely to always identify itself as stale + this.policy._responseTime = this.entry.metadata.time + } + } + + // static method to quickly determine if a request alone is storable + static storable (request, options) { + // no cachePath means no caching + if (!options.cachePath) { + return false + } + + // user explicitly asked not to cache + if (options.cache === 'no-store') { + return false + } + + // we only cache GET and HEAD requests + if (!['GET', 'HEAD'].includes(request.method)) { + return false + } + + // otherwise, let http-cache-semantics make the decision + // based on the request's headers + const 
policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) + return policy.storable() + } + + // returns true if the policy satisfies the request + satisfies (request) { + const _req = requestObject(request) + if (this.request.headers.host !== _req.headers.host) { + return false + } + + if (this.request.compress !== _req.compress) { + return false + } + + const negotiatorA = new Negotiator(this.request) + const negotiatorB = new Negotiator(_req) + + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { + return false + } + + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { + return false + } + + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { + return false + } + + if (this.options.integrity) { + return ssri.parse(this.options.integrity).match(this.entry.integrity) + } + + return true + } + + // returns true if the request and response allow caching + storable () { + return this.policy.storable() + } + + // NOTE: this is a hack to avoid parsing the cache-control + // header ourselves, it returns true if the response's + // cache-control contains must-revalidate + get mustRevalidate () { + return !!this.policy._rescc['must-revalidate'] + } + + // returns true if the cached response requires revalidation + // for the given request + needsRevalidation (request) { + const _req = requestObject(request) + // force method to GET because we only cache GETs + // but can serve a HEAD from a cached GET + _req.method = 'GET' + return !this.policy.satisfiesWithoutRevalidation(_req) + } + + responseHeaders () { + return this.policy.responseHeaders() + } + + // returns a new object containing the appropriate headers + // to send a revalidation request + revalidationHeaders (request) { + const _req = requestObject(request) + return this.policy.revalidationHeaders(_req) + } + + // returns true if the request/response was revalidated + // successfully. returns false if a new response was received + revalidated (request, response) { + const _req = requestObject(request) + const _res = responseObject(response) + const policy = this.policy.revalidatedPolicy(_req, _res) + return !policy.modified + } +} + +module.exports = CachePolicy diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js new file mode 100644 index 00000000000000..233ba67e165502 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js @@ -0,0 +1,118 @@ +'use strict' + +const { FetchError, Request, isRedirect } = require('minipass-fetch') +const url = require('url') + +const CachePolicy = require('./cache/policy.js') +const cache = require('./cache/index.js') +const remote = require('./remote.js') + +// given a Request, a Response and user options +// return true if the response is a redirect that +// can be followed. 
we throw errors that will result +// in the fetch being rejected if the redirect is +// possible but invalid for some reason +const canFollowRedirect = (request, response, options) => { + if (!isRedirect(response.status)) { + return false + } + + if (options.redirect === 'manual') { + return false + } + + if (options.redirect === 'error') { + throw new FetchError(`redirect mode is set to error: ${request.url}`, + 'no-redirect', { code: 'ENOREDIRECT' }) + } + + if (!response.headers.has('location')) { + throw new FetchError(`redirect location header missing for: ${request.url}`, + 'no-location', { code: 'EINVALIDREDIRECT' }) + } + + if (request.counter >= request.follow) { + throw new FetchError(`maximum redirect reached at: ${request.url}`, + 'max-redirect', { code: 'EMAXREDIRECT' }) + } + + return true +} + +// given a Request, a Response, and the user's options return an object +// with a new Request and a new options object that will be used for +// following the redirect +const getRedirect = (request, response, options) => { + const _opts = { ...options } + const location = response.headers.get('location') + const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) + // Comment below is used under the following license: + /** + * @license + * Copyright (c) 2010-2012 Mikeal Rogers + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an "AS + * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (new url.URL(request.url).hostname !== redirectUrl.hostname) { + request.headers.delete('authorization') + request.headers.delete('cookie') + } + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if ( + response.status === 303 || + (request.method === 'POST' && [301, 302].includes(response.status)) + ) { + _opts.method = 'GET' + _opts.body = null + request.headers.delete('content-length') + } + + _opts.headers = {} + request.headers.forEach((value, key) => { + _opts.headers[key] = value + }) + + _opts.counter = ++request.counter + const redirectReq = new Request(url.format(redirectUrl), _opts) + return { + request: redirectReq, + options: _opts, + } +} + +const fetch = async (request, options) => { + const response = CachePolicy.storable(request, options) + ? 
await cache(request, options) + : await remote(request, options) + + // if the request wasn't a GET or HEAD, and the response + // status is between 200 and 399 inclusive, invalidate the + // request url + if (!['GET', 'HEAD'].includes(request.method) && + response.status >= 200 && + response.status <= 399) { + await cache.invalidate(request, options) + } + + if (!canFollowRedirect(request, response, options)) { + return response + } + + const redirect = getRedirect(request, response, options) + return fetch(redirect.request, redirect.options) +} + +module.exports = fetch diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js new file mode 100644 index 00000000000000..2f12e8e1b61131 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js @@ -0,0 +1,41 @@ +const { FetchError, Headers, Request, Response } = require('minipass-fetch') + +const configureOptions = require('./options.js') +const fetch = require('./fetch.js') + +const makeFetchHappen = (url, opts) => { + const options = configureOptions(opts) + + const request = new Request(url, options) + return fetch(request, options) +} + +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { + if (typeof defaultUrl === 'object') { + defaultOptions = defaultUrl + defaultUrl = null + } + + const defaultedFetch = (url, options = {}) => { + const finalUrl = url || defaultUrl + const finalOptions = { + ...defaultOptions, + ...options, + headers: { + ...defaultOptions.headers, + ...options.headers, + }, + } + return wrappedFetch(finalUrl, finalOptions) + } + + defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => + makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) + return defaultedFetch +} + +module.exports = makeFetchHappen +module.exports.FetchError = FetchError +module.exports.Headers = Headers +module.exports.Request = Request +module.exports.Response = Response diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js new file mode 100644 index 00000000000000..f77511279f831d --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js @@ -0,0 +1,54 @@ +const dns = require('dns') + +const conditionalHeaders = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 'if-match', + 'if-range', +] + +const configureOptions = (opts) => { + const { strictSSL, ...options } = { ...opts } + options.method = options.method ? 
options.method.toUpperCase() : 'GET' + options.rejectUnauthorized = strictSSL !== false + + if (!options.retry) { + options.retry = { retries: 0 } + } else if (typeof options.retry === 'string') { + const retries = parseInt(options.retry, 10) + if (isFinite(retries)) { + options.retry = { retries } + } else { + options.retry = { retries: 0 } + } + } else if (typeof options.retry === 'number') { + options.retry = { retries: options.retry } + } else { + options.retry = { retries: 0, ...options.retry } + } + + options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } + + options.cache = options.cache || 'default' + if (options.cache === 'default') { + const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { + return conditionalHeaders.includes(name.toLowerCase()) + }) + if (hasConditionalHeader) { + options.cache = 'no-store' + } + } + + options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] + + // cacheManager is deprecated, but if it's set and + // cachePath is not we should copy it to the new field + if (options.cacheManager && !options.cachePath) { + options.cachePath = options.cacheManager + } + + return options +} + +module.exports = configureOptions diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js new file mode 100644 index 00000000000000..b1d221b2d0ce31 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js @@ -0,0 +1,41 @@ +'use strict' + +const MinipassPipeline = require('minipass-pipeline') + +class CachingMinipassPipeline extends MinipassPipeline { + #events = [] + #data = new Map() + + constructor (opts, ...streams) { + // CRITICAL: do NOT pass the streams to the call to super(), this will start + // the flow of data and potentially cause the events we need to catch to emit + // before we've finished our own setup. 
instead we call super() with no args, + // finish our setup, and then push the streams into ourselves to start the + // data flow + super() + this.#events = opts.events + + /* istanbul ignore next - coverage disabled because this is pointless to test here */ + if (streams.length) { + this.push(...streams) + } + } + + on (event, handler) { + if (this.#events.includes(event) && this.#data.has(event)) { + return handler(...this.#data.get(event)) + } + + return super.on(event, handler) + } + + emit (event, ...data) { + if (this.#events.includes(event)) { + this.#data.set(event, data) + } + + return super.emit(event, ...data) + } +} + +module.exports = CachingMinipassPipeline diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js new file mode 100644 index 00000000000000..8554564074de6e --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js @@ -0,0 +1,131 @@ +const { Minipass } = require('minipass') +const fetch = require('minipass-fetch') +const promiseRetry = require('promise-retry') +const ssri = require('ssri') +const { log } = require('proc-log') + +const CachingMinipassPipeline = require('./pipeline.js') +const { getAgent } = require('@npmcli/agent') +const pkg = require('../package.json') + +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) + 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + // from @npmcli/agent + 'ECONNECTIONTIMEOUT', + 'EIDLETIMEOUT', + 'ERESPONSETIMEOUT', + 'ETRANSFERTIMEOUT', + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) + // EINVALIDPROXY // invalid protocol from @npmcli/agent + // EINVALIDRESPONSE // invalid status code from @npmcli/agent +] + +const RETRY_TYPES = [ + 'request-timeout', +] + +// make a request directly to the remote source, +// retrying certain classes of errors as well as +// following redirects (through the cache if necessary) +// and verifying response integrity +const remoteFetch = (request, options) => { + const agent = getAgent(request.url, options) + if (!request.headers.has('connection')) { + request.headers.set('connection', agent ? 
'keep-alive' : 'close') + } + + if (!request.headers.has('user-agent')) { + request.headers.set('user-agent', USER_AGENT) + } + + // keep our own options since we're overriding the agent + // and the redirect mode + const _opts = { + ...options, + agent, + redirect: 'manual', + } + + return promiseRetry(async (retryHandler, attemptNum) => { + const req = new fetch.Request(request, _opts) + try { + let res = await fetch(req, _opts) + if (_opts.integrity && res.status === 200) { + // we got a 200 response and the user has specified an expected + // integrity value, so wrap the response in an ssri stream to verify it + const integrityStream = ssri.integrityStream({ + algorithms: _opts.algorithms, + integrity: _opts.integrity, + size: _opts.size, + }) + const pipeline = new CachingMinipassPipeline({ + events: ['integrity', 'size'], + }, res.body, integrityStream) + // we also propagate the integrity and size events out to the pipeline so we can use + // this new response body as an integrityEmitter for cacache + integrityStream.on('integrity', i => pipeline.emit('integrity', i)) + integrityStream.on('size', s => pipeline.emit('size', s)) + res = new fetch.Response(pipeline, res) + // set an explicit flag so we know if our response body will emit integrity and size + res.body.hasIntegrityEmitter = true + } + + res.headers.set('x-fetch-attempts', attemptNum) + + // do not retry POST requests, or requests with a streaming body + // do retry requests with a 408, 420, 429 or 500+ status in the response + const isStream = Minipass.isStream(req.body) + const isRetriable = req.method !== 'POST' && + !isStream && + ([408, 420, 429].includes(res.status) || res.status >= 500) + + if (isRetriable) { + if (typeof options.onRetry === 'function') { + options.onRetry(res) + } + + /* eslint-disable-next-line max-len */ + log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`) + return retryHandler(res) + } + + return res + } catch (err) { + const code = (err.code === 'EPROMISERETRY') + ? 
err.retried.code + : err.code + + // err.retried will be the thing that was thrown from above + // if it's a response, we just got a bad status code and we + // can re-throw to allow the retry + const isRetryError = err.retried instanceof fetch.Response || + (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) + + if (req.method === 'POST' || isRetryError) { + throw err + } + + if (typeof options.onRetry === 'function') { + options.onRetry(err) + } + + log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`) + return retryHandler(err) + } + }, options.retry).catch((err) => { + // don't reject for http errors, just return them + if (err.status >= 400 && err.type !== 'system') { + return err + } + + throw err + }) +} + +module.exports = remoteFetch diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/package.json new file mode 100644 index 00000000000000..7adb4d1e7f9719 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/package.json @@ -0,0 +1,75 @@ +{ + "name": "make-fetch-happen", + "version": "13.0.1", + "description": "Opinionated, caching, retrying fetch client", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "posttest": "npm run lint", + "eslint": "eslint", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/make-fetch-happen.git" + }, + "keywords": [ + "http", + "request", + "fetch", + "mean girls", + "caching", + "cache", + "subresource integrity" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.4", + "nock": "^13.2.4", + "safe-buffer": "^5.2.1", + "standard-version": "^9.3.2", + "tap": "^16.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "tap": { + "color": 1, + "files": "test/*.js", + "check-coverage": true, + "timeout": 60, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.4", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/LICENSE new file mode 100644 index 00000000000000..3c3410cdc12ee3 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/LICENSE @@ -0,0 +1,28 @@ +The MIT License (MIT) + +Copyright (c) Isaac Z. 
Schlueter and Contributors +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +--- + +Note: This is a derivative work based on "node-fetch" by David Frank, +modified and distributed under the terms of the MIT license above. +https://github.com/bitinn/node-fetch diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js new file mode 100644 index 00000000000000..b18f643269e375 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js @@ -0,0 +1,17 @@ +'use strict' +class AbortError extends Error { + constructor (message) { + super(message) + this.code = 'FETCH_ABORTED' + this.type = 'aborted' + Error.captureStackTrace(this, this.constructor) + } + + get name () { + return 'AbortError' + } + + // don't allow name to be overridden, but don't throw either + set name (s) {} +} +module.exports = AbortError diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js new file mode 100644 index 00000000000000..121b1730102e72 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js @@ -0,0 +1,97 @@ +'use strict' +const { Minipass } = require('minipass') +const TYPE = Symbol('type') +const BUFFER = Symbol('buffer') + +class Blob { + constructor (blobParts, options) { + this[TYPE] = '' + + const buffers = [] + let size = 0 + + if (blobParts) { + const a = blobParts + const length = Number(a.length) + for (let i = 0; i < length; i++) { + const element = a[i] + const buffer = element instanceof Buffer ? element + : ArrayBuffer.isView(element) + ? Buffer.from(element.buffer, element.byteOffset, element.byteLength) + : element instanceof ArrayBuffer ? Buffer.from(element) + : element instanceof Blob ? element[BUFFER] + : typeof element === 'string' ? 
Buffer.from(element) + : Buffer.from(String(element)) + size += buffer.length + buffers.push(buffer) + } + } + + this[BUFFER] = Buffer.concat(buffers, size) + + const type = options && options.type !== undefined + && String(options.type).toLowerCase() + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type + } + } + + get size () { + return this[BUFFER].length + } + + get type () { + return this[TYPE] + } + + text () { + return Promise.resolve(this[BUFFER].toString()) + } + + arrayBuffer () { + const buf = this[BUFFER] + const off = buf.byteOffset + const len = buf.byteLength + const ab = buf.buffer.slice(off, off + len) + return Promise.resolve(ab) + } + + stream () { + return new Minipass().end(this[BUFFER]) + } + + slice (start, end, type) { + const size = this.size + const relativeStart = start === undefined ? 0 + : start < 0 ? Math.max(size + start, 0) + : Math.min(start, size) + const relativeEnd = end === undefined ? size + : end < 0 ? Math.max(size + end, 0) + : Math.min(end, size) + const span = Math.max(relativeEnd - relativeStart, 0) + + const buffer = this[BUFFER] + const slicedBuffer = buffer.slice( + relativeStart, + relativeStart + span + ) + const blob = new Blob([], { type }) + blob[BUFFER] = slicedBuffer + return blob + } + + get [Symbol.toStringTag] () { + return 'Blob' + } + + static get BUFFER () { + return BUFFER + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, +}) + +module.exports = Blob diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js new file mode 100644 index 00000000000000..62286bd1de0d91 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js @@ -0,0 +1,350 @@ +'use strict' +const { Minipass } = require('minipass') +const MinipassSized = require('minipass-sized') + +const Blob = require('./blob.js') +const { BUFFER } = Blob +const FetchError = require('./fetch-error.js') + +// optional dependency on 'encoding' +let convert +try { + convert = require('encoding').convert +} catch (e) { + // defer error until textConverted is called +} + +const INTERNALS = Symbol('Body internals') +const CONSUME_BODY = Symbol('consumeBody') + +class Body { + constructor (bodyArg, options = {}) { + const { size = 0, timeout = 0 } = options + const body = bodyArg === undefined || bodyArg === null ? null + : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString()) + : isBlob(bodyArg) ? bodyArg + : Buffer.isBuffer(bodyArg) ? bodyArg + : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]' + ? Buffer.from(bodyArg) + : ArrayBuffer.isView(bodyArg) + ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength) + : Minipass.isStream(bodyArg) ? bodyArg + : Buffer.from(String(bodyArg)) + + this[INTERNALS] = { + body, + disturbed: false, + error: null, + } + + this.size = size + this.timeout = timeout + + if (Minipass.isStream(body)) { + body.on('error', er => { + const error = er.name === 'AbortError' ? 
er + : new FetchError(`Invalid response while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + this[INTERNALS].error = error + }) + } + } + + get body () { + return this[INTERNALS].body + } + + get bodyUsed () { + return this[INTERNALS].disturbed + } + + arrayBuffer () { + return this[CONSUME_BODY]().then(buf => + buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)) + } + + blob () { + const ct = this.headers && this.headers.get('content-type') || '' + return this[CONSUME_BODY]().then(buf => Object.assign( + new Blob([], { type: ct.toLowerCase() }), + { [BUFFER]: buf } + )) + } + + async json () { + const buf = await this[CONSUME_BODY]() + try { + return JSON.parse(buf.toString()) + } catch (er) { + throw new FetchError( + `invalid json response body at ${this.url} reason: ${er.message}`, + 'invalid-json' + ) + } + } + + text () { + return this[CONSUME_BODY]().then(buf => buf.toString()) + } + + buffer () { + return this[CONSUME_BODY]() + } + + textConverted () { + return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers)) + } + + [CONSUME_BODY] () { + if (this[INTERNALS].disturbed) { + return Promise.reject(new TypeError(`body used already for: ${ + this.url}`)) + } + + this[INTERNALS].disturbed = true + + if (this[INTERNALS].error) { + return Promise.reject(this[INTERNALS].error) + } + + // body is null + if (this.body === null) { + return Promise.resolve(Buffer.alloc(0)) + } + + if (Buffer.isBuffer(this.body)) { + return Promise.resolve(this.body) + } + + const upstream = isBlob(this.body) ? this.body.stream() : this.body + + /* istanbul ignore if: should never happen */ + if (!Minipass.isStream(upstream)) { + return Promise.resolve(Buffer.alloc(0)) + } + + const stream = this.size && upstream instanceof MinipassSized ? upstream + : !this.size && upstream instanceof Minipass && + !(upstream instanceof MinipassSized) ? upstream + : this.size ? new MinipassSized({ size: this.size }) + : new Minipass() + + // allow timeout on slow response body, but only if the stream is still writable. this + // makes the timeout center on the socket stream from lib/index.js rather than the + // intermediary minipass stream we create to receive the data + const resTimeout = this.timeout && stream.writable ? setTimeout(() => { + stream.emit('error', new FetchError( + `Response timeout while trying to fetch ${ + this.url} (over ${this.timeout}ms)`, 'body-timeout')) + }, this.timeout) : null + + // do not keep the process open just for this timeout, even + // though we expect it'll get cleared eventually. + if (resTimeout && resTimeout.unref) { + resTimeout.unref() + } + + // do the pipe in the promise, because the pipe() can send too much + // data through right away and upset the MP Sized object + return new Promise((resolve) => { + // if the stream is some other kind of stream, then pipe through a MP + // so we can collect it more easily. 
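// Editor's sketch, not part of the upstream patch: the collect-through-Minipass
// pattern that [CONSUME_BODY] relies on, reduced to its essentials. Only the
// `minipass` package is assumed; `source` is a stand-in for any readable stream.
const { Minipass } = require('minipass')

const collect = (source) => {
  const sink = new Minipass()
  // forward errors so the concat() promise rejects instead of hanging
  source.on('error', er => sink.emit('error', er))
  source.pipe(sink)
  // concat() resolves with the whole body as a single Buffer once 'end' fires
  return sink.concat()
}

// usage: prints 'hello'
collect(new Minipass().end('hello')).then(buf => console.log(buf.toString()))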
+ if (stream !== upstream) { + upstream.on('error', er => stream.emit('error', er)) + upstream.pipe(stream) + } + resolve() + }).then(() => stream.concat()).then(buf => { + clearTimeout(resTimeout) + return buf + }).catch(er => { + clearTimeout(resTimeout) + // request was aborted, reject with this Error + if (er.name === 'AbortError' || er.name === 'FetchError') { + throw er + } else if (er.name === 'RangeError') { + throw new FetchError(`Could not create Buffer from response body for ${ + this.url}: ${er.message}`, 'system', er) + } else { + // other errors, such as incorrect content-encoding or content-length + throw new FetchError(`Invalid response body while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + } + }) + } + + static clone (instance) { + if (instance.bodyUsed) { + throw new Error('cannot clone body after it is used') + } + + const body = instance.body + + // check that body is a stream and not form-data object + // NB: can't clone the form-data object without having it as a dependency + if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') { + // create a dedicated tee stream so that we don't lose data + // potentially sitting in the body stream's buffer by writing it + // immediately to p1 and not having it for p2. + const tee = new Minipass() + const p1 = new Minipass() + const p2 = new Minipass() + tee.on('error', er => { + p1.emit('error', er) + p2.emit('error', er) + }) + body.on('error', er => tee.emit('error', er)) + tee.pipe(p1) + tee.pipe(p2) + body.pipe(tee) + // set instance body to one fork, return the other + instance[INTERNALS].body = p1 + return p2 + } else { + return instance.body + } + } + + static extractContentType (body) { + return body === null || body === undefined ? null + : typeof body === 'string' ? 'text/plain;charset=UTF-8' + : isURLSearchParams(body) + ? 'application/x-www-form-urlencoded;charset=UTF-8' + : isBlob(body) ? body.type || null + : Buffer.isBuffer(body) ? null + : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null + : ArrayBuffer.isView(body) ? null + : typeof body.getBoundary === 'function' + ? `multipart/form-data;boundary=${body.getBoundary()}` + : Minipass.isStream(body) ? null + : 'text/plain;charset=UTF-8' + } + + static getTotalBytes (instance) { + const { body } = instance + return (body === null || body === undefined) ? 0 + : isBlob(body) ? body.size + : Buffer.isBuffer(body) ? body.length + : body && typeof body.getLengthSync === 'function' && ( + // detect form data input from form-data module + body._lengthRetrievers && + /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x + body.hasKnownLength && body.hasKnownLength()) // 2.x + ? body.getLengthSync() + : null + } + + static writeToStream (dest, instance) { + const { body } = instance + + if (body === null || body === undefined) { + dest.end() + } else if (Buffer.isBuffer(body) || typeof body === 'string') { + dest.end(body) + } else { + // body is stream or blob + const stream = isBlob(body) ? body.stream() : body + stream.on('error', er => dest.emit('error', er)).pipe(dest) + } + + return dest + } +} + +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true }, +}) + +const isURLSearchParams = obj => + // Duck-typing as a necessary condition. 
(typeof obj !== 'object' || + typeof obj.append !== 'function' || + typeof obj.delete !== 'function' || + typeof obj.get !== 'function' || + typeof obj.getAll !== 'function' || + typeof obj.has !== 'function' || + typeof obj.set !== 'function') ? false + // Brand-checking and more duck-typing as optional condition. + : obj.constructor.name === 'URLSearchParams' || + Object.prototype.toString.call(obj) === '[object URLSearchParams]' || + typeof obj.sort === 'function' + +const isBlob = obj => + typeof obj === 'object' && + typeof obj.arrayBuffer === 'function' && + typeof obj.type === 'string' && + typeof obj.stream === 'function' && + typeof obj.constructor === 'function' && + typeof obj.constructor.name === 'string' && + /^(Blob|File)$/.test(obj.constructor.name) && + /^(Blob|File)$/.test(obj[Symbol.toStringTag]) + +const convertBody = (buffer, headers) => { + /* istanbul ignore if */ + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function') + } + + const ct = headers && headers.get('content-type') + let charset = 'utf-8' + let res + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct) + } + + // no charset in content type, peek at response body for at most 1024 bytes + const str = buffer.slice(0, 1024).toString() + + // html5 + if (!res && str) { + res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str) + } + + // html4 + if (!res && str) { + res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str) + + if (res) { + res = /charset=(.*)/i.exec(res.pop()) + } + } + + // xml + if (!res && str) { + res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str) + } + + // found charset + if (res) { + charset = res.pop() + + // prevent decode issues when sites use incorrect encoding + // ref: https://hsivonen.fi/encoding-menu/ + if (charset === 'gb2312' || charset === 'gbk') { + charset = 'gb18030' + } + } + + // turn raw buffers into a single utf-8 buffer + return convert( + buffer, + 'UTF-8', + charset + ).toString() +} + +module.exports = Body diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/fetch-error.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/fetch-error.js new file mode 100644 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/fetch-error.js @@ -0,0 +1,32 @@ +'use strict' +class FetchError extends Error { + constructor (message, type, systemError) { + super(message) + this.code = 'FETCH_ERROR' + + // pick up code, expected, path, ... + if (systemError) { + Object.assign(this, systemError) + } + + this.errno = this.code + + // override anything the system error might've clobbered + this.type = this.code === 'EBADSIZE' && this.found > this.expect + ? 'max-size' : type + this.message = message + Error.captureStackTrace(this, this.constructor) + } + + get name () { + return 'FetchError' + } + + // don't allow name to be overwritten + set name (n) {} + + get [Symbol.toStringTag] () { + return 'FetchError' + } +} +module.exports = FetchError diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js new file mode 100644 index 00000000000000..dd6e854d5ba399 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js @@ -0,0 +1,267 @@ +'use strict' +const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/ +const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/ + +const validateName = name => { + name = `${name}` + if (invalidTokenRegex.test(name) || name === '') { + throw new TypeError(`${name} is not a legal HTTP header name`) + } +} + +const validateValue = value => { + value = `${value}` + if (invalidHeaderCharRegex.test(value)) { + throw new TypeError(`${value} is not a legal HTTP header value`) + } +} + +const find = (map, name) => { + name = name.toLowerCase() + for (const key in map) { + if (key.toLowerCase() === name) { + return key + } + } + return undefined +} + +const MAP = Symbol('map') +class Headers { + constructor (init = undefined) { + this[MAP] = Object.create(null) + if (init instanceof Headers) { + const rawHeaders = init.raw() + const headerNames = Object.keys(rawHeaders) + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value) + } + } + return + } + + // no-op + if (init === undefined || init === null) { + return + } + + if (typeof init === 'object') { + const method = init[Symbol.iterator] + if (method !== null && method !== undefined) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable') + } + + // sequence<sequence<ByteString>> + // Note: per spec we have to first exhaust the lists then process them + const pairs = [] + for (const pair of init) { + if (typeof pair !== 'object' || + typeof pair[Symbol.iterator] !== 'function') { + throw new
TypeError('Each header pair must be iterable') + } + const arrPair = Array.from(pair) + if (arrPair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple') + } + pairs.push(arrPair) + } + + for (const pair of pairs) { + this.append(pair[0], pair[1]) + } + } else { + // record<ByteString, ByteString> + for (const key of Object.keys(init)) { + this.append(key, init[key]) + } + } + } else { + throw new TypeError('Provided initializer must be an object') + } + } + + get (name) { + name = `${name}` + validateName(name) + const key = find(this[MAP], name) + if (key === undefined) { + return null + } + + return this[MAP][key].join(', ') + } + + forEach (callback, thisArg = undefined) { + let pairs = getHeaders(this) + for (let i = 0; i < pairs.length; i++) { + const [name, value] = pairs[i] + callback.call(thisArg, value, name, this) + // refresh in case the callback added more headers + pairs = getHeaders(this) + } + } + + set (name, value) { + name = `${name}` + value = `${value}` + validateName(name) + validateValue(value) + const key = find(this[MAP], name) + this[MAP][key !== undefined ? key : name] = [value] + } + + append (name, value) { + name = `${name}` + value = `${value}` + validateName(name) + validateValue(value) + const key = find(this[MAP], name) + if (key !== undefined) { + this[MAP][key].push(value) + } else { + this[MAP][name] = [value] + } + } + + has (name) { + name = `${name}` + validateName(name) + return find(this[MAP], name) !== undefined + } + + delete (name) { + name = `${name}` + validateName(name) + const key = find(this[MAP], name) + if (key !== undefined) { + delete this[MAP][key] + } + } + + raw () { + return this[MAP] + } + + keys () { + return new HeadersIterator(this, 'key') + } + + values () { + return new HeadersIterator(this, 'value') + } + + [Symbol.iterator] () { + return new HeadersIterator(this, 'key+value') + } + + entries () { + return new HeadersIterator(this, 'key+value') + } + + get [Symbol.toStringTag] () { + return 'Headers' + } + + static exportNodeCompatibleHeaders (headers) { + const obj = Object.assign(Object.create(null), headers[MAP]) + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host') + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0] + } + + return obj + } + + static createHeadersLenient (obj) { + const headers = new Headers() + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue + } + + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue + } + + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val] + } else { + headers[MAP][name].push(val) + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]] + } + } + return headers + } +} + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true }, +}) + +const getHeaders = (headers, kind = 'key+value') => + Object.keys(headers[MAP]).sort().map( + kind === 'key' ? k => k.toLowerCase() + : kind === 'value' ?
k => headers[MAP][k].join(', ') + : k => [k.toLowerCase(), headers[MAP][k].join(', ')] + ) + +const INTERNAL = Symbol('internal') + +class HeadersIterator { + constructor (target, kind) { + this[INTERNAL] = { + target, + kind, + index: 0, + } + } + + get [Symbol.toStringTag] () { + return 'HeadersIterator' + } + + next () { + /* istanbul ignore if: should be impossible */ + if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) { + throw new TypeError('Value of `this` is not a HeadersIterator') + } + + const { target, kind, index } = this[INTERNAL] + const values = getHeaders(target, kind) + const len = values.length + if (index >= len) { + return { + value: undefined, + done: true, + } + } + + this[INTERNAL].index++ + + return { value: values[index], done: false } + } +} + +// manually extend because 'extends' requires a ctor +Object.setPrototypeOf(HeadersIterator.prototype, + Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))) + +module.exports = Headers diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js new file mode 100644 index 00000000000000..da402161670e65 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js @@ -0,0 +1,377 @@ +'use strict' +const { URL } = require('url') +const http = require('http') +const https = require('https') +const zlib = require('minizlib') +const { Minipass } = require('minipass') + +const Body = require('./body.js') +const { writeToStream, getTotalBytes } = Body +const Response = require('./response.js') +const Headers = require('./headers.js') +const { createHeadersLenient } = Headers +const Request = require('./request.js') +const { getNodeRequestOptions } = Request +const FetchError = require('./fetch-error.js') +const AbortError = require('./abort-error.js') + +// XXX this should really be split up and unit-ized for easier testing +// and better DRY implementation of data/http request aborting +const fetch = async (url, opts) => { + if (/^data:/.test(url)) { + const request = new Request(url, opts) + // delay 1 promise tick so that the consumer can abort right away + return Promise.resolve().then(() => new Promise((resolve, reject) => { + let type, data + try { + const { pathname, search } = new URL(url) + const split = pathname.split(',') + if (split.length < 2) { + throw new Error('invalid data: URI') + } + const mime = split.shift() + const base64 = /;base64$/.test(mime) + type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime + const rawData = decodeURIComponent(split.join(',') + search) + data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData) + } catch (er) { + return reject(new FetchError(`[${request.method}] ${ + request.url} invalid URL, ${er.message}`, 'system', er)) + } + + const { signal } = request + if (signal && signal.aborted) { + return reject(new AbortError('The user aborted a request.')) + } + + const headers = { 'Content-Length': data.length } + if (type) { + headers['Content-Type'] = type + } + return resolve(new Response(data, { headers })) + })) + } + + return new Promise((resolve, reject) => { + // build request object + const request = new Request(url, opts) + let options + try { + options = getNodeRequestOptions(request) + } catch (er) { + return reject(er) + } + + const send = (options.protocol === 'https:' ? 
https : http).request + const { signal } = request + let response = null + const abort = () => { + const error = new AbortError('The user aborted a request.') + reject(error) + if (Minipass.isStream(request.body) && + typeof request.body.destroy === 'function') { + request.body.destroy(error) + } + if (response && response.body) { + response.body.emit('error', error) + } + } + + if (signal && signal.aborted) { + return abort() + } + + const abortAndFinalize = () => { + abort() + finalize() + } + + const finalize = () => { + req.abort() + if (signal) { + signal.removeEventListener('abort', abortAndFinalize) + } + clearTimeout(reqTimeout) + } + + // send request + const req = send(options) + + if (signal) { + signal.addEventListener('abort', abortAndFinalize) + } + + let reqTimeout = null + if (request.timeout) { + req.once('socket', () => { + reqTimeout = setTimeout(() => { + reject(new FetchError(`network timeout at: ${ + request.url}`, 'request-timeout')) + finalize() + }, request.timeout) + }) + } + + req.on('error', er => { + // if a 'response' event is emitted before the 'error' event, then by the + // time this handler is run it's too late to reject the Promise for the + // response. instead, we forward the error event to the response stream + // so that the error will surface to the user when they try to consume + // the body. this is done as a side effect of aborting the request except + // for in windows, where we must forward the event manually, otherwise + // there is no longer a ref'd socket attached to the request and the + // stream never ends so the event loop runs out of work and the process + // exits without warning. + // coverage skipped here due to the difficulty in testing + // istanbul ignore next + if (req.res) { + req.res.emit('error', er) + } + reject(new FetchError(`request to ${request.url} failed, reason: ${ + er.message}`, 'system', er)) + finalize() + }) + + req.on('response', res => { + clearTimeout(reqTimeout) + + const headers = createHeadersLenient(res.headers) + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location') + + // HTTP fetch step 5.3 + let locationURL = null + try { + locationURL = location === null ? null : new URL(location, request.url).toString() + } catch { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the erroneous redirect URL + if (request.redirect !== 'manual') { + /* eslint-disable-next-line max-len */ + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')) + finalize() + return + } + } + + // HTTP fetch step 5.5 + if (request.redirect === 'error') { + reject(new FetchError('uri requested responds with a redirect, ' + + `redirect mode is set to error: ${request.url}`, 'no-redirect')) + finalize() + return + } else if (request.redirect === 'manual') { + // node-fetch-specific step: make manual redirect a bit easier to + // use by setting the Location header value to the resolved URL.
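// Editor's sketch, not part of the upstream patch: what the 'manual' redirect
// mode handled above looks like from the caller's side. The URL is hypothetical;
// only this minipass-fetch module is assumed.
const manualFetch = require('./index.js')

async function firstHop (url) {
  const res = await manualFetch(url, { redirect: 'manual' })
  if (manualFetch.isRedirect(res.status)) {
    // already resolved against the request URL by the step above
    return res.headers.get('location')
  }
  return null
}

// firstHop('https://registry.example/pkg').then(loc => console.log(loc))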
+ if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL) + } catch (err) { + /* istanbul ignore next: nodejs server prevent invalid + response headers, we can't test this through normal + request */ + reject(err) + } + } + } else if (request.redirect === 'follow' && locationURL !== null) { + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${ + request.url}`, 'max-redirect')) + finalize() + return + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && + request.body && + getTotalBytes(request) === null) { + reject(new FetchError( + 'Cannot follow redirect with body being a readable stream', + 'unsupported-redirect' + )) + finalize() + return + } + + // Update host due to redirection + request.headers.set('host', (new URL(locationURL)).host) + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + } + + // if the redirect is to a new hostname, strip the authorization and cookie headers + const parsedOriginal = new URL(request.url) + const parsedRedirect = new URL(locationURL) + if (parsedOriginal.hostname !== parsedRedirect.hostname) { + requestOpts.headers.delete('authorization') + requestOpts.headers.delete('cookie') + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || ( + (res.statusCode === 301 || res.statusCode === 302) && + request.method === 'POST' + )) { + requestOpts.method = 'GET' + requestOpts.body = undefined + requestOpts.headers.delete('content-length') + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))) + finalize() + return + } + } // end if(isRedirect) + + // prepare response + res.once('end', () => + signal && signal.removeEventListener('abort', abortAndFinalize)) + + const body = new Minipass() + // if an error occurs, either on the response stream itself, on one of the + // decoder streams, or a response length timeout from the Body class, we + // forward the error through to our internal body stream. If we see an + // error event on that, we call finalize to abort the request and ensure + // we don't leave a socket believing a request is in flight. + // this is difficult to test, so lacks specific coverage. + body.on('error', finalize) + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + res.on('error', /* istanbul ignore next */ er => body.emit('error', er)) + res.on('data', (chunk) => body.write(chunk)) + res.on('end', () => body.end()) + + const responseOptions = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter, + trailer: new Promise(resolveTrailer => + res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))), + } + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding') + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. 
no content response (204) + // 5. content not modified response (304) + if (!request.compress || + request.method === 'HEAD' || + codings === null || + res.statusCode === 204 || + res.statusCode === 304) { + response = new Response(body, responseOptions) + resolve(response) + return + } + + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH, + } + + // for gzip + if (codings === 'gzip' || codings === 'x-gzip') { + const unzip = new zlib.Gunzip(zlibOptions) + response = new Response( + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip), + responseOptions + ) + resolve(response) + return + } + + // for deflate + if (codings === 'deflate' || codings === 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new Minipass()) + raw.once('data', chunk => { + // see http://stackoverflow.com/questions/37519828 + const decoder = (chunk[0] & 0x0F) === 0x08 + ? new zlib.Inflate() + : new zlib.InflateRaw() + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + }) + return + } + + // for br + if (codings === 'br') { + // ignoring coverage so tests don't have to fake support (or lack of) for brotli + // istanbul ignore next + try { + var decoder = new zlib.BrotliDecompress() + } catch (err) { + reject(err) + finalize() + return + } + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. 
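// Editor's sketch, not part of the upstream patch: why the deflate branch
// above sniffs the first byte. A zlib-wrapped stream opens with a CMF byte
// whose low nibble is 8 (CM = deflate, e.g. 0x78); raw deflate output has no
// such header. Node's built-in zlib is used here purely for illustration.
const nodeZlib = require('zlib')

const wrapped = nodeZlib.deflateSync(Buffer.from('hi')) // zlib header present
const bare = nodeZlib.deflateRawSync(Buffer.from('hi')) // no header

console.log((wrapped[0] & 0x0f) === 0x08) // true  -> new zlib.Inflate()
console.log((bare[0] & 0x0f) === 0x08)    // false here -> new zlib.InflateRaw()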
+ body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + return + } + + // otherwise, use response as-is + response = new Response(body, responseOptions) + resolve(response) + }) + + writeToStream(req, request) + }) +} + +module.exports = fetch + +fetch.isRedirect = code => + code === 301 || + code === 302 || + code === 303 || + code === 307 || + code === 308 + +fetch.Headers = Headers +fetch.Request = Request +fetch.Response = Response +fetch.FetchError = FetchError +fetch.AbortError = AbortError diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js new file mode 100644 index 00000000000000..054439e6699107 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js @@ -0,0 +1,282 @@ +'use strict' +const { URL } = require('url') +const { Minipass } = require('minipass') +const Headers = require('./headers.js') +const { exportNodeCompatibleHeaders } = Headers +const Body = require('./body.js') +const { clone, extractContentType, getTotalBytes } = Body + +const version = require('../package.json').version +const defaultUserAgent = + `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)` + +const INTERNALS = Symbol('Request internals') + +const isRequest = input => + typeof input === 'object' && typeof input[INTERNALS] === 'object' + +const isAbortSignal = signal => { + const proto = ( + signal + && typeof signal === 'object' + && Object.getPrototypeOf(signal) + ) + return !!(proto && proto.constructor.name === 'AbortSignal') +} + +class Request extends Body { + constructor (input, init = {}) { + const parsedURL = isRequest(input) ? new URL(input.url) + : input && input.href ? new URL(input.href) + : new URL(`${input}`) + + if (isRequest(input)) { + init = { ...input[INTERNALS], ...init } + } else if (!input || typeof input === 'string') { + input = {} + } + + const method = (init.method || input.method || 'GET').toUpperCase() + const isGETHEAD = method === 'GET' || method === 'HEAD' + + if ((init.body !== null && init.body !== undefined || + isRequest(input) && input.body !== null) && isGETHEAD) { + throw new TypeError('Request with GET/HEAD method cannot have body') + } + + const inputBody = init.body !== null && init.body !== undefined ? init.body + : isRequest(input) && input.body !== null ? clone(input) + : null + + super(inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0, + }) + + const headers = new Headers(init.headers || input.headers || {}) + + if (inputBody !== null && inputBody !== undefined && + !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody) + if (contentType) { + headers.append('Content-Type', contentType) + } + } + + const signal = 'signal' in init ? 
init.signal + : null + + if (signal !== null && signal !== undefined && !isAbortSignal(signal)) { + throw new TypeError('Expected signal must be an instanceof AbortSignal') + } + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0', + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = init + + this[INTERNALS] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow + : input.follow !== undefined ? input.follow + : 20 + this.compress = init.compress !== undefined ? init.compress + : input.compress !== undefined ? input.compress + : true + this.counter = init.counter || input.counter || 0 + this.agent = init.agent || input.agent + } + + get method () { + return this[INTERNALS].method + } + + get url () { + return this[INTERNALS].parsedURL.toString() + } + + get headers () { + return this[INTERNALS].headers + } + + get redirect () { + return this[INTERNALS].redirect + } + + get signal () { + return this[INTERNALS].signal + } + + clone () { + return new Request(this) + } + + get [Symbol.toStringTag] () { + return 'Request' + } + + static getNodeRequestOptions (request) { + const parsedURL = request[INTERNALS].parsedURL + const headers = new Headers(request[INTERNALS].headers) + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*') + } + + // Basic fetch + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported') + } + + if (request.signal && + Minipass.isStream(request.body) && + typeof request.body.destroy !== 'function') { + throw new Error( + 'Cancellation of streamed requests with AbortSignal is not supported') + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + const contentLengthValue = + (request.body === null || request.body === undefined) && + /^(POST|PUT)$/i.test(request.method) ? '0' + : request.body !== null && request.body !== undefined + ? getTotalBytes(request) + : null + + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue + '') + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', defaultUserAgent) + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate') + } + + const agent = typeof request.agent === 'function' + ? 
request.agent(parsedURL) + : request.agent + + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close') + } + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = request[INTERNALS] + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + // we cannot spread parsedURL directly, so we have to read each property one-by-one + // and map them to the equivalent https?.request() method options + const urlProps = { + auth: parsedURL.username || parsedURL.password + ? `${parsedURL.username}:${parsedURL.password}` + : '', + host: parsedURL.host, + hostname: parsedURL.hostname, + path: `${parsedURL.pathname}${parsedURL.search}`, + port: parsedURL.port, + protocol: parsedURL.protocol, + } + + return { + ...urlProps, + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + timeout: request.timeout, + } + } +} + +module.exports = Request + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true }, +}) diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js new file mode 100644 index 00000000000000..54cb52db3594a7 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js @@ -0,0 +1,90 @@ +'use strict' +const http = require('http') +const { STATUS_CODES } = http + +const Headers = require('./headers.js') +const Body = require('./body.js') +const { clone, extractContentType } = Body + +const INTERNALS = Symbol('Response internals') + +class Response extends Body { + constructor (body = null, opts = {}) { + super(body, opts) + + const status = opts.status || 200 + const headers = new Headers(opts.headers) + + if (body !== null && body !== undefined && !headers.has('Content-Type')) { + const contentType = extractContentType(body) + if (contentType) { + headers.append('Content-Type', contentType) + } + } + + this[INTERNALS] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter, + trailer: Promise.resolve(opts.trailer || new Headers()), + } + } + + get trailer () { + return this[INTERNALS].trailer + } + + get url () { + return this[INTERNALS].url || '' + } + + get status () { + return this[INTERNALS].status + } + + get ok () { + return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300 + } + + get redirected () { + return this[INTERNALS].counter > 0 + } + + get statusText () { + return this[INTERNALS].statusText + } + + get headers () { + return this[INTERNALS].headers + } + + clone () { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected, + trailer: this.trailer, + }) + } + + get [Symbol.toStringTag] () { + return 'Response' + } +} + +module.exports = Response + 
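// Editor's sketch, not part of the upstream patch: the Response class above
// can be exercised standalone, which is roughly how its tests drive it. The
// require path assumes the layout this patch creates.
const MinipassResponse = require('./response.js')

const demo = new MinipassResponse('{"ok":true}', { status: 200 })
demo.json().then(body => {
  // ok is derived from status; redirected from the internal counter
  console.log(demo.ok, demo.status, demo.redirected, body.ok) // true 200 false true
})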
+Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true }, +}) diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/package.json new file mode 100644 index 00000000000000..d491a7fba126d0 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/package.json @@ -0,0 +1,69 @@ +{ + "name": "minipass-fetch", + "version": "3.0.5", + "description": "An implementation of window.fetch in Node.js using Minipass streams", + "license": "MIT", + "main": "lib/index.js", + "scripts": { + "test:tls-fixtures": "./test/fixtures/tls/setup.sh", + "test": "tap", + "snap": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "tap": { + "coverage-map": "map.js", + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "@ungap/url-search-params": "^0.2.2", + "abort-controller": "^3.0.0", + "abortcontroller-polyfill": "~1.7.3", + "encoding": "^0.1.13", + "form-data": "^4.0.0", + "nock": "^13.2.4", + "parted": "^0.1.1", + "string-to-arraybuffer": "^1.0.2", + "tap": "^16.0.0" + }, + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/minipass-fetch.git" + }, + "keywords": [ + "fetch", + "minipass", + "node-fetch", + "window.fetch" + ], + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.22.0", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/proc-log/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/proc-log/LICENSE new file mode 100644 index 00000000000000..83837797202b70 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/proc-log/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) GitHub, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
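The proc-log implementation vendored below is only a thin veneer over `process.emit`: every logging call emits an event on the global `process` object, and nothing is printed unless a consumer subscribes. A minimal consumer-side sketch (an editor's illustration, not part of this patch; the listener stands in for npm's display layer):

const { log } = require('proc-log')

// nothing prints until something subscribes on the process object
process.on('log', (level, ...args) => {
  if (level === 'http' || level === 'error') {
    console.error(`npm ${level}`, ...args)
  }
})

log.http('fetch', 'GET https://example.com attempt 1') // printed by the listener
log.silly('cache', 'suppressed by the filter above')   // emitted, but filtered out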
diff --git a/deps/npm/node_modules/tuf-js/node_modules/proc-log/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/proc-log/lib/index.js new file mode 100644 index 00000000000000..86d90861078dab --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/proc-log/lib/index.js @@ -0,0 +1,153 @@ +const META = Symbol('proc-log.meta') +module.exports = { + META: META, + output: { + LEVELS: [ + 'standard', + 'error', + 'buffer', + 'flush', + ], + KEYS: { + standard: 'standard', + error: 'error', + buffer: 'buffer', + flush: 'flush', + }, + standard: function (...args) { + return process.emit('output', 'standard', ...args) + }, + error: function (...args) { + return process.emit('output', 'error', ...args) + }, + buffer: function (...args) { + return process.emit('output', 'buffer', ...args) + }, + flush: function (...args) { + return process.emit('output', 'flush', ...args) + }, + }, + log: { + LEVELS: [ + 'notice', + 'error', + 'warn', + 'info', + 'verbose', + 'http', + 'silly', + 'timing', + 'pause', + 'resume', + ], + KEYS: { + notice: 'notice', + error: 'error', + warn: 'warn', + info: 'info', + verbose: 'verbose', + http: 'http', + silly: 'silly', + timing: 'timing', + pause: 'pause', + resume: 'resume', + }, + error: function (...args) { + return process.emit('log', 'error', ...args) + }, + notice: function (...args) { + return process.emit('log', 'notice', ...args) + }, + warn: function (...args) { + return process.emit('log', 'warn', ...args) + }, + info: function (...args) { + return process.emit('log', 'info', ...args) + }, + verbose: function (...args) { + return process.emit('log', 'verbose', ...args) + }, + http: function (...args) { + return process.emit('log', 'http', ...args) + }, + silly: function (...args) { + return process.emit('log', 'silly', ...args) + }, + timing: function (...args) { + return process.emit('log', 'timing', ...args) + }, + pause: function () { + return process.emit('log', 'pause') + }, + resume: function () { + return process.emit('log', 'resume') + }, + }, + time: { + LEVELS: [ + 'start', + 'end', + ], + KEYS: { + start: 'start', + end: 'end', + }, + start: function (name, fn) { + process.emit('time', 'start', name) + function end () { + return process.emit('time', 'end', name) + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return res.finally(end) + } + end() + return res + } + return end + }, + end: function (name) { + return process.emit('time', 'end', name) + }, + }, + input: { + LEVELS: [ + 'start', + 'end', + 'read', + ], + KEYS: { + start: 'start', + end: 'end', + read: 'read', + }, + start: function (fn) { + process.emit('input', 'start') + function end () { + return process.emit('input', 'end') + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return res.finally(end) + } + end() + return res + } + return end + }, + end: function () { + return process.emit('input', 'end') + }, + read: function (...args) { + let resolve, reject + const promise = new Promise((_resolve, _reject) => { + resolve = _resolve + reject = _reject + }) + process.emit('input', 'read', resolve, reject, ...args) + return promise + }, + }, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/proc-log/package.json b/deps/npm/node_modules/tuf-js/node_modules/proc-log/package.json new file mode 100644 index 00000000000000..4ab89102ecc9b5 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/proc-log/package.json @@ -0,0 +1,45 @@ +{ + "name": "proc-log", + "version": "4.2.0", + "files": 
[ + "bin/", + "lib/" + ], + "main": "lib/index.js", + "description": "just emit 'log' events on the process object", + "repository": { + "type": "git", + "url": "https://github.com/npm/proc-log.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "posttest": "npm run lint", + "postsnap": "eslint index.js test/*.js --fix", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "template-oss-apply": "template-oss-apply --force" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.3", + "tap": "^16.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/ssri/LICENSE.md b/deps/npm/node_modules/tuf-js/node_modules/ssri/LICENSE.md new file mode 100644 index 00000000000000..e335388869f50f --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/ssri/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright 2021 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/tuf-js/node_modules/ssri/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/ssri/lib/index.js new file mode 100644 index 00000000000000..7d749ed480fb98 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/ssri/lib/index.js @@ -0,0 +1,580 @@ +'use strict' + +const crypto = require('crypto') +const { Minipass } = require('minipass') + +const SPEC_ALGORITHMS = ['sha512', 'sha384', 'sha256'] +const DEFAULT_ALGORITHMS = ['sha512'] + +// TODO: this should really be a hardcoded list of algorithms we support, +// rather than [a-z0-9]. +const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i +const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/ +const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/ +const VCHAR_REGEX = /^[\x21-\x7E]+$/ + +const getOptString = options => options?.length ? `?${options.join('?')}` : '' + +class IntegrityStream extends Minipass { + #emittedIntegrity + #emittedSize + #emittedVerified + + constructor (opts) { + super() + this.size = 0 + this.opts = opts + + // may be overridden later, but set now for class consistency + this.#getOptions() + + // options used for calculating stream. can't be changed. 
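// Editor's sketch, not part of the upstream patch: the round trip these ssri
// classes support, using the module's own fromData/parse/match API. Only this
// vendored ssri module is assumed; the require path follows the patch layout.
const vendoredSsri = require('./index.js')

const sri = vendoredSsri.fromData('hello world', { algorithms: ['sha512'] })
console.log(sri.toString())                                    // sha512-<base64 digest>
console.log(!!vendoredSsri.fromData('hello world').match(sri)) // true
console.log(vendoredSsri.fromData('tampered').match(sri))      // false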
+ if (opts?.algorithms) { + this.algorithms = [...opts.algorithms] + } else { + this.algorithms = [...DEFAULT_ALGORITHMS] + } + if (this.algorithm !== null && !this.algorithms.includes(this.algorithm)) { + this.algorithms.push(this.algorithm) + } + + this.hashes = this.algorithms.map(crypto.createHash) + } + + #getOptions () { + // For verification + this.sri = this.opts?.integrity ? parse(this.opts?.integrity, this.opts) : null + this.expectedSize = this.opts?.size + + if (!this.sri) { + this.algorithm = null + } else if (this.sri.isHash) { + this.goodSri = true + this.algorithm = this.sri.algorithm + } else { + this.goodSri = !this.sri.isEmpty() + this.algorithm = this.sri.pickAlgorithm(this.opts) + } + + this.digests = this.goodSri ? this.sri[this.algorithm] : null + this.optString = getOptString(this.opts?.options) + } + + on (ev, handler) { + if (ev === 'size' && this.#emittedSize) { + return handler(this.#emittedSize) + } + + if (ev === 'integrity' && this.#emittedIntegrity) { + return handler(this.#emittedIntegrity) + } + + if (ev === 'verified' && this.#emittedVerified) { + return handler(this.#emittedVerified) + } + + return super.on(ev, handler) + } + + emit (ev, data) { + if (ev === 'end') { + this.#onEnd() + } + return super.emit(ev, data) + } + + write (data) { + this.size += data.length + this.hashes.forEach(h => h.update(data)) + return super.write(data) + } + + #onEnd () { + if (!this.goodSri) { + this.#getOptions() + } + const newSri = parse(this.hashes.map((h, i) => { + return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}` + }).join(' '), this.opts) + // Integrity verification mode + const match = this.goodSri && newSri.match(this.sri, this.opts) + if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) { + /* eslint-disable-next-line max-len */ + const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`) + err.code = 'EBADSIZE' + err.found = this.size + err.expected = this.expectedSize + err.sri = this.sri + this.emit('error', err) + } else if (this.sri && !match) { + /* eslint-disable-next-line max-len */ + const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = this.digests + err.algorithm = this.algorithm + err.sri = this.sri + this.emit('error', err) + } else { + this.#emittedSize = this.size + this.emit('size', this.size) + this.#emittedIntegrity = newSri + this.emit('integrity', newSri) + if (match) { + this.#emittedVerified = match + this.emit('verified', match) + } + } + } +} + +class Hash { + get isHash () { + return true + } + + constructor (hash, opts) { + const strict = opts?.strict + this.source = hash.trim() + + // set default values so that we make V8 happy to + // always see a familiar object template. + this.digest = '' + this.algorithm = '' + this.options = [] + + // 3.1. Integrity metadata (called "Hash" by ssri) + // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description + const match = this.source.match( + strict + ? 
STRICT_SRI_REGEX + : SRI_REGEX + ) + if (!match) { + return + } + if (strict && !SPEC_ALGORITHMS.includes(match[1])) { + return + } + this.algorithm = match[1] + this.digest = match[2] + + const rawOpts = match[3] + if (rawOpts) { + this.options = rawOpts.slice(1).split('?') + } + } + + hexDigest () { + return this.digest && Buffer.from(this.digest, 'base64').toString('hex') + } + + toJSON () { + return this.toString() + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + if (other.isIntegrity) { + const algo = other.pickAlgorithm(opts, [this.algorithm]) + + if (!algo) { + return false + } + + const foundHash = other[algo].find(hash => hash.digest === this.digest) + + if (foundHash) { + return foundHash + } + + return false + } + return other.digest === this.digest ? other : false + } + + toString (opts) { + if (opts?.strict) { + // Strict mode enforces the standard as close to the letter + // as it can. + if (!( + // The spec has very restricted productions for algorithms. + // https://www.w3.org/TR/CSP2/#source-list-syntax + SPEC_ALGORITHMS.includes(this.algorithm) && + // Usually, if someone insists on using a "different" base64, we + // leave it as-is, since there are multiple standards, and the + // specified one is not a URL-safe variant. + // https://www.w3.org/TR/CSP2/#base64_value + this.digest.match(BASE64_REGEX) && + // Option syntax is strictly visual chars. + // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression + // https://tools.ietf.org/html/rfc5234#appendix-B.1 + this.options.every(opt => opt.match(VCHAR_REGEX)) + )) { + return '' + } + } + return `${this.algorithm}-${this.digest}${getOptString(this.options)}` + } +} + +function integrityHashToString (toString, sep, opts, hashes) { + const toStringIsNotEmpty = toString !== '' + + let shouldAddFirstSep = false + let complement = '' + + const lastIndex = hashes.length - 1 + + for (let i = 0; i < lastIndex; i++) { + const hashString = Hash.prototype.toString.call(hashes[i], opts) + + if (hashString) { + shouldAddFirstSep = true + + complement += hashString + complement += sep + } + } + + const finalHashString = Hash.prototype.toString.call(hashes[lastIndex], opts) + + if (finalHashString) { + shouldAddFirstSep = true + complement += finalHashString + } + + if (toStringIsNotEmpty && shouldAddFirstSep) { + return toString + sep + complement + } + + return toString + complement +} + +class Integrity { + get isIntegrity () { + return true + } + + toJSON () { + return this.toString() + } + + isEmpty () { + return Object.keys(this).length === 0 + } + + toString (opts) { + let sep = opts?.sep || ' ' + let toString = '' + + if (opts?.strict) { + // Entries must be separated by whitespace, according to spec. + sep = sep.replace(/\S+/g, ' ') + + for (const hash of SPEC_ALGORITHMS) { + if (this[hash]) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + } else { + for (const hash of Object.keys(this)) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + + return toString + } + + concat (integrity, opts) { + const other = typeof integrity === 'string' + ? integrity + : stringify(integrity, opts) + return parse(`${this.toString(opts)} ${other}`, opts) + } + + hexDigest () { + return parse(this, { single: true }).hexDigest() + } + + // add additional hashes to an integrity value, but prevent + // *changing* an existing integrity hash.
+ merge (integrity, opts) { + const other = parse(integrity, opts) + for (const algo in other) { + if (this[algo]) { + if (!this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest))) { + throw new Error('hashes do not match, cannot update integrity') + } + } else { + this[algo] = other[algo] + } + } + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + const algo = other.pickAlgorithm(opts, Object.keys(this)) + return ( + !!algo && + this[algo] && + other[algo] && + this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest + ) + ) + ) || false + } + + // Pick the highest priority algorithm present, optionally also limited to a + // set of hashes found in another integrity. When limiting it may return + // nothing. + pickAlgorithm (opts, hashes) { + const pickAlgorithm = opts?.pickAlgorithm || getPrioritizedHash + const keys = Object.keys(this).filter(k => { + if (hashes?.length) { + return hashes.includes(k) + } + return true + }) + if (keys.length) { + return keys.reduce((acc, algo) => pickAlgorithm(acc, algo) || acc) + } + // no intersection between this and hashes, + return null + } +} + +module.exports.parse = parse +function parse (sri, opts) { + if (!sri) { + return null + } + if (typeof sri === 'string') { + return _parse(sri, opts) + } else if (sri.algorithm && sri.digest) { + const fullSri = new Integrity() + fullSri[sri.algorithm] = [sri] + return _parse(stringify(fullSri, opts), opts) + } else { + return _parse(stringify(sri, opts), opts) + } +} + +function _parse (integrity, opts) { + // 3.4.3. Parse metadata + // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata + if (opts?.single) { + return new Hash(integrity, opts) + } + const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => { + const hash = new Hash(string, opts) + if (hash.algorithm && hash.digest) { + const algo = hash.algorithm + if (!acc[algo]) { + acc[algo] = [] + } + acc[algo].push(hash) + } + return acc + }, new Integrity()) + return hashes.isEmpty() ? null : hashes +} + +module.exports.stringify = stringify +function stringify (obj, opts) { + if (obj.algorithm && obj.digest) { + return Hash.prototype.toString.call(obj, opts) + } else if (typeof obj === 'string') { + return stringify(parse(obj, opts), opts) + } else { + return Integrity.prototype.toString.call(obj, opts) + } +} + +module.exports.fromHex = fromHex +function fromHex (hexDigest, algorithm, opts) { + const optString = getOptString(opts?.options) + return parse( + `${algorithm}-${ + Buffer.from(hexDigest, 'hex').toString('base64') + }${optString}`, opts + ) +} + +module.exports.fromData = fromData +function fromData (data, opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + return algorithms.reduce((acc, algo) => { + const digest = crypto.createHash(algo).update(data).digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the string we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) +} + +module.exports.fromStream = fromStream +function fromStream (stream, opts) { + const istream = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(istream) + stream.on('error', reject) + istream.on('error', reject) + let sri + istream.on('integrity', s => { + sri = s + }) + istream.on('end', () => resolve(sri)) + istream.resume() + }) +} + +module.exports.checkData = checkData +function checkData (data, sri, opts) { + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + if (opts?.error) { + throw Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + ) + } else { + return false + } + } + const algorithm = sri.pickAlgorithm(opts) + const digest = crypto.createHash(algorithm).update(data).digest('base64') + const newSri = parse({ algorithm, digest }) + const match = newSri.match(sri, opts) + opts = opts || {} + if (match || !(opts.error)) { + return match + } else if (typeof opts.size === 'number' && (data.length !== opts.size)) { + /* eslint-disable-next-line max-len */ + const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`) + err.code = 'EBADSIZE' + err.found = data.length + err.expected = opts.size + err.sri = sri + throw err + } else { + /* eslint-disable-next-line max-len */ + const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = sri + err.algorithm = algorithm + err.sri = sri + throw err + } +} + +module.exports.checkStream = checkStream +function checkStream (stream, sri, opts) { + opts = opts || Object.create(null) + opts.integrity = sri + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + return Promise.reject(Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + )) + } + const checker = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(checker) + stream.on('error', reject) + checker.on('error', reject) + let verified + checker.on('verified', s => { + verified = s + }) + checker.on('end', () => resolve(verified)) + checker.resume() + }) +} + +module.exports.integrityStream = integrityStream +function integrityStream (opts = Object.create(null)) { + return new IntegrityStream(opts) +} + +module.exports.create = createIntegrity +function createIntegrity (opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + + const hashes = algorithms.map(crypto.createHash) + + return { + update: function (chunk, enc) { + hashes.forEach(h => h.update(chunk, enc)) + return this + }, + digest: function () { + const integrity = algorithms.reduce((acc, algo) => { + const digest = hashes.shift().digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the hash we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) + + return integrity + }, + } +} + +const NODE_HASHES = crypto.getHashes() + +// This is a Best Effort™ at a reasonable priority for hash algos +const DEFAULT_PRIORITY = [ + 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512', + // TODO - it's unclear _which_ of these Node will actually use as its name + // for the algorithm, so we guesswork it based on the OpenSSL names. + 'sha3', + 'sha3-256', 'sha3-384', 'sha3-512', + 'sha3_256', 'sha3_384', 'sha3_512', +].filter(algo => NODE_HASHES.includes(algo)) + +function getPrioritizedHash (algo1, algo2) { + /* eslint-disable-next-line max-len */ + return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase()) + ? algo1 + : algo2 +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/ssri/package.json b/deps/npm/node_modules/tuf-js/node_modules/ssri/package.json new file mode 100644 index 00000000000000..28395414e4643c --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/ssri/package.json @@ -0,0 +1,65 @@ +{ + "name": "ssri", + "version": "10.0.6", + "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "prerelease": "npm t", + "postrelease": "npm publish", + "posttest": "npm run lint", + "test": "tap", + "coverage": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap" + }, + "tap": { + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/ssri.git" + }, + "keywords": [ + "w3c", + "web", + "security", + "integrity", + "checksum", + "hashing", + "subresource integrity", + "sri", + "sri hash", + "sri string", + "sri generator", + "html" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.22.0", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-filename/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/LICENSE new file mode 100644 index 00000000000000..69619c125ea7ef --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/LICENSE @@ -0,0 +1,5 @@ +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-filename/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/lib/index.js new file mode 100644 index 00000000000000..d067d2e709809a --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/lib/index.js @@ -0,0 +1,7 @@ +var path = require('path') + +var uniqueSlug = require('unique-slug') + +module.exports = function (filepath, prefix, uniq) { + return path.join(filepath, (prefix ? prefix + '-' : '') + uniqueSlug(uniq)) +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-filename/package.json b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/package.json new file mode 100644 index 00000000000000..b2fbf0666489a6 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/package.json @@ -0,0 +1,51 @@ +{ + "name": "unique-filename", + "version": "3.0.0", + "description": "Generate a unique filename for use in temporary directories or caches.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/unique-filename.git" + }, + "keywords": [], + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/unique-filename/issues" + }, + "homepage": "https://github.com/iarna/unique-filename", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.5.1", + "tap": "^16.3.0" + }, + "dependencies": { + "unique-slug": "^4.0.0" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.5.1" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-slug/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/LICENSE new file mode 100644 index 00000000000000..7953647e7760b8 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
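For context before the next hunk: the unique-filename module just added is a one-liner that joins a directory with a slug produced by unique-slug, which is vendored next. A minimal usage sketch, assuming both packages resolve to these vendored copies; the temp directory and the 'npm-cache' prefix are illustrative, not taken from this patch:

const os = require('os')
const uniqueFilename = require('unique-filename')
const uniqueSlug = require('unique-slug')

// Random case: uniqueFilename(dir, prefix) -> dir/prefix-XXXXXXXX (8 hex chars)
console.log(uniqueFilename(os.tmpdir(), 'npm-cache'))

// Seeded case: the same seed always produces the same MurmurHash3-derived
// slug, giving callers stable per-key temp file names.
console.log(uniqueSlug('some-seed') === uniqueSlug('some-seed')) // true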
diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-slug/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/lib/index.js new file mode 100644 index 00000000000000..1bac84d95d7307 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/lib/index.js @@ -0,0 +1,11 @@ +'use strict' +var MurmurHash3 = require('imurmurhash') + +module.exports = function (uniq) { + if (uniq) { + var hash = new MurmurHash3(uniq) + return ('00000000' + hash.result().toString(16)).slice(-8) + } else { + return (Math.random().toString(16) + '0000000').slice(2, 10) + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-slug/package.json b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/package.json new file mode 100644 index 00000000000000..33732cdbb42859 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/package.json @@ -0,0 +1,47 @@ +{ + "name": "unique-slug", + "version": "4.0.0", + "description": "Generate a unique character string suitable for use in files and URLs.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "keywords": [], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^3.1.0", + "@npmcli/template-oss": "4.5.1", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/unique-slug.git" + }, + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss.
Edits may be overwritten.", + "version": "4.5.1" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/unique-filename/package.json b/deps/npm/node_modules/unique-filename/package.json index b2fbf0666489a6..a08196e1db303a 100644 --- a/deps/npm/node_modules/unique-filename/package.json +++ b/deps/npm/node_modules/unique-filename/package.json @@ -1,20 +1,21 @@ { "name": "unique-filename", - "version": "3.0.0", + "version": "4.0.0", "description": "Generate a unique filename for use in temporary directories or caches.", "main": "lib/index.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", - "url": "https://github.com/npm/unique-filename.git" + "url": "git+https://github.com/npm/unique-filename.git" }, "keywords": [], "author": "GitHub Inc.", @@ -24,23 +25,24 @@ }, "homepage": "https://github.com/iarna/unique-filename", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.5.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "dependencies": { - "unique-slug": "^4.0.0" + "unique-slug": "^5.0.0" }, "files": [ "bin/", "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.5.1" + "version": "4.23.3", + "publish": true }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/unique-slug/package.json b/deps/npm/node_modules/unique-slug/package.json index 33732cdbb42859..b4d287aecef3d2 100644 --- a/deps/npm/node_modules/unique-slug/package.json +++ b/deps/npm/node_modules/unique-slug/package.json @@ -1,28 +1,29 @@ { "name": "unique-slug", - "version": "4.0.0", + "version": "5.0.0", "description": "Generate a unique character string suitable for use in files and URLs.", "main": "lib/index.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "keywords": [], "author": "GitHub Inc.", "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^3.1.0", - "@npmcli/template-oss": "4.5.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "repository": { "type": "git", - "url": "https://github.com/npm/unique-slug.git" + "url": "git+https://github.com/npm/unique-slug.git" }, "dependencies": { "imurmurhash": "^0.1.4" }, @@ -32,11 +33,12 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss.
Edits may be overwritten.", - "version": "4.5.1" + "version": "4.23.3", + "publish": true }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/validate-npm-package-name/package.json b/deps/npm/node_modules/validate-npm-package-name/package.json index 8a38b66e1d3e43..42089cbbede700 100644 --- a/deps/npm/node_modules/validate-npm-package-name/package.json +++ b/deps/npm/node_modules/validate-npm-package-name/package.json @@ -1,14 +1,14 @@ { "name": "validate-npm-package-name", - "version": "5.0.1", + "version": "6.0.0", "description": "Give me a string and I'll tell you if it's a valid npm package name", "main": "lib/", "directories": { "test": "test" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "scripts": { @@ -16,12 +16,13 @@ "test:code": "tap ${TAP_FLAGS:-'--'} test/*.js", "test:style": "standard", "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -44,11 +45,11 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true }, "tap": { diff --git a/deps/npm/node_modules/which/package.json b/deps/npm/node_modules/which/package.json index 515bfb22ca0e1e..94184233c61c4c 100644 --- a/deps/npm/node_modules/which/package.json +++ b/deps/npm/node_modules/which/package.json @@ -2,10 +2,10 @@ "author": "GitHub Inc.", "name": "which", "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.", - "version": "4.0.0", + "version": "5.0.0", "repository": { "type": "git", - "url": "https://github.com/npm/node-which.git" + "url": "git+https://github.com/npm/node-which.git" }, "main": "lib/index.js", "bin": { @@ -16,18 +16,19 @@ "isexe": "^3.1.1" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.18.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "files": [ "bin/", @@ -41,17 +42,11 @@ ] }, "engines": { - "node": "^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "ciVersions": [ - "16.13.0", - "16.x", - "18.0.0", - "18.x" - ], - "version": "4.18.0", + "version": "4.23.3", "publish": "true" } } diff --git a/deps/npm/node_modules/write-file-atomic/package.json b/deps/npm/node_modules/write-file-atomic/package.json index 54d58d7eeb984d..1e88b5b889ec95 100644 --- a/deps/npm/node_modules/write-file-atomic/package.json +++ b/deps/npm/node_modules/write-file-atomic/package.json @@ -1,20 +1,21 @@ { "name": "write-file-atomic", - "version": "5.0.1", + "version": "6.0.0", "description": "Write files in an atomic fashion w/configurable ownership", "main": "./lib/index.js", "scripts": { "test": "tap", "posttest": "npm run lint", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", - "url": "https://github.com/npm/write-file-atomic.git" + "url": "git+https://github.com/npm/write-file-atomic.git" }, "keywords": [ "writeFile", @@ -31,8 +32,8 @@ "signal-exit": "^4.0.1" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "files": [ @@ -40,12 +41,12 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "windowsCI": false, - "version": "4.14.1", + "version": "4.23.3", "publish": "true" }, "tap": { diff --git a/deps/npm/package.json b/deps/npm/package.json index e7784119332087..c92578506b30ac 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "10.8.3", + "version": "10.9.0", "name": "npm", "description": "a package manager for JavaScript", "workspaces": [ @@ -52,18 +52,18 @@ }, "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", - "@npmcli/arborist": "^7.5.4", - "@npmcli/config": "^8.3.4", - "@npmcli/fs": "^3.1.1", - "@npmcli/map-workspaces": "^3.0.6", - "@npmcli/package-json": "^5.2.0", - "@npmcli/promise-spawn": "^7.0.2", - "@npmcli/redact": "^2.0.1", - "@npmcli/run-script": "^8.1.0", + "@npmcli/arborist": "^8.0.0", + "@npmcli/config": "^9.0.0", + "@npmcli/fs": "^4.0.0", + "@npmcli/map-workspaces": "^4.0.1", + "@npmcli/package-json": "^6.0.1", + "@npmcli/promise-spawn": "^8.0.1", + "@npmcli/redact": "^3.0.0", + "@npmcli/run-script": "^9.0.1", "@sigstore/tuf": "^2.3.4", - "abbrev": "^2.0.0", + "abbrev": "^3.0.0", "archy": "~1.0.0", - "cacache": "^18.0.4", + "cacache": "^19.0.1", "chalk": "^5.3.0", "ci-info": "^4.0.0", "cli-columns": "^4.0.0", @@ -71,54 +71,54 @@ "fs-minipass": "^3.0.3", "glob": "^10.4.5", "graceful-fs": "^4.2.11", - "hosted-git-info": "^7.0.2", - "ini": "^4.1.3", - "init-package-json": "^6.0.3", + "hosted-git-info": "^8.0.0", + "ini": "^5.0.0", + "init-package-json": "^7.0.1", "is-cidr": "^5.1.0", - "json-parse-even-better-errors": "^3.0.2", - "libnpmaccess": "^8.0.6", - "libnpmdiff": "^6.1.4", - "libnpmexec": "^8.1.4", - "libnpmfund": "^5.0.12", - "libnpmhook": "^10.0.5", - "libnpmorg": "^6.0.6", - "libnpmpack": "^7.0.4", - "libnpmpublish": "^9.0.9", - "libnpmsearch": "^7.0.6", - "libnpmteam": "^6.0.5", - "libnpmversion": "^6.0.3", - 
"make-fetch-happen": "^13.0.1", + "json-parse-even-better-errors": "^4.0.0", + "libnpmaccess": "^9.0.0", + "libnpmdiff": "^7.0.0", + "libnpmexec": "^9.0.0", + "libnpmfund": "^6.0.0", + "libnpmhook": "^11.0.0", + "libnpmorg": "^7.0.0", + "libnpmpack": "^8.0.0", + "libnpmpublish": "^10.0.0", + "libnpmsearch": "^8.0.0", + "libnpmteam": "^7.0.0", + "libnpmversion": "^7.0.0", + "make-fetch-happen": "^14.0.1", "minimatch": "^9.0.5", "minipass": "^7.1.1", "minipass-pipeline": "^1.2.4", "ms": "^2.1.2", "node-gyp": "^10.2.0", - "nopt": "^7.2.1", - "normalize-package-data": "^6.0.2", - "npm-audit-report": "^5.0.0", - "npm-install-checks": "^6.3.0", - "npm-package-arg": "^11.0.3", - "npm-pick-manifest": "^9.1.0", - "npm-profile": "^10.0.0", - "npm-registry-fetch": "^17.1.0", - "npm-user-validate": "^2.0.1", + "nopt": "^8.0.0", + "normalize-package-data": "^7.0.0", + "npm-audit-report": "^6.0.0", + "npm-install-checks": "^7.1.0", + "npm-package-arg": "^12.0.0", + "npm-pick-manifest": "^10.0.0", + "npm-profile": "^11.0.1", + "npm-registry-fetch": "^18.0.1", + "npm-user-validate": "^3.0.0", "p-map": "^4.0.0", - "pacote": "^18.0.6", - "parse-conflict-json": "^3.0.1", - "proc-log": "^4.2.0", + "pacote": "^19.0.0", + "parse-conflict-json": "^4.0.0", + "proc-log": "^5.0.0", "qrcode-terminal": "^0.12.0", - "read": "^3.0.1", + "read": "^4.0.0", "semver": "^7.6.3", "spdx-expression-parse": "^4.0.0", - "ssri": "^10.0.6", + "ssri": "^12.0.0", "supports-color": "^9.4.0", "tar": "^6.2.1", "text-table": "~0.2.0", "tiny-relative-date": "^1.3.0", "treeverse": "^3.0.0", - "validate-npm-package-name": "^5.0.1", - "which": "^4.0.0", - "write-file-atomic": "^5.0.1" + "validate-npm-package-name": "^6.0.0", + "which": "^5.0.0", + "write-file-atomic": "^6.0.0" }, "bundleDependencies": [ "@isaacs/string-locale-compare", @@ -192,8 +192,8 @@ ], "devDependencies": { "@npmcli/docs": "^1.0.0", - "@npmcli/eslint-config": "^4.0.2", - "@npmcli/git": "^5.0.8", + "@npmcli/eslint-config": "^5.0.1", + "@npmcli/git": "^6.0.1", "@npmcli/mock-globals": "^1.0.0", "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.23.3", @@ -204,7 +204,7 @@ "cli-table3": "^0.6.4", "diff": "^5.2.0", "nock": "^13.4.0", - "npm-packlist": "^8.0.2", + "npm-packlist": "^9.0.0", "remark": "^14.0.2", "remark-gfm": "^3.0.1", "remark-github": "^11.2.4", diff --git a/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs index 0f5b9520516f23..134e2290b5e999 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs @@ -1166,7 +1166,7 @@ Object { exports[`test/lib/commands/doctor.js TAP ping 404 > ping 404 1`] = ` Connecting to the registry Not ok -404 404 Not Found - GET https://registry.npmjs.org/-/ping?write=true +404 404 Not Found - GET https://registry.npmjs.org/-/ping Checking npm version Ok current: v1.0.0, latest: v1.0.0 @@ -1226,7 +1226,7 @@ Object { exports[`test/lib/commands/doctor.js TAP ping 404 in color > ping 404 in color 1`] = ` Connecting to the registry Not ok -404 404 Not Found - GET https://registry.npmjs.org/-/ping?write=true +404 404 Not Found - GET https://registry.npmjs.org/-/ping Checking npm version Ok current: v1.0.0, latest: v1.0.0 @@ -1286,7 +1286,7 @@ Object { exports[`test/lib/commands/doctor.js TAP ping exception with code > ping failure 1`] = ` Connecting to the registry Not ok -request to https://registry.npmjs.org/-/ping?write=true failed, reason: Test Error +request to 
https://registry.npmjs.org/-/ping failed, reason: Test Error Checking npm version Ok current: v1.0.0, latest: v1.0.0 @@ -1346,7 +1346,7 @@ Object { exports[`test/lib/commands/doctor.js TAP ping exception without code > ping failure 1`] = ` Connecting to the registry Not ok -request to https://registry.npmjs.org/-/ping?write=true failed, reason: Test Error +request to https://registry.npmjs.org/-/ping failed, reason: Test Error Checking npm version Ok current: v1.0.0, latest: v1.0.0 diff --git a/deps/npm/tap-snapshots/test/lib/commands/install.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/install.js.test.cjs new file mode 100644 index 00000000000000..8f426ec3103aee --- /dev/null +++ b/deps/npm/tap-snapshots/test/lib/commands/install.js.test.cjs @@ -0,0 +1,309 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/lib/commands/install.js TAP devEngines should not utilize engines in root if devEngines is provided > must match snapshot 1`] = ` +silly config load:file:{CWD}/npmrc +silly config load:file:{CWD}/prefix/.npmrc +silly config load:file:{CWD}/home/.npmrc +silly config load:file:{CWD}/global/etc/npmrc +verbose title npm +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log +silly logfile done cleaning log files +warn EBADDEVENGINES The developer of this package has specified the following through devEngines +warn EBADDEVENGINES Invalid engine "runtime" +warn EBADDEVENGINES Invalid semver version "0.0.1" does not match "v1337.0.0" for "runtime" +warn EBADDEVENGINES { +warn EBADDEVENGINES current: { name: 'node', version: 'v1337.0.0' }, +warn EBADDEVENGINES required: { name: 'node', version: '0.0.1', onFail: 'warn' } +warn EBADDEVENGINES } +silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize} +silly idealTree buildDeps +silly reify moves {} +silly audit report null + +up to date, audited 1 package in {TIME} +found 0 vulnerabilities +` + +exports[`test/lib/commands/install.js TAP devEngines should show devEngines doesnt break engines > must match snapshot 1`] = ` +silly config load:file:{CWD}/npmrc +silly config load:file:{CWD}/home/.npmrc +silly config load:file:{CWD}/global/etc/npmrc +verbose title npm +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" "--global" "true" +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log +silly logfile done cleaning log files +silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize} +silly idealTree buildDeps +silly placeDep ROOT alpha@ OK for: want: file:../../prefix/alpha +warn EBADENGINE Unsupported engine { +warn EBADENGINE package: undefined, +warn EBADENGINE required: { node: '1.0.0' }, +warn EBADENGINE current: { node: 'v1337.0.0', npm: '42.0.0' } +warn EBADENGINE } +warn EBADENGINE Unsupported engine { +warn EBADENGINE package: undefined, +warn EBADENGINE required: { node: '1.0.0' }, +warn EBADENGINE current: { node: 'v1337.0.0', npm: '42.0.0' } +warn EBADENGINE } +silly reify moves {} +silly ADD node_modules/alpha + +added 1 package in {TIME} +` + 
+exports[`test/lib/commands/install.js TAP devEngines should show devEngines has no effect on dev package install > must match snapshot 1`] = ` +silly config load:file:{CWD}/npmrc +silly config load:file:{CWD}/prefix/.npmrc +silly config load:file:{CWD}/home/.npmrc +silly config load:file:{CWD}/global/etc/npmrc +verbose title npm +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" "--save-dev" "true" +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log +silly logfile done cleaning log files +silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize} +silly idealTree buildDeps +silly placeDep ROOT alpha@ OK for: want: file:alpha +silly reify moves {} +silly audit bulk request {} +silly audit report null +silly ADD node_modules/alpha + +added 1 package, and audited 3 packages in {TIME} +found 0 vulnerabilities +` + +exports[`test/lib/commands/install.js TAP devEngines should show devEngines has no effect on global package install > must match snapshot 1`] = ` +silly config load:file:{CWD}/npmrc +silly config load:file:{CWD}/home/.npmrc +silly config load:file:{CWD}/global/etc/npmrc +verbose title npm +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" "--global" "true" +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log +silly logfile done cleaning log files +silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize} +silly idealTree buildDeps +silly placeDep ROOT alpha@ OK for: want: file:../../prefix +silly reify moves {} +silly ADD node_modules/alpha + +added 1 package in {TIME} +` + +exports[`test/lib/commands/install.js TAP devEngines should show devEngines has no effect on package install > must match snapshot 1`] = ` +silly config load:file:{CWD}/npmrc +silly config load:file:{CWD}/prefix/.npmrc +silly config load:file:{CWD}/home/.npmrc +silly config load:file:{CWD}/global/etc/npmrc +verbose title npm +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log +silly logfile done cleaning log files +silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize} +silly idealTree buildDeps +silly placeDep ROOT alpha@ OK for: want: file:alpha +silly reify moves {} +silly audit bulk request {} +silly audit report null +silly ADD node_modules/alpha + +added 1 package, and audited 3 packages in {TIME} +found 0 vulnerabilities +` + +exports[`test/lib/commands/install.js TAP devEngines should utilize devEngines 2x error case > must match snapshot 1`] = ` +silly config load:file:{CWD}/npmrc +silly config load:file:{CWD}/prefix/.npmrc +silly config load:file:{CWD}/home/.npmrc +silly config load:file:{CWD}/global/etc/npmrc +verbose title npm +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log +silly logfile done cleaning log files +verbose stack Error: The developer of this package has specified the following through devEngines +verbose stack Invalid engine "runtime" +verbose stack Invalid name "nondescript" does not match "node" for "runtime" +verbose stack at Install.checkDevEngines ({CWD}/lib/base-cmd.js:182:27) +verbose 
stack at MockNpm.#exec ({CWD}/lib/npm.js:251:7) +verbose stack at MockNpm.exec ({CWD}/lib/npm.js:207:9) +error code EBADDEVENGINES +error EBADDEVENGINES The developer of this package has specified the following through devEngines +error EBADDEVENGINES Invalid engine "runtime" +error EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime" +error EBADDEVENGINES { +error EBADDEVENGINES current: { name: 'node', version: 'v1337.0.0' }, +error EBADDEVENGINES required: { name: 'nondescript', onFail: 'error' } +error EBADDEVENGINES } +` + +exports[`test/lib/commands/install.js TAP devEngines should utilize devEngines 2x warning case > must match snapshot 1`] = ` +silly config load:file:{CWD}/npmrc +silly config load:file:{CWD}/prefix/.npmrc +silly config load:file:{CWD}/home/.npmrc +silly config load:file:{CWD}/global/etc/npmrc +verbose title npm +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log +silly logfile done cleaning log files +warn EBADDEVENGINES The developer of this package has specified the following through devEngines +warn EBADDEVENGINES Invalid engine "runtime" +warn EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime" +warn EBADDEVENGINES { +warn EBADDEVENGINES current: { name: 'node', version: 'v1337.0.0' }, +warn EBADDEVENGINES required: { name: 'nondescript', onFail: 'warn' } +warn EBADDEVENGINES } +warn EBADDEVENGINES Invalid engine "cpu" +warn EBADDEVENGINES Invalid name "risv" does not match "x86" for "cpu" +warn EBADDEVENGINES { +warn EBADDEVENGINES current: { name: 'x86' }, +warn EBADDEVENGINES required: { name: 'risv', onFail: 'warn' } +warn EBADDEVENGINES } +silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize} +silly idealTree buildDeps +silly reify moves {} +silly audit report null + +up to date, audited 1 package in {TIME} +found 0 vulnerabilities +` + +exports[`test/lib/commands/install.js TAP devEngines should utilize devEngines failure and warning case > must match snapshot 1`] = ` +silly config load:file:{CWD}/npmrc +silly config load:file:{CWD}/prefix/.npmrc +silly config load:file:{CWD}/home/.npmrc +silly config load:file:{CWD}/global/etc/npmrc +verbose title npm +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log +silly logfile done cleaning log files +warn EBADDEVENGINES The developer of this package has specified the following through devEngines +warn EBADDEVENGINES Invalid engine "cpu" +warn EBADDEVENGINES Invalid name "risv" does not match "x86" for "cpu" +warn EBADDEVENGINES { +warn EBADDEVENGINES current: { name: 'x86' }, +warn EBADDEVENGINES required: { name: 'risv', onFail: 'warn' } +warn EBADDEVENGINES } +verbose stack Error: The developer of this package has specified the following through devEngines +verbose stack Invalid engine "runtime" +verbose stack Invalid name "nondescript" does not match "node" for "runtime" +verbose stack at Install.checkDevEngines ({CWD}/lib/base-cmd.js:182:27) +verbose stack at MockNpm.#exec ({CWD}/lib/npm.js:251:7) +verbose stack at MockNpm.exec ({CWD}/lib/npm.js:207:9) +error code EBADDEVENGINES +error EBADDEVENGINES The developer of this package has specified the following through devEngines +error EBADDEVENGINES Invalid engine 
"runtime" +error EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime" +error EBADDEVENGINES { +error EBADDEVENGINES current: { name: 'node', version: 'v1337.0.0' }, +error EBADDEVENGINES required: { name: 'nondescript' } +error EBADDEVENGINES } +` + +exports[`test/lib/commands/install.js TAP devEngines should utilize devEngines failure case > must match snapshot 1`] = ` +silly config load:file:{CWD}/npmrc +silly config load:file:{CWD}/prefix/.npmrc +silly config load:file:{CWD}/home/.npmrc +silly config load:file:{CWD}/global/etc/npmrc +verbose title npm +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log +silly logfile done cleaning log files +verbose stack Error: The developer of this package has specified the following through devEngines +verbose stack Invalid engine "runtime" +verbose stack Invalid name "nondescript" does not match "node" for "runtime" +verbose stack at Install.checkDevEngines ({CWD}/lib/base-cmd.js:182:27) +verbose stack at MockNpm.#exec ({CWD}/lib/npm.js:251:7) +verbose stack at MockNpm.exec ({CWD}/lib/npm.js:207:9) +error code EBADDEVENGINES +error EBADDEVENGINES The developer of this package has specified the following through devEngines +error EBADDEVENGINES Invalid engine "runtime" +error EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime" +error EBADDEVENGINES { +error EBADDEVENGINES current: { name: 'node', version: 'v1337.0.0' }, +error EBADDEVENGINES required: { name: 'nondescript' } +error EBADDEVENGINES } +` + +exports[`test/lib/commands/install.js TAP devEngines should utilize devEngines failure force case > must match snapshot 1`] = ` +silly config load:file:{CWD}/npmrc +silly config load:file:{CWD}/prefix/.npmrc +silly config load:file:{CWD}/home/.npmrc +silly config load:file:{CWD}/global/etc/npmrc +verbose title npm +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" "--force" "true" +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log +warn using --force Recommended protections disabled. 
+silly logfile done cleaning log files +warn EBADDEVENGINES The developer of this package has specified the following through devEngines +warn EBADDEVENGINES Invalid engine "runtime" +warn EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime" +warn EBADDEVENGINES { +warn EBADDEVENGINES current: { name: 'node', version: 'v1337.0.0' }, +warn EBADDEVENGINES required: { name: 'nondescript' } +warn EBADDEVENGINES } +silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize} +silly idealTree buildDeps +silly reify moves {} +silly audit report null + +up to date, audited 1 package in {TIME} +found 0 vulnerabilities +` + +exports[`test/lib/commands/install.js TAP devEngines should utilize devEngines success case > must match snapshot 1`] = ` +silly config load:file:{CWD}/npmrc +silly config load:file:{CWD}/prefix/.npmrc +silly config load:file:{CWD}/home/.npmrc +silly config load:file:{CWD}/global/etc/npmrc +verbose title npm +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log +silly logfile done cleaning log files +silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize} +silly idealTree buildDeps +silly reify moves {} +silly audit report null + +up to date, audited 1 package in {TIME} +found 0 vulnerabilities +` + +exports[`test/lib/commands/install.js TAP devEngines should utilize engines in root if devEngines is not provided > must match snapshot 1`] = ` +silly config load:file:{CWD}/npmrc +silly config load:file:{CWD}/prefix/.npmrc +silly config load:file:{CWD}/home/.npmrc +silly config load:file:{CWD}/global/etc/npmrc +verbose title npm +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log +silly logfile done cleaning log files +silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize} +silly idealTree buildDeps +warn EBADENGINE Unsupported engine { +warn EBADENGINE package: undefined, +warn EBADENGINE required: { node: '0.0.1' }, +warn EBADENGINE current: { node: 'v1337.0.0', npm: '42.0.0' } +warn EBADENGINE } +silly reify moves {} +silly audit report null + +up to date, audited 1 package in {TIME} +found 0 vulnerabilities +` diff --git a/deps/npm/test/fixtures/clean-snapshot.js b/deps/npm/test/fixtures/clean-snapshot.js index bcbf699cb81fc9..3439400b576aeb 100644 --- a/deps/npm/test/fixtures/clean-snapshot.js +++ b/deps/npm/test/fixtures/clean-snapshot.js @@ -42,12 +42,18 @@ const cleanZlib = str => str .replace(/"integrity": ".*",/g, '"integrity": "{integrity}",') .replace(/"size": [0-9]*,/g, '"size": "{size}",') +const cleanPackumentCache = str => str + .replace(/heap:[0-9]*/g, 'heap:{heap}') + .replace(/maxSize:[0-9]*/g, 'maxSize:{maxSize}') + .replace(/maxEntrySize:[0-9]*/g, 'maxEntrySize:{maxEntrySize}') + module.exports = { cleanCwd, cleanDate, cleanNewlines, cleanTime, cleanZlib, + cleanPackumentCache, normalizePath, pathRegex, } diff --git a/deps/npm/test/lib/commands/doctor.js b/deps/npm/test/lib/commands/doctor.js index 0c58a09e20c577..5d912d8b82f769 100644 --- a/deps/npm/test/lib/commands/doctor.js +++ b/deps/npm/test/lib/commands/doctor.js @@ -89,7 +89,7 @@ t.test('all clear', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, 
'{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -108,7 +108,7 @@ t.test('all clear in color', async t => { }, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -127,7 +127,7 @@ t.test('silent success', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -146,7 +146,7 @@ t.test('silent errors', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(404, '{}') + .get('/-/ping').reply(404, '{}') await t.rejects(npm.exec('doctor', ['ping']), { message: /Check logs/, }) @@ -161,7 +161,7 @@ t.test('ping 404', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(404, '{}') + .get('/-/ping').reply(404, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -182,7 +182,7 @@ t.test('ping 404 in color', async t => { }, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(404, '{}') + .get('/-/ping').reply(404, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -198,7 +198,7 @@ t.test('ping exception with code', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').replyWithError({ message: 'Test Error', code: 'TEST' }) + .get('/-/ping').replyWithError({ message: 'Test Error', code: 'TEST' }) .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -214,7 +214,7 @@ t.test('ping exception without code', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').replyWithError({ message: 'Test Error', code: false }) + .get('/-/ping').replyWithError({ message: 'Test Error', code: false }) .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -230,7 +230,7 @@ t.test('npm out of date', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest('2.0.0')) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -255,7 +255,7 @@ t.test('node out of date - lts', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -280,7 +280,7 @@ t.test('node out of date - current', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -297,7 +297,7 @@ t.test('non-default registry', async t => { 
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -318,7 +318,7 @@ t.test('missing git', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -344,7 +344,7 @@ t.test('windows skips permissions checks', async t => {
     globalPrefixDir: {},
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -361,7 +361,7 @@ t.test('missing global directories', async t => {
     globalPrefixDir: {},
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -377,7 +377,7 @@ t.test('missing local node_modules', async t => {
     globalPrefixDir: dirs.globalPrefixDir,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -406,7 +406,7 @@ t.test('incorrect owner', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -430,7 +430,7 @@ t.test('incorrect permissions', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -458,7 +458,7 @@ t.test('error reading directory', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -481,7 +481,7 @@ t.test('cacache badContent', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -504,7 +504,7 @@ t.test('cacache reclaimedCount', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -527,7 +527,7 @@ t.test('cacache missingContent', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -558,7 +558,7 @@ t.test('discrete checks', t => {
       ...dirs,
     })
     tnock(t, npm.config.get('registry'))
-      .get('/-/ping?write=true').reply(200, '{}')
+      .get('/-/ping').reply(200, '{}')
     await npm.exec('doctor', ['ping'])
     t.matchSnapshot(joinedOutput(), 'output')
     t.matchSnapshot({ info: logs.info, warn: logs.warn, error: logs.error }, 'logs')
@@ -586,7 +586,7 @@ t.test('discrete checks', t => {
      ...dirs,
     })
     tnock(t, npm.config.get('registry'))
-      .get('/-/ping?write=true').reply(200, '{}')
+      .get('/-/ping').reply(200, '{}')
     await npm.exec('doctor', ['registry'])
     t.matchSnapshot(joinedOutput(), 'output')
     t.matchSnapshot({ info: logs.info, warn: logs.warn, error: logs.error }, 'logs')
diff --git a/deps/npm/test/lib/commands/install.js b/deps/npm/test/lib/commands/install.js
index 0273f3deec73e9..a4d9c06129ec06 100644
--- a/deps/npm/test/lib/commands/install.js
+++ b/deps/npm/test/lib/commands/install.js
@@ -1,8 +1,16 @@
 const tspawk = require('../../fixtures/tspawk')
+const {
+  cleanCwd,
+  cleanTime,
+  cleanDate,
+  cleanPackumentCache,
+} = require('../../fixtures/clean-snapshot.js')
 
 const path = require('node:path')
 const t = require('tap')
 
+t.cleanSnapshot = (str) => cleanPackumentCache(cleanDate(cleanTime(cleanCwd(str))))
+
 const {
   loadNpmWithRegistry: loadMockNpm,
   workspaceMock,
@@ -400,3 +408,312 @@ t.test('should show install keeps dirty --workspace flag', async t => {
   assert.packageDirty('node_modules/abbrev@1.1.0')
   assert.packageInstalled('node_modules/lodash@1.1.1')
 })
+
+t.test('devEngines', async t => {
+  const mockArguments = {
+    globals: {
+      'process.platform': 'linux',
+      'process.arch': 'x86',
+      'process.version': 'v1337.0.0',
+    },
+    mocks: {
+      '{ROOT}/package.json': { version: '42.0.0' },
+    },
+  }
+
+  t.test('should utilize devEngines success case', async t => {
+    const { npm, joinedFullOutput } = await loadMockNpm(t, {
+      ...mockArguments,
+      prefixDir: {
+        'package.json': JSON.stringify({
+          name: 'test-package',
+          version: '1.0.0',
+          devEngines: {
+            runtime: {
+              name: 'node',
+            },
+          },
+        }),
+      },
+    })
+    await npm.exec('install', [])
+    const output = joinedFullOutput()
+    t.matchSnapshot(output)
+    t.ok(!output.includes('EBADDEVENGINES'))
+  })
+
+  t.test('should utilize devEngines failure case', async t => {
+    const { npm, joinedFullOutput } = await loadMockNpm(t, {
+      ...mockArguments,
+      prefixDir: {
+        'package.json': JSON.stringify({
+          name: 'test-package',
+          version: '1.0.0',
+          devEngines: {
+            runtime: {
+              name: 'nondescript',
+            },
+          },
+        }),
+      },
+    })
+    await t.rejects(
+      npm.exec('install', [])
+    )
+    const output = joinedFullOutput()
+    t.matchSnapshot(output)
+    t.ok(output.includes('error EBADDEVENGINES'))
+  })
+
+  t.test('should utilize devEngines failure force case', async t => {
+    const { npm, joinedFullOutput } = await loadMockNpm(t, {
+      ...mockArguments,
+      config: {
+        force: true,
+      },
+      prefixDir: {
+        'package.json': JSON.stringify({
+          name: 'test-package',
+          version: '1.0.0',
+          devEngines: {
+            runtime: {
+              name: 'nondescript',
+            },
+          },
+        }),
+      },
+    })
+    await npm.exec('install', [])
+    const output = joinedFullOutput()
+    t.matchSnapshot(output)
+    t.ok(output.includes('warn EBADDEVENGINES'))
+  })
+
+  t.test('should utilize devEngines 2x warning case', async t => {
+    const { npm, joinedFullOutput } = await loadMockNpm(t, {
+      ...mockArguments,
+      prefixDir: {
+        'package.json': JSON.stringify({
+          name: 'test-package',
+          version: '1.0.0',
+          devEngines: {
+            runtime: {
+              name: 'nondescript',
+              onFail: 'warn',
+            },
+            cpu: {
+              name: 'risv',
+              onFail: 'warn',
+            },
+          },
+        }),
+      },
+    })
+    await npm.exec('install', [])
+    const output = joinedFullOutput()
+    t.matchSnapshot(output)
+    t.ok(output.includes('warn EBADDEVENGINES'))
+  })
+
+  t.test('should utilize devEngines 2x error case', async t => {
+    const { npm, joinedFullOutput } = await loadMockNpm(t, {
+      ...mockArguments,
+      prefixDir: {
+        'package.json': JSON.stringify({
+          name: 'test-package',
+          version: '1.0.0',
+          devEngines: {
+            runtime: {
+              name: 'nondescript',
+              onFail: 'error',
+            },
+            cpu: {
+              name: 'risv',
+              onFail: 'error',
+            },
+          },
+        }),
+      },
+    })
+    await t.rejects(
+      npm.exec('install', [])
+    )
+    const output = joinedFullOutput()
+    t.matchSnapshot(output)
+    t.ok(output.includes('error EBADDEVENGINES'))
+  })
+
+  t.test('should utilize devEngines failure and warning case', async t => {
+    const { npm, joinedFullOutput } = await loadMockNpm(t, {
+      ...mockArguments,
+      prefixDir: {
+        'package.json': JSON.stringify({
+          name: 'test-package',
+          version: '1.0.0',
+          devEngines: {
+            runtime: {
+              name: 'nondescript',
+            },
+            cpu: {
+              name: 'risv',
+              onFail: 'warn',
+            },
+          },
+        }),
+      },
+    })
+    await t.rejects(
+      npm.exec('install', [])
+    )
+    const output = joinedFullOutput()
+    t.matchSnapshot(output)
+    t.ok(output.includes('EBADDEVENGINES'))
+  })
+
+  t.test('should show devEngines has no effect on package install', async t => {
+    const { npm, joinedFullOutput } = await loadMockNpm(t, {
+      ...mockArguments,
+      prefixDir: {
+        alpha: {
+          'package.json': JSON.stringify({
+            name: 'alpha',
+            devEngines: { runtime: { name: 'node', version: '1.0.0' } },
+          }),
+          'index.js': 'console.log("this is alpha index")',
+        },
+        'package.json': JSON.stringify({
+          name: 'project',
+        }),
+      },
+    })
+    await npm.exec('install', ['./alpha'])
+    const output = joinedFullOutput()
+    t.matchSnapshot(output)
+    t.ok(!output.includes('EBADDEVENGINES'))
+  })
+
+  t.test('should show devEngines has no effect on dev package install', async t => {
+    const { npm, joinedFullOutput } = await loadMockNpm(t, {
+      ...mockArguments,
+      prefixDir: {
+        alpha: {
+          'package.json': JSON.stringify({
+            name: 'alpha',
+            devEngines: { runtime: { name: 'node', version: '1.0.0' } },
+          }),
+          'index.js': 'console.log("this is alpha index")',
+        },
+        'package.json': JSON.stringify({
+          name: 'project',
+        }),
+      },
+      config: {
+        'save-dev': true,
+      },
+    })
+    await npm.exec('install', ['./alpha'])
+    const output = joinedFullOutput()
+    t.matchSnapshot(output)
+    t.ok(!output.includes('EBADDEVENGINES'))
+  })
+
+  t.test('should show devEngines doesnt break engines', async t => {
+    const { npm, joinedFullOutput } = await loadMockNpm(t, {
+      ...mockArguments,
+      prefixDir: {
+        alpha: {
+          'package.json': JSON.stringify({
+            name: 'alpha',
+            devEngines: { runtime: { name: 'node', version: '1.0.0' } },
+            engines: { node: '1.0.0' },
+          }),
+          'index.js': 'console.log("this is alpha index")',
+        },
+        'package.json': JSON.stringify({
+          name: 'project',
+        }),
+      },
+      config: { global: true },
+    })
+    await npm.exec('install', ['./alpha'])
+    const output = joinedFullOutput()
+    t.matchSnapshot(output)
+    t.ok(output.includes('warn EBADENGINE'))
+  })
+
+  t.test('should not utilize engines in root if devEngines is provided', async t => {
+    const { npm, joinedFullOutput } = await loadMockNpm(t, {
+      ...mockArguments,
+      prefixDir: {
+        'package.json': JSON.stringify({
+          name: 'alpha',
+          engines: {
+            node: '0.0.1',
+          },
+          devEngines: {
+            runtime: {
+              name: 'node',
+              version: '0.0.1',
+              onFail: 'warn',
+            },
+          },
+        }),
+        'index.js': 'console.log("this is alpha index")',
+      },
+    })
+    await npm.exec('install')
+    const output = joinedFullOutput()
+    t.matchSnapshot(output)
+    t.ok(!output.includes('EBADENGINE'))
+    t.ok(output.includes('warn EBADDEVENGINES'))
+  })
+
+  t.test('should utilize engines in root if devEngines is not provided', async t => {
+    const { npm, joinedFullOutput } = await loadMockNpm(t, {
+      ...mockArguments,
+      prefixDir: {
+        'package.json': JSON.stringify({
+          name: 'alpha',
+          engines: {
+            node: '0.0.1',
+          },
+        }),
+        'index.js': 'console.log("this is alpha index")',
+      },
+    })
+    await npm.exec('install')
+    const output = joinedFullOutput()
+    t.matchSnapshot(output)
+    t.ok(output.includes('EBADENGINE'))
+    t.ok(!output.includes('EBADDEVENGINES'))
+  })
+
+  t.test('should show devEngines has no effect on global package install', async t => {
+    const { npm, joinedFullOutput } = await loadMockNpm(t, {
+      ...mockArguments,
+      prefixDir: {
+        'package.json': JSON.stringify({
+          name: 'alpha',
+          bin: {
+            alpha: 'index.js',
+          },
+          devEngines: {
+            runtime: {
+              name: 'node',
+              version: '0.0.1',
+            },
+          },
+        }),
+        'index.js': 'console.log("this is alpha index")',
+      },
+      config: {
+        global: true,
+      },
+    })
+    await npm.exec('install', ['.'])
+    const output = joinedFullOutput()
+    t.matchSnapshot(output)
+    t.ok(!output.includes('EBADENGINE'))
+    t.ok(!output.includes('EBADDEVENGINES'))
+  })
+})
diff --git a/deps/npm/test/lib/commands/ping.js b/deps/npm/test/lib/commands/ping.js
index 7f90ea394f9aeb..aca1e730131df6 100644
--- a/deps/npm/test/lib/commands/ping.js
+++ b/deps/npm/test/lib/commands/ping.js
@@ -1,6 +1,8 @@
 const t = require('tap')
 const { load: loadMockNpm } = require('../../fixtures/mock-npm.js')
 const MockRegistry = require('@npmcli/mock-registry')
+const cacache = require('cacache')
+const path = require('node:path')
 
 t.test('no details', async t => {
   const { npm, logs, joinedOutput } = await loadMockNpm(t)
@@ -74,3 +76,19 @@ t.test('invalid json', async t => {
     details: {},
   })
 })
+t.test('fail when registry is unreachable even if request is cached', async t => {
+  const { npm } = await loadMockNpm(t, {
+    config: { registry: 'https://ur.npmlocal.npmtest/' },
+    cacheDir: { _cacache: {} },
+  })
+  const url = `${npm.config.get('registry')}-/ping`
+  const cache = path.join(npm.cache, '_cacache')
+  await cacache.put(cache,
+    `make-fetch-happen:request-cache:${url}`,
+    '{}', { metadata: { url } }
+  )
+  t.rejects(npm.exec('ping', []), {
+    code: 'ENOTFOUND',
+  },
+  'throws ENOTFOUND error')
+})
diff --git a/deps/npm/test/lib/npm.js b/deps/npm/test/lib/npm.js
index 00ef3f79b04c17..739aa28eb0343f 100644
--- a/deps/npm/test/lib/npm.js
+++ b/deps/npm/test/lib/npm.js
@@ -149,8 +149,8 @@ t.test('npm.load', async t => {
       'does not change npm.command when another command is called')
     t.match(logs, [
+      /timing config:load:flatten Completed in [0-9.]+ms/,
       /timing command:config Completed in [0-9.]+ms/,
-      /timing command:get Completed in [0-9.]+ms/,
     ])
     t.same(outputs, ['scope=@foo\nusage=false'])
   })